Dataset schema (field: type, observed range):
- query: string, 9 to 3.4k characters
- document: string, 9 to 87.4k characters
- metadata: dict
- negatives: sequence, 4 to 101 items
- negative_scores: sequence, 4 to 101 items
- document_score: string, 3 to 10 characters
- document_rank: string, 102 distinct values (consistent with 0-based ranks 0 through 101)

Each example row below lists the fields in this order: query, document, metadata, the bracketed list of negatives, the bracketed list of negative_scores, document_score, and document_rank.
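A minimal sketch of reading rows with the Hugging Face `datasets` library; the repository id below is a placeholder, not this dataset's actual path:

```python
from datasets import load_dataset

# Placeholder repo id -- substitute the real dataset path.
ds = load_dataset("org/code-retrieval-triplets", split="train")

row = ds[0]
print(row["query"])           # natural-language intent, e.g. a docstring
print(row["document"])        # the matching code snippet
print(len(row["negatives"]))  # number of mined hard negatives (4 to 101)
```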
query: Remove a pen from this window.
document:
def _removePen(self,pen):
    if pen in self._pencils:
        self._pencils.remove(pen)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def __del__(self):\n self._screen._removePen(self)\n del self._turtle", "def remove(self) -> None:\n self.map.remove_brush(self)", "def removePick(self):\n self.pnt = None\n vtkRenWin.delMarker(self.renWin)", "def remove_brush(self, brush: 'Solid') -> None:\n try:\n self.brushes.remove(brush)\n except ValueError:\n pass # Already removed.", "def penup(self):\n if not self._drawing:\n return\n self.pen(pendown=False)", "def delwin(self):\n\t\tfor c in self.components:\n\t\t\tc.delwin()\n\t\tself.win = None", "def RemoveShape(self, *args):\n return _XCAFDoc.XCAFDoc_ShapeTool_RemoveShape(self, *args)", "def remove_curve(self, name):\n self._curve_reg.__delitem__(name)", "def remove_drawing_rect(self):\n self.drawing_rect = QPolygonF()\n if self.connecting_rect:\n self.connecting_rect.setVisible(False)\n self.connecting_rect = None\n self.first_draw = True", "def delete_current_shape(self):\n print(\"deleting shape!\")\n self.shapes.remove(self.current_shape)\n self.current_shape = None\n self.changed()", "def remove_animation(attr):\n pm.cutKey(attr, clear=True)", "def undraw(self):\n \n if not self.canvas: return\n if not self.canvas.isClosed():\n #self.canvas.delete(self.id)\n _tkExec(self.canvas.delete, self.id)\n if self.canvas.autoflush:\n #_root.update()\n _tkCall(_root.update)\n pass\n self.canvas = None\n self.id = None", "def removeDisplayOverrides(self, primPath):\n with self.editInPrimStateLayer():\n self._stage.RemovePrim(primPath)", "def removeScene(self):\n del self.scene, self.imgPixmapItem", "def OnRemoveAutomation(self, event, automation):\n\n self.app.RemoveAutomation(automation)\n for child in self.GetChildren():\n child.Destroy()\n\n self.Draw()", "def __del__(self):\n self.clear()\n self._screen._removeTurtle(self)\n del self._turtle", "def erase_plot(self, line_position=0):\n self.axplot.lines.pop(line_position).remove\n self.fig.canvas.draw()\n return", "def remove_canvas(self,):\r\n # reset plot view beofre change\r\n self.canvas.toolbar.home()\r\n # remove widgets from canvas_vlayout\r\n self.canvas_vlayout.removeWidget(self.toolbar)\r\n self.toolbar.close()\r\n self.canvas_vlayout.removeWidget(self.canvas)\r\n self.canvas.close()", "def remove_button(self):\n self.scene.remove_child(self.toggle_button_el)", "def __remove_brick(self, g_object):\n if type(g_object) == GRect:\n self.__window.remove(g_object)\n self.__bricks_total -= 1\n self.__score += 1\n self.__set_record_board()", "def removeTooltip(self): \n if self.tooltipWindow:\n self.window.remove_child(self.tooltipWindow) \n self.tooltipWindow.destroy ()\n self.tooltipWindow = None", "def pointer_clear_focus(self) -> None:\n\n return lib.wlr_seat_pointer_clear_focus(self._ptr)", "def cleanup(self):\r\n\r\n # Remove strip from window.\r", "def del_curve(self, key):\n del self[key]\n del self._labels[key]", "def remove(self):\n\n\t\t\t\tself.parent.thing.remove_sheet(self.thing)\n\t\t\t\tdel self.parent[self.label]", "def remove_piece(self, piece):\n\n self._active_pieces[piece.get_color()].remove(piece)", "def pop_focus(self):\n self._focus.pop()", "def delX(self):\n del self.components[0]", "def delX(self):\n del self.components[0]", "def cog_unload(self):\n self._get_sketch_prompt.cancel()", "def delete_win(self, *args):\n if cmds.window(self.win_name, ex=1):\n cmds.deleteUI(self.win_name)", "def remove_drawing_poly(self):\n\n self.drawing_poly = QPolygonF()\n self.drawing_points_coords = []\n\n for p in self.drawing_points:\n p.setVisible(False)\n\n for line in self.connecting_line_list:\n 
line.setVisible(False)\n if self.connecting_line:\n self.connecting_line.setVisible(False)\n self.connecting_line = None\n self.first_draw = True\n if self.set_tooltip:\n self.set_tooltip(\"\")", "def erase(self):\r\n self.in_arrow = None\r\n self.out_arrow = None", "def subtract(self):\n self.parent.copyCurrentWinState(self.pltw)\n self.pltw.blklst[self.blkno][self.ypos] = self.data[1] - self.data[2]\n self.pltw.updatePlot()\n self.pltw.dirty = True\n self.pltw.activecurv = self.cpos\n self.parent.updateUI()\n self.hide()", "def remove_piece(self) -> None:\r\n if self.has_piece():\r\n self.piece.square = None\r\n self.piece = None", "def remove_from_hand(self):\n pass", "def _clear_drawing(self) -> None:\n self.vertices.clear()\n self.edges.clear()\n self.subplot.clear()\n self.selected_element = None\n self.pressed_elements.clear()", "def clear_press(self):\n\n for win in self.window.additional_windows:\n win.del_win()\n\n pos = self.window.physics_canvas.physics_objects\n self.window.physics_canvas.physics_objects = []\n\n for obj in pos:\n self.window.physics_canvas.canvas.delete(obj.canvas_id)\n\n for force in self.window.physics_canvas.interacting_forces:\n force.remove()\n\n for particle in self.window.physics_canvas.particles:\n self.window.physics_canvas.canvas.delete(particle.canvas_id)", "def delete(self, *args):\n if self.cur == Win.left:\n self.commands.delpl([])\n else:\n cur_song = self.rightwin.highlighted()\n\n self.rightwin.delete(cur_song)\n\n if not self.rightwin.data:\n self.switch_view_left()", "def removeFromParentAndDelete(self):\n return _libsbml.KineticLaw_removeFromParentAndDelete(self)", "def remove_object_from_canvas(self, tk_object):\n self.canvas.delete(tk_object)", "def delete_selection(self):\n if self.selected_point_index is not None:\n del self.current_shape[self.selected_point_index]\n self.selected_point_index = None\n self.changed()", "def _onRemove(self, event):\n index = self.colorlist.GetSelection()\n del self.graphColors[index]\n self._tupleListToStrings()\n if len(self.graphColors) > 0:\n self.colorlist.SetSelection(0)\n self._updateButtons(None)", "def clear(self):\n self._turtle.clear()", "def clear(self):\n self._turtle.clear()", "def pendown(self):\n if self._drawing:\n return\n self.pen(pendown=True)", "def remove(self):\n if self._parent:\n self._parent.removeChild(self)\n else:\n self.clear()", "def clear(self, event):\r\n self.selectedRegion = None\r\n self.paint()", "def delete_ball(self):\r\n self.movement = \"\"\r\n self.canvas.delete(self.ball)", "def clearScreen():\n dislin.erase()", "def SetConnectionPen(self, pen):\r\n\r\n self._dottedPen = pen\r\n self._dirty = True", "def del_symbol(self):\n if not self.tbl_symbols.selectedRanges():\n return\n\n confirm_message = \"Delete the selected symbol?\"\n reply = QtWidgets.QMessageBox().question(self, 'Message',\n confirm_message,\n QtWidgets.QMessageBox.Yes,\n QtWidgets.QMessageBox.No)\n\n if reply == QtWidgets.QMessageBox.Yes:\n row = self.tbl_symbols.currentRow()\n state_item = self.tbl_symbols.item(row, 0)\n state = state_item.text()\n self.symbols.pop(state, None)\n self.tbl_symbols.removeRow(row)\n self.clear_form()", "def __del__(self):\n\n # Delete sprite (if it has been defined)\n try:\n self.canvas.delete(self.sprite)\n except AttributeError:\n pass\n except tk.TclError:\n pass", "def rmv_chnl(self, chnl):\n\n chnl = self.chnls.pop(self.chnls.index(chnl))\n\n self.channels_layout.removeWidget(chnl)\n\n chnl.setParent(None)", "def unbind(self, *args, **kwargs):\n 
self._canvas.unbind(*args, **kwargs)", "def SetBorderPen(self, pen):\r\n\r\n self._borderPen = pen\r\n self.RefreshSelected()", "def __editDelete(self):\n if QApplication.focusWidget() == e5App().getObject(\"Shell\"):\n e5App().getObject(\"Shell\").clear()\n else:\n self.activeWindow().clear()", "def pen(self, width=None, rgb=None, alpha=None):\n self.call('pen', width, rgb, alpha)", "def removeReferenceGlyph(self, *args):\n return _libsbml.GeneralGlyph_removeReferenceGlyph(self, *args)", "def stopLineDrawing(self):\n taskMgr.remove(\"drawLineTask\")\n if self.line is not None:\n self.line.reset()\n self.line = None", "def clear_visualization(self) -> None:\n if self._drawing_handle is not None:\n sim.simAddDrawingObjectItem(self._drawing_handle, None)", "def setPen(self, *args, **kwargs):\n if kwargs == {} and (args == () or args == ('default',)):\n self.opts['pen'] = fn.mkPen(getConfigOption('foreground'))\n else:\n self.opts['pen'] = fn.mkPen(*args, **kwargs)\n\n self.picture = None\n self.update()", "def on_unhovered(self):\n if not self.is_selected:\n self.colour = self.normal_colour\n self.is_hovered = False\n self.redraw()", "def _removeClicked(self):\n isosurface = self.isosurface()\n if isosurface is not None:\n volume = isosurface.parent()\n if volume is not None:\n volume.removeIsosurface(isosurface)", "def remove_stroke(settings):\r\n if settings.fillstyle == 'border':\r\n settings.fillstyle = 'none'\r\n elif settings.fillstyle == 'fill+border':\r\n settings.fg = settings.bg\r\n settings.fillstyle = 'fill'", "def delete(self):\n\t\tself.canvas.delete('node_'+self.identifier)\n\t\tself.canvas.tag_unbind('node_'+self.identifier,\"<Any>\")", "def mouseDubbleClicked(self,mouseEvent):\n\t\tself.canvas.nodeDelete(self)", "def delete(self):\n subprocess.run([\"axicli\", \"--mode\", \"manual\", \"-M\", \"enable_xy\"])\n subprocess.run([\"axicli\", \"--mode\", \"manual\", \"-M\", \"raise_pen\"])\n subprocess.run([\"axicli\", \"--mode\", \"manual\", \"-M\", \"disable_xy\"])\n\n return self.get()", "def remove(self):\n self.hide()\n self.deleteLater()", "def remove_song(self):\n self.stop()\n self.listbox.delete(\"anchor\")\n pygame.mixer.music.stop()", "def clear(self):\n self._plt.clear()\n self._layer_items = {}", "def clear_main(self):\n\n if self.terminate:\n return\n\n self.windows['MAIN'].erase()\n self.windows['MAIN'].border(' ', ' ',\n curses.ACS_HLINE, curses.ACS_HLINE,\n curses.ACS_HLINE, curses.ACS_HLINE,\n curses.ACS_HLINE, curses.ACS_HLINE)", "def removeFromParentAndDelete(self):\n return _libsbml.StoichiometryMath_removeFromParentAndDelete(self)", "def release():\n gui.mouseUp()", "def unset_underline_style(self):\n self.font.set_underline(False)", "def pointer_notify_clear_focus(self) -> None:\n return lib.wlr_seat_pointer_notify_clear_focus(self._ptr)", "def erase(self, x, y):\n self.console.draw_char(x, y, ' ', bg=None)", "def discard(self) -> None:\n\n self.plot.close()", "def OnMouseOut( self, event ):\n self.whichChoice = 0\n event.context.triggerRedraw(1)", "def remove(self):\n self.node.destroy()", "def remove_selection(self, coord):\n button = self.grid[coord]\n button['bg'] = default_color\n button['activebackground'] = '#38dcf5'", "def detachFromPlotItem(self):\n raise NotImplementedError() # TODO", "def remove(self, widget):\n self.widgets.remove(widget)\n widget.destroy()", "def down(self):\r\n self.brush_on = True", "def DeleteWindow(self):\r\n\r\n if self._wnd:\r\n self._wnd.Destroy()\r\n self._wnd = None", "def _clear(self, event):\n if self.ignore(event) or 
self._changed_canvas():\n return\n self._background = self.canvas.copy_from_bbox(self.ax.bbox)\n self.ax.draw_artist(self._buttons)\n if hasattr(self, \"_circles\"):\n for circle in self._circles:\n self.ax.draw_artist(circle)", "def keyboard_clear_focus(self) -> None:\n lib.wlr_seat_keyboard_clear_focus(self._ptr)", "def remove(self, *args):\n return _libsbml.ListOfCompartmentGlyphs_remove(self, *args)", "def RemoveCaption():\r\n \r\n if pane.HasNotebook(): \r\n notebook = self._notebooks[pane.notebook_id] \r\n self.GetPane(notebook).CaptionVisible(False).PaneBorder(False) \r\n self.Update()", "def remove_curve(self, pv_name):\n curve = self.chart.findCurve(pv_name)\n if curve:\n self.chart.removeYChannel(curve)\n del self.channel_map[pv_name]\n self.chart.removeLegendItem(pv_name)\n\n widgets = self.findChildren((QCheckBox, QLabel, QPushButton, QGroupBox), pv_name)\n for w in widgets:\n w.deleteLater()\n\n if len(self.chart.getCurves()) < 1:\n self.enable_chart_control_buttons(False)\n self.show_legend_chk.setChecked(False)", "def clear_strip(self):\r\n wlogger.log_info(\"Clear Strip\")\r\n for led in range(self.num_led):\r\n self.set_pixel(led, 0, 0, 0)\r\n self.show()", "def remove_from_drawn(section: str, index: int):\r\n del drawn[section][index]", "def remove(self):\n\t\tcall_sdk_function('PrlBootDev_Remove', self.handle)", "def up(self):\r\n self.brush_on = False", "def __delitem__(self, key):\n self.deleteCurve(key)", "def erase(self):\n output = Output(self.stdout)\n\n output.cursor_backward(self._cursor_pos.x)\n output.cursor_up(self._cursor_pos.y)\n output.erase_down()\n output.reset_attributes()\n output.flush()\n\n self.reset()", "def remove_layer(self, layer_pos):\n self.stack.pop(layer_pos)\n return", "def delete(self):\n # exit contains our clean up code\n self.exit()\n GenericAnimatedProp.GenericAnimatedProp.delete(self)", "def kill(self):\r\n plt.close(self.fig)", "def erase(self):\n self.view.erase_status('00_git_gutter')" ]
[ "0.6923281", "0.6495933", "0.62122864", "0.616254", "0.60080194", "0.590251", "0.56725013", "0.56136733", "0.5546344", "0.55072165", "0.5487803", "0.54865557", "0.5449742", "0.5432165", "0.5357028", "0.53393936", "0.53330404", "0.52942204", "0.52776873", "0.5274253", "0.5271509", "0.5266129", "0.5238867", "0.5234722", "0.5217645", "0.52117854", "0.5207225", "0.5206207", "0.5206207", "0.52000946", "0.5198204", "0.51939744", "0.5191032", "0.51834226", "0.51765794", "0.5169632", "0.51536936", "0.5139831", "0.51396406", "0.5134221", "0.5132155", "0.51205784", "0.5118157", "0.5114785", "0.5114785", "0.510344", "0.5098179", "0.5091294", "0.5072982", "0.5069713", "0.5063907", "0.50627595", "0.5061206", "0.5056436", "0.5055551", "0.5053955", "0.5050251", "0.50487983", "0.5047476", "0.50405467", "0.5024402", "0.502277", "0.5020006", "0.5018299", "0.5015097", "0.50012934", "0.49913213", "0.49887815", "0.49869084", "0.49861407", "0.4985581", "0.49730137", "0.49717715", "0.49653536", "0.49614534", "0.49525627", "0.49522686", "0.49481753", "0.49443698", "0.49436903", "0.49401", "0.49375156", "0.49349082", "0.49341068", "0.4929916", "0.4928556", "0.49274778", "0.49217728", "0.49204823", "0.49188244", "0.49176416", "0.49123833", "0.4909241", "0.49062228", "0.49030846", "0.48962802", "0.48878455", "0.4882706", "0.4878022", "0.48761693" ]
document_score: 0.74714136
document_rank: 0
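Here the positive document's score (0.74714136) exceeds every negative score (the best negative reaches 0.6923281), which is what rank 0 records. A small sketch of the relationship, assuming, consistently with the rows shown here, that higher scores are better and the rank counts negatives that outscore the document:

```python
def rank_of_document(document_score: float, negative_scores: list[float]) -> int:
    # Rank 0 means the positive document beat every negative.
    return sum(1 for s in negative_scores if s > document_score)

# Row above: no negative exceeds 0.74714136, so the rank is 0.
assert rank_of_document(0.74714136, [0.6923281, 0.6495933, 0.62122864]) == 0
```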
query: Erase the contents of this Window. All Turtles and Pens are eliminated from the Window. Any attempt to use a previously created Turtle or Pen will fail.
document:
def clear(self):
    self._frame.clear()
    self._turtles = []
    self._gpens = []
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def clear(self):\n self._turtle.clear()", "def clear(self):\n self._turtle.clear()", "def __del__(self):\n self._screen._removePen(self)\n del self._turtle", "def __del__(self):\n self.clear()\n self._screen._removeTurtle(self)\n del self._turtle", "def _destroy(self):\n root = self._root\n turtle.Turtle._pen = None\n turtle.Turtle._screen = None\n self._root = None\n self._canvas = None\n turtle.TurtleScreen._RUNNING = True\n root.destroy()", "def reset(self):\n TNavigator.reset(self)\n TPen._reset(self)\n self._clear()\n self._drawturtle()\n self._update()", "def _clear_drawing(self) -> None:\n self.vertices.clear()\n self.edges.clear()\n self.subplot.clear()\n self.selected_element = None\n self.pressed_elements.clear()", "def clear_press(self):\n\n for win in self.window.additional_windows:\n win.del_win()\n\n pos = self.window.physics_canvas.physics_objects\n self.window.physics_canvas.physics_objects = []\n\n for obj in pos:\n self.window.physics_canvas.canvas.delete(obj.canvas_id)\n\n for force in self.window.physics_canvas.interacting_forces:\n force.remove()\n\n for particle in self.window.physics_canvas.particles:\n self.window.physics_canvas.canvas.delete(particle.canvas_id)", "def bye(self):\n self._frame._destroy()\n self._turtles = []\n self._gpens = []\n del self._frame", "def clear(self):\n self._delayvalue = _CFG[\"delay\"]\n self._colormode = _CFG[\"colormode\"]\n self._delete(\"all\")\n self._bgpic = self._createimage(\"\")\n self._bgpicname = \"nopic\"\n self._tracing = 1\n self._updatecounter = 0\n self._turtles = []\n self.bgcolor(\"white\")\n for btn in 1, 2, 3:\n self.onclick(None, btn)\n self.onkeypress(None)\n for key in self._keys[:]:\n self.onkey(None, key)\n self.onkeypress(None, key)\n Myturtle._pen = None", "def delwin(self):\n\t\tfor c in self.components:\n\t\t\tc.delwin()\n\t\tself.win = None", "def _clear_drawing(self) -> None:\n self.vertices.clear()\n self.edges.clear()\n self.subplot.clear()\n self.subplot2.clear()", "def __del__(self):\n try:\n self._frame._destroy()\n except:\n pass\n self._turtles = []\n self._pencils = []\n del self._frame", "def cleanup(self):\r\n\r\n # Remove strip from window.\r", "def reset(self):\n if hasattr(self, \"W\"):\n del self.W\n if hasattr(self, \"T\"):\n del self.T\n if hasattr(self, \"P\"):\n del self.P", "def clear(self):\r\n if self.groundPath:\r\n self.groundPath.clearProjectTexture(self.stage)\r\n self.groundPath = None\r\n\r\n if self.lightPath:\r\n self.lightPath.detachNode()\r\n self.lightPath = None\r\n\r\n if self.cameraPath:\r\n self.cameraPath.detachNode()\r\n self.cameraPath = None\r\n self.camera = None\r\n self.lens = None\r\n\r\n if self.buffer:\r\n base.graphicsEngine.removeWindow(self.buffer)\r\n self.tex = None\r\n self.buffer = None", "def clear(self):\n black = neo.Color(0,0,0)\n self.set_all(black)\n self.draw()", "def reset(self, window):\n self.__close_preview(window)\n self.__clear_context()", "def _clear_window(self):\n self.buf[:] = []", "def clear_scene(self, event):\n self.shapes = []\n self.redraw()", "def clear(self) -> None:\n self.screen.clear()", "def reset(self):\n for Myturtle in self._turtles:\n Myturtle._setmode(self._mode)\n Myturtle.reset()", "def clear(self) -> None:\n\n self.screen.fill(self.bg)", "def destroy(self):\n for window in self.windows:\n try:\n destroy_window(window)\n except:\n pass", "def clear(self):\n self.animation.stop()\n self.draw(0, 0, 0, 0, 0)", "def reset(self):\n self._turtle.clear()\n self._turtle.setposition((0,0)) \n self._turtle.shape('turtle')\n 
self.color = 'red'\n self.heading = 180\n self.speed = 0", "def erase(self):\n\tself.state={}\n\tself.display(update_board=0)", "def clear_screen(self):\r\n lst_grid = self.root.grid_slaves()\r\n for widget in lst_grid:\r\n widget.destroy()\r\n lst_pack = self.root.pack_slaves()\r\n for widget in lst_pack:\r\n widget.destroy()", "def undraw(self):\n \n if not self.canvas: return\n if not self.canvas.isClosed():\n #self.canvas.delete(self.id)\n _tkExec(self.canvas.delete, self.id)\n if self.canvas.autoflush:\n #_root.update()\n _tkCall(_root.update)\n pass\n self.canvas = None\n self.id = None", "def clearScreen():\n dislin.erase()", "def reset(self):\n self._turtle.clear()\n self._turtle.setposition((0,0)) \n try:\n self._turtle.shape('pen.gif')\n except:\n self._turtle.shape('classic')\n self._turtle.color('red')\n self.speed = 0\n \n #pair = self._turtle.color()\n self._pencolor = self._turtle.color()[0]\n self._fillcolor = self._turtle.color()[0]", "def destroy_all(self):\n\n for k in self.widgets:\n self.widgets[k].destroy()\n self.widgets = {}\n self.window.destroy()\n self.window = tk.Frame(self.root)\n self.window.pack(side=\"top\", fill=\"both\", expand=True)", "def _clear(self):\n self._fillitem = self._fillpath = None\n for item in self.items:\n self.screen._delete(item)\n self.currentLineItem = self.screen._createline()\n self.currentLine = []\n if self._drawing:\n self.currentLine.append(self._position)\n self.items = [self.currentLineItem]\n self.clearstamps()", "def clean_all(self):\n self.scene.clear()\n self.image.fill(Qt.color0)", "def discard(self) -> None:\n\n self.plot.close()", "def clear(self):\n self._x_prev = None\n self._y_prev = None", "def clearwin(event=None):\r\n # for child in mframe.winfo_children():\r\n # child.destroy()\r\n global mframe\r\n mframe.destroy()\r\n mframe = tkinter.Frame(main, width=800, height=600, background='pink')\r\n mframe.pack(fill=\"both\", expand=True, padx=20, pady=20)", "def erase(self):\r\n self.in_arrow = None\r\n self.out_arrow = None", "def destroy(self):\r\n self._tidy()\r\n self.stop()\r\n try:\r\n self.opengl.destroy(self)\r\n except:\r\n pass\r\n if self.external_mouse:\r\n try:\r\n self.external_mouse.stop()\r\n except:\r\n pass_\r\n try:\r\n self.mouse.stop()\r\n except:\r\n pass\r\n try:\r\n self.tkwin.destroy()\r\n except:\r\n pass\r\n Display.INSTANCE = None", "def unload(self):\n if self.material_background:\n self.parent.removeItem(self.material_background)\n self.material_background = None\n if self.mod_background:\n self.parent.removeItem(self.mod_background)\n self.mod_background = None\n if self.material_foreground:\n self.parent.removeItem(self.material_foreground)\n self.material_foreground = None\n if self.mod_foreground:\n self.parent.removeItem(self.mod_foreground)\n self.mod_foreground = None\n if self.liquid:\n self.parent.removeItem(self.liquid)\n self.liquid = None", "def clear_canvas():\n self.parent_class.canvas.delete(\"all\")", "def clear():\n\tglobal _s\n\t_s.screen.fill(_s.back)\n\t_s.tab(0,0)\n\t_flip()", "def flush(self):\n if self.fill:\n self._turtle.fill(False)\n self._turtle.fill(True)", "def clear_main(self):\n\n if self.terminate:\n return\n\n self.windows['MAIN'].erase()\n self.windows['MAIN'].border(' ', ' ',\n curses.ACS_HLINE, curses.ACS_HLINE,\n curses.ACS_HLINE, curses.ACS_HLINE,\n curses.ACS_HLINE, curses.ACS_HLINE)", "def destroy(self):\n self.window.destroy_output_panel(self.name)", "def clear_visualization(self) -> None:\n if self._drawing_handle is not None:\n 
sim.simAddDrawingObjectItem(self._drawing_handle, None)", "def destroy(self):\r\n self.visible = False", "def clear(self):\n self._plt.clear()\n self._layer_items = {}", "def clearPlayground(self):\n\n for cell in self.cells:\n cell.delete()\n self.cells = []\n self.generation = 0", "def clear(self):\n pygame.draw.rect(self.screen,BLACK,(0,0,WINDOWWIDTH,\n WINDOWHEIGHT))\n pygame.display.update()", "def erase(self):\n pass", "def destroy_window(self) -> None:\n self.master.destroy()\n self.master.master.create_right_left_containers()", "def clear(self):\n\n self.wads = []\n\n self.sprites = {}\n self.sprite_image_cache = {}\n self.palette = None\n\n self.sound_cache = {}", "def erase(self):\n output = Output(self.stdout)\n\n output.cursor_backward(self._cursor_pos.x)\n output.cursor_up(self._cursor_pos.y)\n output.erase_down()\n output.reset_attributes()\n output.flush()\n\n self.reset()", "def clear(self):\n self.entry_guessN.delete(0,'end')\n self.text_instructions.delete(1.0,'end')\n self.text_results.delete(1.0,'end')\n \n # activate the play button again!!", "def destroy(self):\n bullet_tools.tear_down_scene()", "def clear_scene(self):\n # Save grid visibility\n restore = self.__grid_visibility\n\n # Set invis\n if restore:\n self.__graphics_grid.set_visibility(False)\n\n # Set all objects invis\n for obj in self.scene.objects:\n obj.visible = False\n\n # Restore grid (if needed)\n if restore:\n self.__graphics_grid.set_visibility(True)", "def clear(self):\r\n ElementSet.clear(self)\r\n self.update()", "def clearCanvas():\n global c, coordinates\n c.delete(\"all\")\n drawMusicLines()\n coordinates.clear()", "def DeleteWindow(self):\r\n\r\n if self._wnd:\r\n self._wnd.Destroy()\r\n self._wnd = None", "def destroy (self,event=None):\n \n # This is enough to disable fillbox.\n self.top.withdraw()", "def clear(self):\n self._clear_without_update()\n self.update()", "def deinit(self):\n self.reset()", "def clear(self):\n self._fig = go.Figure()", "def clearViewer(self):\n\n self.removeScene()\n self.createScene()", "def reset(self):\n self.destroy()\n self.open()", "def destroy(self):\n tk.Frame.destroy(self)", "def discard(self, obj):\n self._drawables.discard(obj)\n self._updateables.discard(obj)\n self._collidables.discard(obj)\n self._projectiles.discard(obj)\n self._textboxes.discard(obj)\n self.__len__.cache_clear()", "def clear(self):\n self.root = None", "def clear(self):\r\n self.delete(0, tkinter.END)", "def clear(self):\n try:\n # This causes stupid errors with tkagg, so just wrap it in\n # try-except for now\n self.fig.clear()\n except: pass\n self.annotators.clear()\n self.dims.clear()\n self.ph.remove(self.ID)", "def strip(self):\n self.graph = None\n self.session = None\n\n return self", "def clearScreen(self):\n background = pygame.Surface(self.getSize())\n background = background.convert()\n background.fill((0, 0, 0))\n self.screen.blit(background, (0, 0))", "def clear(screen):\n screen.clear()\n screen.refresh()", "def clear(self):\n for key in self.__columns:\n self.__widths[key] = 0\n self.__data = []\n self.__selectedRow = -1\n self.__formatString = \"\"\n self._window.clear()\n self.drawBorder()", "def destroy(self):\n\n sceneOpts = self.sceneOpts\n contentPanel = self.contentPanel\n\n sceneOpts .removeListener('showXCanvas', self.name)\n sceneOpts .removeListener('showYCanvas', self.name)\n sceneOpts .removeListener('showZCanvas', self.name)\n sceneOpts .removeListener('labelSize', self.name)\n sceneOpts .removeListener('fgColour', self.name)\n sceneOpts 
.removeListener('showLabels', self.name)\n self.displayCtx .removeListener('location', self.name)\n self.displayCtx .removeListener('bounds', self.name)\n self.displayCtx .removeListener('selectedOverlay', self.name)\n self.displayCtx .removeListener('displaySpace', self.name)\n self.displayCtx .removeListener('radioOrientation', self.name)\n self.overlayList.removeListener('overlays', self.name)\n\n self.__labelMgr.destroy()\n self.__xcanvas.destroy()\n self.__ycanvas.destroy()\n self.__zcanvas.destroy()\n self.__removeEditMenu()\n\n contentPanel.Unbind(wx.EVT_SIZE)\n\n self.__xcanvas = None\n self.__ycanvas = None\n self.__zcanvas = None\n self.__focusedCanvas = None\n self.__labelMgr = None\n\n canvaspanel.CanvasPanel.destroy(self)", "def clean(self):\n for i in self.winfo_children():\n i.destroy()", "def clearScreen(self):\n self.removeFrame(self.frame1)\n self.removeFrame(self.frame2)\n self.separator.destroy()\n #Here, the app will lose the row and column configuration and does not\n #apply new configuration. Don't know why?. So that, I destroy the\n #parent (in this case, a frame), create a new frame and set it again.\n self.parent.destroy()\n mainFrame = tk.Frame(self.store[\"root\"], bg=\"#FFF\")\n self.parent = mainFrame\n self.parent.grid(column=0, row=0, sticky=\"nsew\")", "def deinit(self):\n self._font.close()", "def destroy(self):\n self.root.stop()", "def undoChanges(self):\n Objects.undoChanges(self)\n self.draw()", "def clear_scene(self):\n # Set all robots variables as invisible\n for robot in self.__robots:\n robot.set_reference_visibility(False)\n robot.set_robot_visibility(False)\n\n self.scene.waitfor(\"draw_complete\")\n\n new_list = []\n for name in self.__ui_controls.get('menu_robots').choices:\n new_list.append(name)\n\n self.__selected_robot = 0\n self.__reload_caption(new_list)", "def DetachWindow(self, window):\n assert window in self._windows, \"Unknown window!\"\n idx = self._windows.index(window)\n del self._windows[idx]\n del self._sashes[idx]\n self._SizeWindows()", "def clear(self):\n self._clear()\n self._update()", "def clear(self):\n self.display(Image.new(self.mode, self.size))", "def clear_sclasses(self, w: Wrapper) -> None:\n w.setProperty(self.style_sclass_property, '')", "def reset_window(self):\n self.sorting = False\n self.sort_list = []\n self.window.delete('all')\n for i in range(100):\n random_height = randint(40,280)\n line_id = self.window.create_line(4*i+50, 20, 4*i+50, random_height)\n self.sort_list.append([random_height, line_id])\n self.window.update()", "def reset(self):\n\n game.reset()\n sm.get_screen('game_screen').reset()", "def clear(self):\n for i in range(len(self.canvas)):\n self.canvas[i] = 0", "def wipe(self):\n self.console.clear()", "def clear(self):\n self._grid = [[None]]", "def reset(self):\r\n self._p = self._p_init\r\n self._r = self._r_init\r\n self._v = self._v_init\r\n self._w = self._w_init\r\n self._a = self._a_init\r\n self._alpha = self._alpha_init", "def OnEraseBackground(self, event):\n pass # Do nothing, to avoid flashing on MSW.", "def reset(self):\n self.clear()", "def clear(self):\n self.raster_path_line.clear()\n self.labels_path.clear()\n self.shapefile_path.clear()\n self.costumelabels.clear()\n self.layer_name.clear()\n self.class_name.clear()\n self.idfield.clear()", "def reset(self):\n for lane in self.lanes.values():\n lane.puck_area.clear_widgets()\n lane.patrons = list()\n lane.disabled = False\n lane.beers = list()\n\n self.message_holder.remove_widget(self.you_lose_label)\n 
self.message_holder.remove_widget(self.you_win_label)", "def Clear(self) -> None:", "def destroy (self,event=None):\n \n self.top.withdraw() # Don't allow this window to be destroyed.", "def clear(self):\r\n\t\tself.grid.fill(False)", "def reset(self):\n Simulation.reset(self)" ]
[ "0.7524162", "0.7524162", "0.7407025", "0.73629093", "0.72758824", "0.7150235", "0.68685234", "0.67587095", "0.6713669", "0.6578326", "0.6544966", "0.6535955", "0.65174234", "0.6500613", "0.6446637", "0.6438641", "0.6435536", "0.64201194", "0.6414517", "0.63773596", "0.6367944", "0.63064617", "0.628534", "0.6247365", "0.6214424", "0.621003", "0.6184011", "0.61606157", "0.61280054", "0.6124192", "0.60793144", "0.6076353", "0.607229", "0.60708076", "0.6054863", "0.60292214", "0.6020159", "0.6013648", "0.6006681", "0.59906656", "0.5986926", "0.59855604", "0.5984967", "0.5981018", "0.59638673", "0.59631807", "0.5947882", "0.5935001", "0.5925756", "0.59141564", "0.5895339", "0.58920866", "0.5890968", "0.58883667", "0.58823633", "0.5862745", "0.5856176", "0.58334404", "0.58317286", "0.5811775", "0.58069575", "0.5776653", "0.5751107", "0.57459545", "0.5744136", "0.57433426", "0.57353556", "0.57303375", "0.5726578", "0.5723633", "0.57190216", "0.5718742", "0.57161975", "0.5714315", "0.57126045", "0.5710248", "0.5708133", "0.5707299", "0.5701729", "0.5698142", "0.5690658", "0.56871426", "0.5686265", "0.56807995", "0.56775945", "0.5663266", "0.56615525", "0.56596166", "0.5658234", "0.5652092", "0.56518656", "0.5649284", "0.5643404", "0.5642276", "0.56369936", "0.56369364", "0.5635156", "0.5634149", "0.56286836", "0.5622676" ]
document_score: 0.7078368
document_rank: 6
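The metadata above marks each row for a triplet objective over (query, document, negatives). The expansion below is a sketch of one plausible way to materialize training triplets from a row; the dataset itself does not prescribe this recipe:

```python
def to_triplets(row: dict) -> list[tuple[str, str, str]]:
    # One (anchor, positive, negative) triplet per mined hard negative.
    # e.g. to_triplets(ds[0]) yields up to 101 triplets for the first row.
    return [(row["query"], row["document"], neg) for neg in row["negatives"]]
```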
query: Closes the graphics Window, deleting all assets
document:
def bye(self):
    self._frame._destroy()
    self._turtles = []
    self._gpens = []
    del self._frame
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def finalizeExit(self) -> None:\n base.graphicsEngine.removeAllWindows()\n if self.win is not None:\n print(\"Exiting KarelCraft app, bye!\")\n self.closeWindow(self.win)\n self.win = None\n self.destroy()\n sys.exit()", "def close(self):\n self.window.destroy()\n self.buttons_window.destroy()", "def _close_window(self):\n render_window = self._iren.GetRenderWindow()\n render_window.Finalize()\n self._iren.TerminateApp()\n\n del render_window, self._iren, self._ren, self._renWin", "def onCloseWindow(self, event):\r\n\r\n self.Destroy()", "def close_app(self):\n BASE_DIR = os.path.dirname(os.path.abspath(__file__))\n\n db_path = os.path.join(BASE_DIR, \"..\", \"DATA\", \"AIRCRAFT_COLLISION_FORECAST_SYSTEM.db\")\n clean_table(db_path, 'AIRPLANES')\n\n BASE_DIR = os.path.dirname(os.path.abspath(__file__))\n\n img_path = os.path.join(BASE_DIR, \"..\", \"GUI\", \"IMAGE\")\n\n # img_path = 'GUI\\\\IMAGE\\\\'\n img_file_names = [file_name for file_name in listdir(img_path) if isfile(join(img_path, file_name))]\n for file_name in img_file_names:\n if file_name not in ('map_marker.png', 'airplane_marker.png', 'collision_marker.png'):\n os.remove(os.path.join(img_path, file_name))\n print('Closing app')\n self.app.root_window.close()", "def delwin(self):\n\t\tfor c in self.components:\n\t\t\tc.delwin()\n\t\tself.win = None", "def close(self):\n \n self.renderer.RemoveActor(self._crosshair.actor)\n self.renderer.RemoveActor(self._scalar_bar_actor)\n self.renderer.RemoveActor(self._orientation_annotation)\n self.renderer.RemoveActor(self._corner_annotation)\n \n for layer in self._layers :\n self.renderer.RemoveActor(layer.actor)\n \n for gui_annotation in self._gui_annotations.values() :\n self.renderer.RemoveActor(gui_annotation.shape_actor)\n self.renderer.RemoveActor(gui_annotation.text_actor)", "def __onclosing(self):\n self.window.destroy()", "def close_window(self):\n # Window - END\n self.root.destroy()", "def OnCloseWindow(self, event):\r\n self.data.close()\r\n sizes[self.data.__class__.__name__] = self.GetSizeTuple()\r\n self.Destroy()", "def close(self):\n self._screen = None\n pygame.display.quit()", "def close(self):\n self.destroy()", "def close(self):\n\n cv2.destroyWindow(winname=self.title)", "def close(self):\n\n\t\tself._window.close()", "def destroy(self):\n for window in self.windows:\n try:\n destroy_window(window)\n except:\n pass", "def close(self):\n self.image.close()", "def exitProgram():\n canvas.destroy()\n tool.destroy()\n code_editor.destroy()\n sys.exit()", "def close(self):\n self.props_action.setVisible(False)\n self.cache.last_format = None\n self.cache.last_serial = None\n self.setWindowTitle(\"TCam Capture\")\n self.pixel_coords_label.setText(\"\")\n self.pixel_label.setText(\"\")\n self.current_fps_label.setText(\"\")\n\n if self.props:\n self.props.setParent(None)\n self.props = None\n self.removeDockWidget(self.props)\n\n self.set_device_menus_enabled(False)\n self.setCentralWidget(None)\n self.serial = None\n\n if self.props_widget:\n self.props_widget.stop()\n self.props_widget = None\n\n if self.view is not None:\n self.stop()\n self.view.setParent(None)\n self.view = None\n # update menu to remove mark on open camera\n self.update_device_list(self.device_list)", "def __window_close(self):\n pass", "def close():\n wfs.close()\n ax.close()", "def close_visualization(self) -> None:\n for id in self.visual_ids:\n pybullet.removeBody(id)\n self.visual_ids = []", "def close(self):\n closeI1Display()", "def cleanup(self):\n pygame.quit()", "def 
image_window_destroy(self, widget, data=None):\n self._quit()", "def close(self):\n self.microgridRender.close();", "def destroy_all(self):\n\n for k in self.widgets:\n self.widgets[k].destroy()\n self.widgets = {}\n self.window.destroy()\n self.window = tk.Frame(self.root)\n self.window.pack(side=\"top\", fill=\"both\", expand=True)", "def destroy(self):\r\n self._tidy()\r\n self.stop()\r\n try:\r\n self.opengl.destroy(self)\r\n except:\r\n pass\r\n if self.external_mouse:\r\n try:\r\n self.external_mouse.stop()\r\n except:\r\n pass_\r\n try:\r\n self.mouse.stop()\r\n except:\r\n pass\r\n try:\r\n self.tkwin.destroy()\r\n except:\r\n pass\r\n Display.INSTANCE = None", "def close(event):\n event.widget.destroy()", "def quit(self):\n\n self.main_window.destroy()", "def close(self):\n\n if self.fig:\n plt.close('all')\n plt.ioff()", "def close(self):\n self.ag.close()\n self.mag.close()", "def destroy(self):\n self.window.destroy_output_panel(self.name)", "def destroy_window(self) -> None:\n self.master.destroy()\n self.master.master.create_right_left_containers()", "def destroy_on_close(self):\n self.deleteLater()", "def exit(self):\n \t\troot.destroy()\n \t\tpass", "def done(self):\n self.root.destroy()", "def on_close(self, event):\n # Save pos and size\n x, y = self.GetPosition()\n width, height = self.GetSize()\n self.__config.set('window.x', x)\n self.__config.set('window.y', y)\n self.__config.set('window.width', width)\n self.__config.set('window.height', height)\n\n # Style\n style = self.GetWindowStyle()\n self.__config.set('window.style', style)\n\n self.__config.save()\n\n # Stop monitoring\n self.__cor.stop_monitor()\n\n # Kill graph as it seems to be stopping script from ending\n self.__graph = None\n\n # End\n event.Skip()", "def terminate(self):\n plt.close('all')", "def exitGame(self):\n self.myBoard.clearFrame()\n for tileRow in self.myBoard.tiles:\n for tile in tileRow:\n tile.destroy()\n del Tile.images[:]\n del self.myBoard.images[:]\n self.myBoard.destroy()\n self.destroy()\n exit(0)", "def exit(self):\n if self.window:\n self.window.close()", "def close_window() -> None:\n global _window\n\n if _window is None:\n return\n\n _window.close()\n _window = None\n\n # Have to do a garbage collection or Python will crash\n # if we do a lot of window open and closes. 
Like for\n # unit tests.\n gc.collect()", "def OnClose(self, event):\n\n if not self.plot_deleted:\n pub.sendMessage( 'Close.%s' %self.GetLabel(), event=self )\n\n print(\"Window: '%s', closed by event: '%s'\" %( self.GetLabel(), event.__class__.__name__ ))\n self.Destroy()", "def closeImages(self):\n if False: #__name__ == '__main__': #butchered because ORS calling name needs waitkey(3)\n cv2.waitKey(0)\n cv2.destroyAllWindows()\n else:\n cv2.waitKey(3)", "def kill(self):\r\n plt.close(self.fig)", "def _close_figure(self):\n if self.disp_images:\n plt.show()\n else:\n plt.close()", "def close( self ):\n if not self.sji is None:\n self.sji._close()\n if not self.raster is None:\n self.raster._close()", "def close(self):\n plotid = self._plotid\n f = self.set(plotid)\n plt.close(f)\n self._plotid = None\n self._plots.remove(plotid)\n self._color_indexes.pop(plotid, None)\n self._mappable.pop(plotid, None)\n self._polar.pop(plotid, None)\n self._xscales.pop(plotid, None)\n self._yscales.pop(plotid, None)\n self._errorbar_colors.pop(plotid, None)", "def release( self ):\n self.oDialogControl.dispose()", "def close(self):\n self.RemoveAllObservers()\n if hasattr(self, 'axes_widget'):\n self.hide_axes() # Necessary to avoid segfault\n self.axes_actor = None\n del self.axes_widget\n\n if self._empty_str is not None:\n self._empty_str.SetReferenceCount(0)\n self._empty_str = None", "def exit(self):\n self.root.grab_release()\n self.root.destroy()", "def delete_window(self):\r\n self.mw.eval('::ttk::CancelRepeat')\r\n SlTrace.lg(\"Closing windows\")\r\n ''' \r\n ActiveCheck.clear_active() # Disable activities\r\n if self.score_win is not None:\r\n self.score_win.destroy()\r\n self.score_win = None\r\n if self.mw is not None and self.mw.winfo_exists():\r\n self.mw.quit()\r\n self.mw.destroy()\r\n self.mw = None\r\n '''\r\n if self.on_exit is not None:\r\n self.on_exit()\r\n \r\n sys.exit() # Else quit\r", "def deinit(self):\n self._font.close()", "def close(self):\n self._close_viewer_window()\n self.env.close()", "def _close( self ):\n for raster in self._raster_data:\n if raster != []:\n raster.close()", "def destroy(self, *args):\n logger.debug(\"WarningSc.destroy called\")\n if self.manageGTK:\n if self.quit:\n sys.exit(0)\n else:\n self.gui.get_object(self.window).destroy()\n while gtk.events_pending():\n gtk.main_iteration()", "def destroy(self):\n\n sceneOpts = self.sceneOpts\n contentPanel = self.contentPanel\n\n sceneOpts .removeListener('showXCanvas', self.name)\n sceneOpts .removeListener('showYCanvas', self.name)\n sceneOpts .removeListener('showZCanvas', self.name)\n sceneOpts .removeListener('labelSize', self.name)\n sceneOpts .removeListener('fgColour', self.name)\n sceneOpts .removeListener('showLabels', self.name)\n self.displayCtx .removeListener('location', self.name)\n self.displayCtx .removeListener('bounds', self.name)\n self.displayCtx .removeListener('selectedOverlay', self.name)\n self.displayCtx .removeListener('displaySpace', self.name)\n self.displayCtx .removeListener('radioOrientation', self.name)\n self.overlayList.removeListener('overlays', self.name)\n\n self.__labelMgr.destroy()\n self.__xcanvas.destroy()\n self.__ycanvas.destroy()\n self.__zcanvas.destroy()\n self.__removeEditMenu()\n\n contentPanel.Unbind(wx.EVT_SIZE)\n\n self.__xcanvas = None\n self.__ycanvas = None\n self.__zcanvas = None\n self.__focusedCanvas = None\n self.__labelMgr = None\n\n canvaspanel.CanvasPanel.destroy(self)", "def onApplicationClose(self):\n self.movieDisplay.clearImageCache()\n 
self.quit()", "def cerrar_GUI(raiz):\r\n raiz.destroy()", "def unload(self):\n for action in self.actions:\n self.iface.removePluginMenu(\n self.tr(u'&Create xyzrgb from Mosaic/DSM'),\n action)\n self.iface.removeToolBarIcon(action)", "def close(self):\n if self.proc:\n self.proc.terminate()\n self.proc.wait()\n self.proc = None\n del os.environ['DISPLAY']\n if self.fbdir:\n os.rmdir(self.fbdir)\n self.fbdir = None", "def close(self):\n self.tl.withdraw()\n self.lumpy.quit()", "def close(self):\n self.Close()", "def closeEvent(self, event):\n self._renderer.plotter.close()\n self.close()", "def Close(self):", "def close_UI(self):", "def stay():\r\n count = 0\r\n for wind in windows:\r\n if type(wind) is tuple:\r\n wind[0].destroy()\r\n windows.remove(wind)\r\n count += 1\r\n if count == 0:\r\n option_window.destroy()\r\n windows.remove(option_window)", "def close(self):\n def destroy(comp):\n for child in comp.children:\n destroy(child)\n comp.destroy()\n \n destroy(get_base().node_manager.wrap(self))\n get_base().plugin_manager.on_scene_close()\n \n # Now remove the root node. If the root node was render, reset base\n # in order to remove and recreate the default node set.\n if self.rootNp is get_base().render:\n get_base().reset()\n\n self.rootNp.removeNode()", "def discard(self) -> None:\n\n self.plot.close()", "def _quit():\r\n\twin.quit()\r\n\twin.destroy()\r\n\tquit()", "def closeImage(j):\n displayMessage(j, 'j.CloseImage()')\n j.CloseImage()", "def close(self):\n self.exit()", "def destroy(self):\n tk.Frame.destroy(self)", "def OnClose(self):\n self.SaveData()\n self.destroy()", "def close_canvas():\r\n global _canvas\r\n if _canvas == None:\r\n raise RuntimeError(\"Canvas is not open yet.\")\r\n else:\r\n _canvas.close()\r\n _canvas = None", "def close(self):\n self.gym.close()", "def OnClose(self, event):\r\n pos.app.main.Exit()", "def exit_game(root):\n root.destroy()", "def close(self):\n for child in self.winfo_children():\n logger.debug(\"Destroying child: %s\", child)\n child.destroy()", "def end(self, event):\n plt.close()", "def clean(self):\n for i in self.winfo_children():\n i.destroy()", "def __del__(self):\n try:\n self._frame._destroy()\n except:\n pass\n self._turtles = []\n self._pencils = []\n del self._frame", "def force_close(self):\n\n\t\tself._window.force_close()", "def _destroy(self):\n root = self._root\n turtle.Turtle._pen = None\n turtle.Turtle._screen = None\n self._root = None\n self._canvas = None\n turtle.TurtleScreen._RUNNING = True\n root.destroy()", "def __del__(self):\n self.appdoc.Close()\n self.app.Stop()\n glab_instrument.Glab_Instrument.__del__(self)", "def onExitButtonClicked(self, widget):\n self.getGtkTopObject().close()", "def close_and_run(self, master, window):\r\n self.main_gui(master)\r\n window.destroy()", "def close_rings(self):\n # Closing the open rings.\n capi.geom_close_rings(self.ptr)", "def close_all_plots(self):\n return ShadowTools.plt.close(\"all\")", "def close(self): # from gym/core.py\n pass", "def destroy_view(self): \n\n self.canvas.destroy()\n self.scrollbar.destroy()\n self.header_frame.destroy()\n self.button_frame.destroy()\n self.twitter_canvas.destroy()\n self.twitter_scrollbar.destroy()", "def close_window() -> bool:\n global WINDOW\n global IMPL\n global ACTIVE_CALLBACK\n try:\n cast(PygletRenderer, IMPL).shutdown()\n cast(pyglet.window.Window, WINDOW).close()\n\n WINDOW = None\n IMPL = None\n ACTIVE_CALLBACK = update\n return True\n except Exception as e:\n unrealsdk.Log(e)\n return False", "def 
destructor(self):\n cv2.destroyAllWindows()", "def renderWindowClosed(self):\n i = 0\n while i < len(self.rendererWindows):\n rw = self.rendererWindows[i]\n\n if rw.closed:\n self.rendererWindows.pop(i)\n self.rendererWindowsSubWin.pop(i)\n\n else:\n i += 1\n\n for rw in self.rendererWindows:\n rw.outputDialog.imageTab.imageSequenceTab.refreshLinkedRenderers()", "def close(self):\n if(screen == self):\n screen = None", "def quit(self):\n self.window.quit()\n self.window.destroy()", "def cleanup(self, window):\n if self._components:\n for component in self._components:\n component.cleanup(window)", "def clear_screen(self):\r\n lst_grid = self.root.grid_slaves()\r\n for widget in lst_grid:\r\n widget.destroy()\r\n lst_pack = self.root.pack_slaves()\r\n for widget in lst_pack:\r\n widget.destroy()", "def close(self, *obj):\n self._save_size()\n self.clean_up()\n self.uistate.gwm.close_track(self.track)\n self.opened = False\n self.parent_window.present()", "def closeProgram():\n\t#Save logs\n\ttry:\n\t\tsaveLogs()\n\texcept Exception as e:\n\t\tprint(\"Error saving logs because..\",e)\n\t\t#Exit even if error saving logs\n\ttry:\n\t\tif masterPod.currentMasterPod:\n\t\t\tmasterPod.currentMasterPod.save()\n\texcept:\n\t\tprint(\"Error saving master pod\")\n\t#Destory the window\n\twindow.destroy()", "def exit(self):\n cv2.destroyAllWindows()\n print(\"Exiting..\")" ]
[ "0.75555855", "0.7477226", "0.73451036", "0.7295747", "0.7272597", "0.7269997", "0.7173892", "0.7137351", "0.7103768", "0.7031342", "0.6960648", "0.6939482", "0.69168514", "0.6913677", "0.69004905", "0.689314", "0.6878659", "0.68318313", "0.6830364", "0.68288535", "0.68266654", "0.6804971", "0.68015563", "0.67975163", "0.67788863", "0.67782307", "0.67593837", "0.6732499", "0.67310864", "0.67232865", "0.67225623", "0.66960835", "0.6683874", "0.66514283", "0.66405517", "0.66328293", "0.663245", "0.663022", "0.6627155", "0.66103613", "0.6606775", "0.65918386", "0.6590279", "0.6587581", "0.65796113", "0.65770024", "0.6567114", "0.65579945", "0.6552044", "0.6547881", "0.65466475", "0.6546635", "0.65438086", "0.6528768", "0.65226275", "0.651756", "0.65106016", "0.65080327", "0.64925736", "0.6477969", "0.64714825", "0.6469686", "0.64673185", "0.6462851", "0.6461634", "0.64567024", "0.64546555", "0.6454392", "0.64459854", "0.64369303", "0.64345276", "0.6434253", "0.64250964", "0.64219296", "0.6419976", "0.6416965", "0.6413042", "0.6410573", "0.64086616", "0.63896316", "0.6385491", "0.63837117", "0.6378435", "0.63772815", "0.637494", "0.6367011", "0.6359479", "0.6358876", "0.6352636", "0.63507104", "0.6350356", "0.6350117", "0.63493234", "0.63383925", "0.6332827", "0.6327498", "0.6325199", "0.63243425", "0.6317854", "0.62952286" ]
document_score: 0.67374146
document_rank: 27
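With a 0-based rank available per row, standard retrieval metrics follow directly. A sketch of mean reciprocal rank over the three complete rows above (ranks 0, 6, and 27); the 0-based convention is an assumption read off the data rather than documented:

```python
def mean_reciprocal_rank(ranks: list[int]) -> float:
    # 0-based ranks: rank 0 contributes a reciprocal rank of 1.0.
    return sum(1.0 / (r + 1) for r in ranks) / len(ranks)

print(mean_reciprocal_rank([0, 6, 27]))  # (1/1 + 1/7 + 1/28) / 3, about 0.393
```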
query: Plays an OS specific alert sound
document:
def beep(self):
    self._frame._root.bell()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def playSound():\n\tif os.name == \"posix\":\n\t\tduration = 0.5 # seconds\n\t\tfreq = 80 # Hz\n\t\t#os.system('play -nq -t alsa synth {} sine {}'.format(duration, freq))\n\telif os.name == \"nt\":\n\t\tduration = 500 # milliseconds\n\t\tfreq = 80 # Hz\n\t\t#winsound.Beep(freq, duration)", "def play_startup_sound():\n path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'misc/startup.mp3')\n os.system(\"mpg321 --quiet {}\".format(path))", "def handle_sound_ext(sid):\n Popen([\"afplay\", ds.sound_map[int(sid)]])\n\n # red = 1, green = 2, yellow = 3, blue = 4\n # intensity from 0 to 3 lights activated\n serial.write(\"{}{}\\n\".format(sid, 3).encode('ascii'))", "def alert_signal(signum, stack):\n if get_state():\n play(DOORBELL_SND)", "def play_sound(self, sound) -> None:\n pass", "def play(sound):\n if SOUNDDIR != \"\":\n call([\"aplay\", SOUNDDIR + sound])", "def playSound(self,sound):\n sound.play()", "def input_audio_icon():\n if use_espeak_synthesis:\n os.system(\"espeak \\\"Type in\\\"\")", "def play_sound(self, sound):\n try:\n if self.se_volume != 0:\n self.sound_lib[sound].play()\n except:\n print \"Couldn't play the sound '\", sound, \"'!\"", "def sound(bool): #py:sound\n RUR._sound_(bool)", "def output_sound():\n try:\n subprocess.call(['ffplay', '-nodisp', '-autoexit', SOUND_FILE])\n except:\n pass", "def error_tone():\n\ttones.beep(880,250)\n\ttime.sleep(0.15)", "def beep(self, parameters):\n if sys.platform.startswith(\"linux\"):\n # os.system(\"beep {}\".format(parameters))\n app.beep()\n else:\n app.beep()", "def play_sound(self, subject=\"Find My iPhone Alert\"):\n data = json.dumps(\n {\n \"device\": self.content[\"id\"],\n \"subject\": subject,\n \"clientContext\": {\"fmly\": True},\n }\n )\n self.session.post(self.sound_url, params=self.params, data=data)", "def notify():\n os.system(\"notify-send 'Search is complete.'\")\n os.system(\"paplay /usr/share/sounds/freedesktop/stereo/complete.oga\")", "def notify(title, text, subtitle, say=None):\n \n os.system(\"\"\"\n osascript -e 'display notification \"{}\" with title \"{}\" subtitle \"{}\" sound name \"Ping\"'\n \"\"\".format(text, title, subtitle, text))\n \n if say:\n os.system(\"\"\"\n osascript -e 'say \"{}\"'\n \"\"\".format(say))", "def play_sound(self):\n # http://soundbible.com/2103-1-Person-Cheering.html\n my_path = os.path.dirname(__file__)\n sound_path = os.path.join(my_path, 'yay.mp3')\n sound = SoundLoader.load(sound_path)\n sound.play()", "def TestSound():\n SoundsPath = os.path.join(AudioFilesPath, MySet.Sound + \".mp3\")\n Parent.PlaySound(SoundsPath, MySet.Volume*0.01)", "def PlaySound(self,protoId):\r\n app=GetApplication() \r\n if not protoId:\r\n return\r\n else:\r\n resId= random.choice(self.prototypes[protoId].resources) \r\n sound=app.resourcemanager.Load( resId )\r\n app.PlaySound(sound)", "def play_alarm_ringtone(self):\n try:\n mixer.init()\n mixer.music.load(\"frontend/assets/alarm_clock_ringtone.mp3\")\n mixer.music.play()\n except Exception:\n pass", "def button_sound(self):\n sound = pygame.mixer.Sound('assests/sounds/Button_Sound.wav')\n sound.play()", "def test_alsa(self):\n report(_(\"Testing ALSA sound output\"))\n return self.audio_try_play(type='alsa')", "def playSound(self, soundName):\n if globals.serverMode == 0:\n self.game.app.playSound(soundName)", "def playSoundFile(filename):\n\n # Try to play wav file on windows.\n try:\n import winsound\n winsound.PlaySound(filename, winsound.SND_ASYNC)\n return\n except ImportError:\n pass\n\n # Should play wav files on mac or 
Linux\n import subprocess\n # afplay - macOS\n # aplay - Linux (part of ALSA)\n # play - Linux (part of sox)\n # mplayer - Linux (common media player)\n progs = [ \"/usr/bin/afplay\", \"/usr/bin/aplay\", \"/usr/bin/play\", \"/usr/bin/mplayer\" ]\n\n for prog in progs:\n if os.path.exists(prog):\n # Don't print stuff to stdout or stderr\n FNULL = open(os.devnull, 'w')\n subprocess.Popen([prog, filename], stdout=FNULL, stderr=FNULL)\n return", "def play_audio(filename):\n os.system(AUDIOPLAYER + ' ' + filename)", "def speak(audio):\n engine.say(audio)\n engine.runAndWait()", "def speak(audio):\n engine.say(audio)\n engine.runAndWait()", "def setSound(self):\r\n self._shipsound = Sound('pew1.wav')\r\n self._aliensound = Sound('pew2.wav')\r\n self._shipexplode = Sound('blast1.wav')\r\n self._alienexplode = Sound('pop1.wav')", "def alarm(n):\r\n for i in range(n):\r\n playsound(sound_file)\r\n time.sleep(2)", "def sons():\n if pygame.mixer and SONS:\n music = os.path.join(main_dir, 'src/sound', '')\n pygame.mixer.music.load(music)\n pygame.mixer.music.play()", "def play_sound():\r\n try:\r\n\r\n file_name = r\"sound.mp3\"\r\n playsound.playsound(file_name)\r\n except OSError:\r\n AudioSegment.converter=r\"/Users/russelllamb/Downloads/ffmpeg\"\r\n sound=AudioSegment.from_mp3(r\"sound.mp3\")\r\n play(sound)", "def audio_try_play(self, type):\n wavfile = os.path.join(buildconfig.SPD_SOUND_DATA_PATH, \"test.wav\")\n\n binary = None\n\n if type == 'alsa':\n binary = 'aplay'\n elif type == 'pulse':\n binary = \"paplay\"\n else:\n raise NotImplementedError(\n \"Test for this audio system is not implemented\")\n\n if not shutil.which(binary):\n report(_(\"\"\"%s selected, but %s not installed. This might be a false\nwarning, but most likely sound is not working.\"\"\" % (type, binary)))\n reply = question(_(\"Are you sure that %s audio is working?\" % type), False)\n return reply\n\n cmd = '%s %s' % (binary, wavfile)\n if os.system(cmd):\n report(_(\"Can't play audio via\\n %s\" % cmd))\n report(_(\"\"\"Your audio doesn't seem to work, please fix audio first or choose\na different method.\"\"\"))\n return False\n\n\n reply = question(_(\"Did you hear the sound?\"), True)\n\n if not reply:\n report(_(\"\"\"Please examine the above output from the sound playback\nutility. If everything seems right, are you sure your audio is loud enough and\nnot muted in the mixer? 
Please fix your audio system first or choose a different\naudio output method in configuration.\"\"\"))\n return False\n else:\n report(_(\"Audio output '%s' works\" % type))\n return True", "def _play_audio(audio_file, no_audio_alerts=False):\n if not no_audio_alerts:\n audio_path = join(os.getcwd(), 'docs', 'audio', audio_file) # hard-coded path to file, can be changed\n wave_obj = sa.WaveObject.from_wave_file(audio_path)\n play_obj = wave_obj.play()\n play_obj.stop()", "async def random_sound(mood=None, blocking=True):\n mood = mood or choice(list(Mood))\n name = choice(sounds[mood])\n print('playing:', mood, name)\n await api.audio.play(name, blocking=blocking)", "def report(msg):\n print(msg)\n if use_espeak_synthesis:\n os.system(\"espeak %s\" % shlex.quote(msg))", "def make_a_sound(): # document string\n print('quack')", "async def play_sound(self) -> None:\n await self.core.loop.run_in_executor(None, self.device.play_sound)", "def sound_effects(sound):\n global effect # Making effect global so it can be used outside this function\n effect = pygame.mixer.Sound(sound) # Loading sound files\n effect.play(0) # Playing sound files", "def enable_sound(self):\n\t\tif self._setting.get(FIFE_MODULE, \"PlaySounds\"): # Set up sound if it is enabled\n\t\t\tself.soundclippool = self.engine.getSoundClipPool()\n\t\t\tself.emitter['bgsound'] = self.soundmanager.createEmitter()\n\t\t\tself.emitter['bgsound'].setGain(self._setting.get(UH_MODULE, \"VolumeMusic\"))\n\t\t\tself.emitter['bgsound'].setLooping(False)\n\t\t\tself.emitter['effects'] = self.soundmanager.createEmitter()\n\t\t\tself.emitter['effects'].setGain(self._setting.get(UH_MODULE, \"VolumeEffects\"))\n\t\t\tself.emitter['effects'].setLooping(False)\n\t\t\tself.emitter['speech'] = self.soundmanager.createEmitter()\n\t\t\tself.emitter['speech'].setGain(self._setting.get(UH_MODULE, \"VolumeEffects\"))\n\t\t\tself.emitter['speech'].setLooping(False)\n\t\t\tself.emitter['ambient'] = []\n\t\t\tself.music_rand_element = random.randint(0, len(self.menu_music) - 1)\n\t\t\tself.initial_menu_music_element = self.music_rand_element\n\n\t\t\tself.check_music() # Start background music\n\t\t\tExtScheduler().add_new_object(self.check_music, self, loops=-1)", "def play():\n xd = display.XKCDDisplayService()\n if xd.is_running():\n click.echo(\"showing the dialogs\")\n xd.send_signal(signal.SIGUSR1)\n else:\n click.echo(\"xkcd service not running\")", "def sound(self, where, stream=True):\n cook = cookie()\n S = Sound(cook, self)\n self.call('sound', cook, where, stream and 1 or 0)\n return S", "async def async_play_system_sound(self, sound=SOUND_SUCCESS):\n if self._raumfeld.group_is_valid(self._rooms):\n for room in self._rooms:\n await self._raumfeld.async_room_play_system_sound(room, sound)\n else:\n log_debug(\n \"Method was called although speaker group '%s' is invalid\" % self._rooms\n )", "def sound_callback(self, data):\n # print \"heard a loud noise!\"\n # print data.data\n sound = data.data.split(\" \")\n print sound[0]\n if float(sound[0]) > .8:\n if self.idling:\n # self.behav_pub.publish(\"greet\")\n self.ok = True\n self.control_pub.publish(\"idle stop; ed stop\")\n print \"STARTING GAME\"\n self.start_game = \"TTT\"\n # elif self.start_game != None:\n # self.ok = True\n # self.control_pub.publish(\"ed stop\")\n\n # self.behav_pub.publish(\"sleep\")\n # self.emotion_pub.publish(\"STARTLE\")", "def track_03():\n sonos.play_uri('http://nrj.de/sachsen', title='Energy Sachsen', force_radio=True)\n return \"Ok\"", "def say(msg):\n pause()\n 
os.system(\"say \" + msg)", "def test_pulse(self):\n report(_(\"Testing PULSE sound output\"))\n return self.audio_try_play(type='pulse')", "def _play(self):\n # Play unless explicitely ignored in config\n if not self.alarm_builder.config._get_debug_option(\"DO_NOT_PLAY_ALARM\"):\n self.alarm_builder.play(AlarmWorker.audio)", "def load_sound(name):\n\n class NoneSound:\n def play(self):\n pass\n\n if not pygame.mixer:\n return NoneSound()\n fullname = os.path.join(\"assets\", \"sounds\", name)\n try:\n sound = pygame.mixer.Sound(fullname)\n except pygame.error as message:\n print(\"Cannot load sound:\", name)\n raise SystemExit(message)\n return sound", "def beep_on(self):\n self.write(\":SYST:BEEP ON\")", "def play_sound(self, emitter, soundfile):\n\t\tif self._setting.get(FIFE_MODULE, \"PlaySounds\"):\n\t\t\temitter = self.emitter[emitter]\n\t\t\tassert emitter is not None, \"You need to supply a initialised emitter\"\n\t\t\tassert soundfile is not None, \"You need to supply a soundfile\"\n\t\t\temitter.reset()\n\t\t\temitter.setSoundClip(horizons.main.fife.soundclippool.addResourceFromFile(soundfile))\n\t\t\temitter.play()", "def play(filename):\n SoundClient(blocking=True).playWave(filename)", "def test_default_sound_system(self):\n\n if self.mc.sound_system is None:\n log = logging.getLogger('TestAudio')\n log.warning(\"Sound system is not enabled - unable to run audio tests\")\n self.skipTest(\"Sound system is not enabled\")\n\n self.assertIsNotNone(self.mc.sound_system)\n\n if self.mc.sound_system.audio_interface is None:\n log = logging.getLogger('TestAudio')\n log.warning(\"Sound system audio interface could not be loaded - skipping audio tests\")\n self.skipTest(\"Sound system audio interface could not be loaded\")\n\n self.assertIsNotNone(self.mc.sound_system.audio_interface)\n settings = self.mc.sound_system.audio_interface.get_settings()\n self.assertIsNotNone(settings)\n self.assertIn(settings['buffer_samples'], [1024, 2048])\n self.assertIn(settings['audio_channels'], [1, 2])\n self.assertEqual(settings['sample_rate'], 44100)\n\n # /sounds/sfx\n self.assertIn('198361_sfx-028', self.mc.sounds) # .wav\n self.assertIn('210871_synthping', self.mc.sounds) # .wav\n self.assertIn('264828_text', self.mc.sounds) # .ogg\n self.assertIn('4832__zajo__drum07', self.mc.sounds) # .wav\n self.assertIn('84480__zgump__drum-fx-4', self.mc.sounds) # .wav\n self.assertIn('100184__menegass__rick-drum-bd-hard', self.mc.sounds) # .wav\n\n # /sounds/voice\n self.assertIn('104457_moron_test', self.mc.sounds) # .wav\n self.assertIn('113690_test', self.mc.sounds) # .wav\n\n # Check for default ducking assigned only to sounds in the sfx folder\n self.assertTrue(self.mc.sounds['198361_sfx-028'].has_ducking)\n self.assertEqual(0, self.mc.sounds['198361_sfx-028'].ducking.delay)\n self.assertEqual(0.3, self.mc.sounds['198361_sfx-028'].ducking.attack)\n self.assertEqual(0.45, self.mc.sounds['198361_sfx-028'].ducking.attenuation)\n self.assertEqual(0.5, self.mc.sounds['198361_sfx-028'].ducking.release_point)\n self.assertEqual(1.0, self.mc.sounds['198361_sfx-028'].ducking.release)\n self.assertTrue(self.mc.sounds['210871_synthping'].has_ducking)\n self.assertTrue(self.mc.sounds['264828_text'].has_ducking)\n self.assertTrue(self.mc.sounds['4832__zajo__drum07'].has_ducking)\n self.assertTrue(self.mc.sounds['84480__zgump__drum-fx-4'].has_ducking)\n self.assertTrue(self.mc.sounds['100184__menegass__rick-drum-bd-hard'].has_ducking)\n\n # These sounds should not have ducking\n 
self.assertFalse(self.mc.sounds['104457_moron_test'].has_ducking)\n self.assertFalse(self.mc.sounds['113690_test'].has_ducking)", "def loadSound(path,name):\n\n class NoneSound:\n def play(self): pass\n if not pygame.mixer:\n return NoneSound()\n fullname = os.path.join(path,name)\n try:\n sound = mixer.Sound(fullname)\n except error, message:\n print \"Cannot load sound:\", name\n raise SystemExit, message\n return sound", "def _soundhelper(self):\n self._click()\n if self._last is None and self._touch is not None:\n if self._soundImage.contains(self._touch.x, self._touch.y):\n self._sound = not self._sound\n if self._soundImage.source == 'whitevolumeon.png':\n self._soundImage.source = 'whitevolumenull.png'\n else:\n self._soundImage.source = 'whitevolumeon.png'", "def play_audio(self):\n if not self.voice.get_busy():\n self.voice.play(self.sound)\n else:\n pass", "def _handleActuation(self, cmd: int) -> int:\n # If the command set ON, play alarm sound using playsound\n if cmd == ActuatorData.COMMAND_ON:\n logging.info(\"Fire Alarm Actuator Turn ON\")\n try:\n file = \"/home/darasy/git/constrained-device-app-rethd/src/main/python/programmingtheiot/cda/emulated/Alarm_Sound.mp3\"\n os.system(\"mpg123 \" + file)\n logging.info(\"Play successfully\")\n return 0\n except Exception as e:\n logging.warn(e)\n return -1\n else:\n logging.info(\"Turn off the sound\")\n return -1", "def play(file):\n # linux\n if platform == \"linux\" or platform == \"linux2\":\n if subprocess.call(\"timidity\") == 0:\n try:\n subprocess.call([\"timidity\", str(file)])\n except OSError:\n print(\"You do not have appropriate software installed to \"\n \"play MIDI files. See Timidity installation \"\n \"http://timidity.sourceforge.net/install.html\")\n\n else:\n try: subprocess.call([\"totem\", str(file)])\n except OSError:\n print(\"Maybe you do not have 'fluid-soundfont-gm' installed \"\n \"to use it with totem.\")\n\n # MAC OS X\n elif _platform == \"darwin\":\n if subprocess.call(\"timidity\") == 0:\n try:\n subprocess.call([\"timidity\", str(file)])\n except:\n print(\"You do not have appropriate software installed to \"\n \"play MIDI files. See Timidity installation \"\n \"http://timidity.sourceforge.net/install.html\")\n else:\n try:\n subprocess.call([\"open\", str(file)])\n except OSError:\n print(\"Seems that your 'open' program cannot play MIDI files\")\n # Windows\n elif _platform == \"win32\":\n try:\n subprocess.call([\"timidity\", str(file)])\n except OSError:\n print(\"You do not have appropriate software installed to \"\n \"play MIDI files. 
See Timidity installation \"\n \"http://timidity.sourceforge.net/install.html\")", "def start_alarm(sound=True, warning_message=True, email=False):\r\n\r\n if warning_message == True:\r\n ###throw up a warning Window, is a thread so that other functions can happen simultaneously\r\n t = Thread(target=warning_function)\r\n t.start()\r\n\r\n if sound == True: #fix this, the main program runs the side program\r\n\r\n h = Thread(target=sound_loop)\r\n h.daemon = True\r\n h.start()\r\n\r\n if email == True: #### Clearly haven't implemented this yet\r\n ###read in emil list and send out warning email\r\n print(\"simulation email\")", "def play_sound(f, wait=False, dic=None):\r\n if not pygame.mixer.get_init():\r\n pygame.mixer.init()\r\n if dic:\r\n sound = dic[f]\r\n else:\r\n sound = pygame.mixer.Sound(f)\r\n sound.play()\r\n if wait:\r\n while pygame.mixer.get_busy():\r\n events.wait(0.01)\r\n return sound", "def play_impact_sound(self) -> None:\n if self.health == 0:\n self.death_sound.play()\n return\n self.impact_sound.play()", "def track_04():\n sonos.play_uri('http://stream.sunshine-live.de/live/mp3-192', title='Sunshine Live', force_radio=True)\n return \"Ok\"", "def play(filename):\n if sys.platform == \"win32\":\n os.startfile(filename)\n else:\n opener =\"open\" if sys.platform == \"darwin\" else \"xdg-open\"\n subprocess.call([opener, filename])", "def say(self, text, force=False):\n if self.sound_active:\n if force:\n self.stopping_sound()\n else:\n while self.sound_active:\n logging.debug(\"waiting until sound play is finish\")\n time.sleep(1)\n logger.warning(\"sound play done - now playing next\")\n\n self.sound_active = True\n # set output high in order to turn on amplifier\n self.toggle_amp_pin(1)\n time.sleep(0.3)\n engine = pyttsx.init()\n engine.setProperty('rate', 125)\n # remove \"pass\" and uncomment next line in order to enable this function\n engine.say(text)\n engine.runAndWait()\n time.sleep(0.2)\n # set output low in order to turn off amplifier\n self.toggle_amp_pin(0)\n self.sound_active = False", "def playBeep(type: int):\n pass", "def play_warning_song(self):\n if self._warning_song_num is None:\n self.set_warning_song(0)\n\n self._serial_conn.send_command(\"141 \" + str(self._warning_song_num))", "def wavplay(filename):\n\tif (os.path.isfile(filename) == False): # raise error if wrong input file\n\t\tprint(\"Input file does not exist. Make sure you computed the analysis/synthesis\")\n\telse:\n\t\tif sys.platform == \"linux\" or sys.platform == \"linux2\":\n\t\t # linux\n\t\t subprocess.call([\"aplay\", filename])\n\n\t\telif sys.platform == \"darwin\":\n\t\t\t# OS X\n\t\t\tsubprocess.call([\"afplay\", filename])\n\t\telse:\n\t\t\tprint(\"Platform not recognized\")", "def powerup_collected(self) -> None:\n self.powerup_collected_sound.play()", "def play_menu_music(self):\n pygame.mixer.init()\n pygame.mixer.music.load(\"audio/splort_2.mp3\")\n pygame.mixer.music.play()", "def test_sound_volume(self):\n return self.send(\"test_sound_volume\")", "def play_audio():\n play_file = input(\"Would you like to play the file we created (y/n)? 
\")\n if play_file == \"y\":\n os.system(\"open output_audio.mp3\")\n else:\n print(\"Thanks for using our service, the file exists in your directory where you ran this file.\")", "def play(self, offset=0, timesLoop=0):\n self.canvas.call('playSound', self.cookie, offset, timesLoop)", "def beep(self) -> None:\n self._impl.beep()", "def announce_target(self):\n letter = self.bubbles[self.target].letter\n question = self.load_sound(\"question-\" + letter + \".wav\")\n question.play()\n if android: # get_length() not supported by android.mixer\n ms = 2000 \n else:\n ms = int(question.get_length()*1000)\n pygame.time.set_timer(Game.ANNOUNCE_EVENT, ms)\n self.state = Game.ANNOUNCE_STATE", "def test_default_sound_system(self):\n\n self.assertFalse(self.mc.machine_config['sound_system']['enabled'])\n self.assertIsNone(self.mc.sound_system)", "def play_tone(freq=440, duration=0.01):\n tone(board.A0, freq, duration)", "def play(self):\n self.__run_csound()", "def load(name):\n with pyglet.resource.file(f'sounds/{name}.wav', 'rb') as f:\n return pygame.mixer.Sound(f)", "def play_for(sample_wave, ms):\n sound = pygame.sndarray.make_sound(sample_wave)\n sound.play(-1)\n pygame.time.delay(ms)\n sound.stop()", "def toggle_sound(self):\n if self.sound: # sound is on and you stop it\n set_icon(\"soundoff.png\", self.actionSound)\n else: # sound is not on and you put it on\n set_icon(\"soundon.png\", self.actionSound)\n self.sound = not self.sound", "async def sound(self, ctx, name='default', start=0):\n voice = discord.utils.get(self.bot.voice_clients, guild=ctx.guild)\n \n if not (ctx.author.voice or voice):\n await ctx.message.add_reaction('\\U0001F615')\n await ctx.send(\"Not in a voice channel.\")\n return\n \n ffmpeg_path = os.environ['FFMPEG_PATH']\n sound_path = f'sounds/{name}.ogg'\n ffmpeg_opts = {'options': f'-ss {start}'}\n\n if not os.path.isfile(sound_path):\n if name == 'default':\n await ctx.message.add_reaction('\\U0001F615');\n await ctx.send(\"No sound specified.\")\n else:\n await ctx.message.add_reaction('\\U0001F615');\n await ctx.send(\"Sound file not found.\")\n return\n \n audio = discord.FFmpegPCMAudio(executable=ffmpeg_path,\n source=sound_path, **ffmpeg_opts)\n sound = discord.PCMVolumeTransformer(audio)\n \n if not voice:\n await self.join(ctx)\n\n voice = discord.utils.get(self.bot.voice_clients, guild=ctx.guild)\n\n if voice:\n if voice.is_playing():\n voice.stop()\n \n voice.play(sound)\n await ctx.send(f\"Playing `{name}.ogg`.\")", "def draw_sound_button(self):\n if self.settings.sound_on:\n self.screen.blit(self.image_sound_on, self.rect)\n else:\n self.screen.blit(self.image_sound_off, self.rect)", "async def alarm(ctx, on_time:float=1, off_time:float=0.6, n:int=5):\n buzzer.beep(on_time, off_time, n)\n await ctx.send(f\"Alarme acionado\")", "def playSound(self, filename, volume = 0.0, pan = 0.0, frequency = 44100, looping = False, play = True):\r\n m = re.match(self.soundRe, filename)\r\n if m:\r\n g = m.groups()[1]\r\n filename = filename.replace(g, str(int(int(g.strip('*')) * self.random.random()) + 1))\r\n try:\r\n s = stream.FileStream(file = filename)\r\n except BassError as e:\r\n raise BassError(e.code, 'Error playing file %s: %s.' 
% (filename, str(e)))\r\n v = self.baseVolume + volume\r\n if v < 0.0:\r\n v = 0.0\r\n elif v > 1.0:\r\n v = 1.0\r\n s.set_volume(v)\r\n p = 0.0 + pan\r\n if p > 1.0:\r\n p = 1.0\r\n elif p < -1.0:\r\n p = -1.0\r\n s.set_pan(p)\r\n s.set_frequency(frequency)\r\n s.set_looping(looping)\r\n if play:\r\n threading.Thread(name = 'Sound Player', target = s.play_blocking).start()\r\n return s", "def start_soundtrack(self):\n sources = screens['Combat']['music']\n self.source = choice(sources)\n Logger.info(\n 'Application: Chose \"{}\" as the combat music.'.format(self.source)\n )\n try:\n SoundManager.music[self.source]\n except KeyError:\n SoundManager.add_music(self.source, self)\n SoundManager.play_music(self.source)", "async def test_silence_voice_alert(self):\n with unittest.mock.patch.object(silence, \"VOICE_CHANNELS\") as mock_voice_channels:\n mock_voice_channels.get.return_value = self.text_channels[1].id\n\n message = \"This should show up as {channel}.\"\n await self.cog.send_message(message, self.text_channels[0], self.voice_channel, alert_target=True)\n\n updated_message = message.format(channel=self.voice_channel.mention)\n self.text_channels[0].send.assert_awaited_once_with(updated_message)\n self.text_channels[1].send.assert_awaited_once_with(updated_message)\n\n mock_voice_channels.get.assert_called_once_with(self.voice_channel.id)", "def beep(self, delay):\n self.on()\n Timer(delay, self.off).start()", "async def on_enter(bridge, ev):\n # ignore announcer channels - see ASTERISK-22744\n if ev['channel']['name'].startswith('Announcer/'):\n return\n await bridge.play(media=\"sound:ascending-2tone\")", "def waitForSound(self, time_limit = 7):\n\n\t\tself.sound.subscribe(\"sound_detection_client\")\n\n\t\t# give waiting a 7-second time limit\n\t\ttimeout = time.time() + 7\n\n\t\t# check for new sounds every 0.2 seconds\n\t\twhile (self.mem.getData(\"SoundDetected\")[0] != 1) and (time.time() < timeout):\n\t\t\ttime.sleep(0.2)\n\n\t\tself.sound.unsubscribe(\"sound_detection_client\")", "def play_sound(self, char):\n\n player = self._sounds.get(char)\n if player:\n player.play()", "def music():\n pygame.mixer.init()\n pygame.mixer.music.load(\"1.wav\")\n pygame.mixer.music.play(100)", "def load_sound(self, name):\n class NoneSound:\n def play(self): pass\n if not pygame.mixer or not pygame.mixer.get_init():\n sound = NoneSound()\n else:\n fullname = os.path.join('TeddyLevel','data', name)\n try:\n sound = pygame.mixer.Sound(fullname)\n except pygame.error, message:\n print 'Cannot load sound:', fullname\n raise SystemExit, message\n dictname = name[0:name.find('.')]\n self.dict[dictname] = sound", "async def async_locate(self, **kwargs: Any) -> None:\n await self._vacuum_bot.execute_command(PlaySound())", "def globalSound(command):\n for id in range(mixer.get_num_channels()): # Going through each mixer channel\n if command == \"stop\":\n mixer.Channel(id).stop() # Stopping all playback on the channel\n elif command == \"pause\":\n mixer.Channel(id).pause() # Pausing playback on the channel\n elif command == \"unpause\":\n mixer.Channel(id).unpause() # Unpausing playback on the channel\n elif command == \"toggleVol\":\n if mixer.Channel(id).get_volume() == 0: # Checking if the channel is muted\n mixer.Channel(id).set_volume(1) # Unmuting the channel\n else:\n mixer.Channel(id).set_volume(0) # Otherwise, mute the channel", "def play_sound(file_path: Path) -> None:\n threading.Thread(target=playsound.playsound, args=(file_path,), daemon=True).start()", "def toggle_sound(self):\n 
self.game_data.set_sound_on(not self.game_data.is_sound_on())\n self.settings_buttons[2].set_images(self.get_sound_button_img(), self.get_sound_button_img_h())", "def play_prog(self):\r\n\r\n serial_number = range(47845, 47869)\r\n chord_number = range(1, 25)\r\n for i in self.cnv:\r\n # Look for matching audio files and play them.\r\n try:\r\n filename = \"audio files/{}__{}.wav\".format(serial_number[i-1], chord_number[i-1])\r\n playsound.playsound(filename)\r\n except FileNotFoundError:\r\n print('Error: audio files not found.')", "def on_vader_start(ob, message):\n text='\"Please start speaking\"'\n subprocess.call('espeak '+ text, shell=True)\n logging.debug(\"Listening...\")", "def load_sound(self, filename):\n return mixer.Sound(os.path.join(\"sounds\", filename))", "def report_event(event): ### (5)\n\tevent_name = {\"2\": \"KeyPress\", \"4\": \"ButtonPress\"}\n\tprint (\"Time:\", str(event.time)) ### (6)\n\tprint (\"event:\", event)\n\n\t# Sample sound\n\tsound.Play( \"01_f.wav\" )", "def speak():\n sentences = ['DESTROY ALL HU- I MEAN GREETINGS MEAT BAG',\n 'She sells sea shells by the sea shore', 'Other sentence']\n while True:\n AUDIO.speak(sentences[randint(0, 2)])\n sleep(15)" ]
[ "0.7575998", "0.71356803", "0.6993021", "0.69806135", "0.6903117", "0.6809958", "0.6731711", "0.67272246", "0.67185855", "0.66775733", "0.6588559", "0.65214854", "0.6496222", "0.6477924", "0.6464842", "0.641596", "0.6394175", "0.6379717", "0.63734734", "0.6370575", "0.6352954", "0.63335925", "0.6299833", "0.62883765", "0.6261451", "0.62560266", "0.62560266", "0.62502563", "0.62424755", "0.6215086", "0.62101406", "0.61501026", "0.6111945", "0.61037177", "0.6077551", "0.6059388", "0.60537416", "0.60491246", "0.60259116", "0.59702444", "0.59612674", "0.5949182", "0.59414095", "0.59292835", "0.59279346", "0.5927702", "0.5910901", "0.5910682", "0.59083307", "0.59074986", "0.5897007", "0.5856349", "0.5851273", "0.5821002", "0.5820748", "0.58163375", "0.58133847", "0.58104086", "0.58086884", "0.57894737", "0.5784101", "0.57657385", "0.57294476", "0.57280964", "0.57280016", "0.57239395", "0.5721155", "0.57174546", "0.57135445", "0.5705778", "0.56956095", "0.5678545", "0.56770927", "0.56769574", "0.56665057", "0.56586593", "0.56580293", "0.56420857", "0.56084514", "0.56074077", "0.5602551", "0.5585598", "0.557536", "0.5572282", "0.55714136", "0.5567279", "0.5551778", "0.55440193", "0.55140567", "0.55003077", "0.5500166", "0.54666215", "0.545974", "0.5451685", "0.5445556", "0.54274905", "0.5426384", "0.5423918", "0.5422788", "0.5420468" ]
0.55198234
88
Shrinks the window down to an icon, effectively hiding it.
def iconify(self): self._frame._root.iconify()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def onMinimize(self, event):\n # if self.IsIconized():\n # self.Hide()\n self.Hide()", "def hide(self):\n self.root.iconify() # self.root.withdraw()", "def show_window(icon, item):\n icon.stop()\n app.after(0, app.deiconify())", "def __minimize_on_click(self):\n self.close()", "def hide(self):\r\n if self.visible:\r\n nid = (self.hwnd, 0)\r\n Shell_NotifyIcon(NIM_DELETE, nid)\r\n self.visible = 0", "def iconify(self):\n if self.active:\n self.master.withdraw()\n self.active = False", "def activate_statusbar_icon_close():\n pass", "def hide(self):\r\n self.rect.center = (WINDOWWIDTH/2, WINDOWHEIGHT -2000)", "def toggle(self):\n if not self.hidden and not self.vimiv.commandline.entry.is_visible():\n self.bar.hide()\n else:\n self.bar.show()\n self.hidden = not self.hidden\n # Resize the image if necessary\n if not self.vimiv.image.user_zoomed and self.vimiv.paths and \\\n not self.vimiv.thumbnail.toggled:\n self.vimiv.image.zoom_to(0)", "def main():\r\n original = SimpleImage(\"images/poppy.png\")\r\n original.show()\r\n # shrink function\r\n after_shrink = shrink('images/poppy.png')\r\n after_shrink.show()", "def OnClose(self, event):\n self.OnIconize(event, True)", "def set_window_icon(widget, remove_help=True):\n icon = QIcon(get_resource_path('icons/Ducky.ico'))\n widget.setWindowIcon(icon)\n if remove_help:\n widget.setWindowFlags(Qt.Window |\n Qt.CustomizeWindowHint |\n Qt.WindowTitleHint |\n Qt.WindowCloseButtonHint |\n Qt.WindowStaysOnTopHint)", "def maximize_app( appname,ui):\r\n ui=ui\r\n ui.doDefault_on_obj(appname, False, 'button')", "def minimap_show_hide(self):\n if self.minimap_flag == 0:\n self.code_minimap_frame.pack_forget()\n self.working_area.pack_forget()\n self.line_num_frame.pack(fill='y', side='left')\n self.working_area.pack(fill='both', side='left', expand=True)\n self.code_minimap_frame.pack(fill='both', side='left', pady=(22, 0))\n self.minimap_flag = 1\n self.View.entryconfigure(1, label=' Hide Code Minimap ', command=self.minimap_show_hide)\n else:\n self.code_minimap_frame.pack_forget()\n self.minimap_flag = 0\n self.View.entryconfigure(1, label='Show Minimap')", "def set_minimize_to_tray(self):\r\n\t\tself.statusicon = gtk.StatusIcon()\r\n\t\ticon_path = os.path.realpath(\".\" + \"\\\\icon.png\")\r\n\t\tself.statusicon = load_status_icon(icon_path, 128, 128, gtk.STOCK_GOTO_TOP)\r\n\t\tself.statusicon.set_tooltip(\"Claimtracker\")\r\n\t\tself.statusicon.connect(\"activate\", self.return_from_tray)\r\n\t\tself.window.connect(\"window-state-event\", self.minimize_to_tray)\r\n\t\tself.statusicon.set_visible(False)", "def hide(self):\n self.geometry(\"%dx%d%+d%+d\" % (0, 0, 0, 0))", "def set_visible(self):\n\t\tself.hide()\n\t\tself.__sys_tray_icon.setVisible(True)", "def hide( self, event=None ):\n self.visible = 0\n self.withdraw()", "def hide( self, event=None ):\n self.visible = 0\n self.withdraw()", "def __maximize_on_click(self):\n self.__dialog = Dialog(self.__list, self.__start_button)\n self.__dialog.close_signal.connect(self.__update_diagrams)\n self.__maximize_button.clearFocus()", "def quit_window(icon, item):\n icon.stop()\n app.destroy()", "def hide(self, event=None):\r\n self.visible = 0\r\n self.withdraw()", "def hide(self, event=None):\r\n self.visible = 0\r\n self.withdraw()", "def hide(self):\n self.root.withdraw()", "def hide(self, event=None):\n self.visible = 0\n self.withdraw()", "def limit_panel_hide(self):\r\n\r\n self.limit_panel_toggle()\r\n self.optimize_size(height_only=True)", "def __add_icon_to_button(self):\n \n 
self.set_relief(gtk.RELIEF_NONE)\n \n icon_box = gtk.HBox(False, 0)\n image = gtk.Image()\n image.set_from_stock(gtk.STOCK_CLOSE, gtk.ICON_SIZE_MENU)\n \n settings = gtk.Widget.get_settings(self)\n width, height = gtk.icon_size_lookup_for_settings(settings, gtk.ICON_SIZE_MENU)\n gtk.Widget.set_size_request(self, width + 0, height + 2)\n \n icon_box.pack_start(image, True, False, 0)\n self.add(icon_box)\n \n image.show()\n icon_box.show()", "def resize(self):\r\n del self.win\r\n self.__create_win()", "def toggle_maximized(self):\n if self.isMaximized():\n self.showNormal()\n else:\n self.showMaximized()", "def on_closing():\n if not app.is_minimize_to_system_tray.get():\n app.save_setting()\n app.destroy()\n else:\n app.withdraw()\n plus_image = os.path.join(\"data\", \"plus.gif\")\n image = Image.open(plus_image)\n menus = menu(item(\"Quit\", quit_window), item(\"Show\", show_window, default=True))\n icon = pystray.Icon(\"name\", image, \"My System Tray Icon\", menus)\n icon.run()", "def hide(self):\n self.frame.grid_forget()\n self.visible = False", "def toggle(self):\n if self.is_visible():\n self.hide()\n self._app[\"main_window\"].grab_focus()\n self._app[\"statusbar\"].update_info()\n elif self._app.get_paths() and \\\n self._app.get_focused_widget() not in [\"lib\", \"thu\"]:\n if os.path.islink(self._app.get_path()):\n self._app[\"statusbar\"].message(\n \"Manipulating symbolic links is not supported\", \"warning\")\n elif not edit_supported(self._app.get_path()):\n self._app[\"statusbar\"].message(\n \"This filetype is not supported\", \"warning\")\n else:\n self.show()\n self._pixbuf = self._app[\"image\"].get_pixbuf_original()\n self.sliders[\"bri\"].grab_focus()\n self._app[\"statusbar\"].update_info()\n else:\n if self._app[\"thumbnail\"].toggled:\n self._app[\"statusbar\"].message(\n \"Manipulate not supported in thumbnail mode\", \"warning\")\n elif self._app[\"library\"].is_focus():\n self._app[\"statusbar\"].message(\n \"Manipulate not supported in library\", \"warning\")\n else:\n self._app[\"statusbar\"].message(\"No image open to edit\",\n \"warning\")", "def hideBtnImg(*args, **kwargs):\n\targs[0].get_image().hide()", "def close_launcher(self):\n self.misc.go_to_win(self.misc.bufwinnr(self.name))\n if self.misc.bufname() == self.name:\n vim.command('bd')\n self.misc.go_to_win(self.misc.bufwinnr(self.curr_buf.number))\n if self.nohidden_set:\n vim.command(\"set nohidden\")\n self.reset_launcher()", "def __window_resizeTo(self, iWidth, iHeight):\n pass", "def toggle_fullscreen(self):\n if self.isFullScreen(): # go back to normal\n set_icon(\"fullscreen.png\", self.actionFullscreen)\n self.paint_background(self, Qt.NoBrush)\n self.setGeometry(self.window_dimensions)\n self.showNormal()\n else: # go to fullscreen\n set_icon(\"closefullscreen.png\", self.actionFullscreen)\n self.window_dimensions = self.geometry() # save current window settings\n self.paint_background(self, Qt.black, True)\n self.showFullScreen()\n\n if not self.force_toolbar_display: self.toolBar.hide()\n\n self.update_image() # update the image to fit the fullscreen mode", "def hide_window2(self):\n self.ui_item.Unsplit(self.splitter_window_two.ui_item)", "def _trailingIcons(self):", "def hide_show_volume_scale(_=None):\n if volume_ctrl.winfo_viewable():\n volume_frame.place_forget()\n else:\n volume_frame.place(x=512,y=120)", "def MaximizeButton(self, visible=True):\r\n \r\n return self.SetFlag(self.buttonMaximize, visible)", "def hide (self):\n \n self.top.withdraw()", "def closeEvent(self, 
event):\n\n\t\tevent.ignore()\n\t\tself.hide()\n\t\tself.__sys_tray_icon.show()", "def hide(self) -> None:\n self.current_height = 0", "def set_expanded(control_instance, resize_button, state):\n # Change the icon depending on the button status\n icon = QtGui.QIcon()\n\n # Hide the control\n if not state:\n control_instance.hide()\n icon.addPixmap(\n QtGui.QPixmap(_fromUtf8(\":/soma_widgets_icons/nav_right\")),\n QtGui.QIcon.Normal, QtGui.QIcon.Off)\n\n # Show the control\n else:\n control_instance.show()\n icon.addPixmap(\n QtGui.QPixmap(_fromUtf8(\":/soma_widgets_icons/nav_down\")),\n QtGui.QIcon.Normal, QtGui.QIcon.Off)\n\n # Set the new button icon\n resize_button.setIcon(icon)", "def hide(self):\r\n\t\tself.frame.Show(False)", "def OnClose(self, event):\n self.Show(False)", "def OnClose(self, event):\n\t\tself.Show(False)", "def reveal(self):\n self.root.deiconify()", "def maximize(self):\n lib.SDL_MaximizeWindow(self._ptr)", "def hide(self):\n self.visible = False", "def minimize_app(appname,ui):\r\n ui=ui\r\n ui.doDefault_on_obj('Minimize', False, 'button') \r\n time.sleep(WAIT)", "def hide_action_bar(self):\n self.remove_widget(self.bottom_action_bar)", "def activate_statusbar_icon_mode():\n pass", "def HideMe(self, event):\n self.Hide()", "def window_info_toggle():\n window_info.hide() if window_info.showing else window_info.show()", "def maximise(self) -> None:\n if self.win is not None:\n try:\n if self.win.style_get_property(\"maximize_initially\"):\n self.win.unmaximise()\n else:\n self.win.maximise()\n except ValueError:\n print(\"error :(\")\n self.win.maximize()", "def close_UI(self):", "def do_hf_unhide(self, arg):\n self.show_hidden_frames = True\n self.refresh_stack()", "def hide_gui():\n pass", "def icon(self):", "def MinimizeButton(self, visible=True):\r\n\r\n return self.SetFlag(self.buttonMinimize, visible)", "def hide(self):\n\n self.stop_repl()\n self.visible = False\n self.withdraw() # Hide window", "def subnotebook_hide(self):\n if self.subnotebook and self.subnotebook.winfo_ismapped():\n logger.debug(\"Hiding subnotebook\")\n self.subnotebook.pack_forget()\n self.subnotebook.destroy()\n self.subnotebook = None", "def toggleWindowVisibility(*args, **kwargs)->None:\n pass", "def maximize_option():\n Width=MaxWidth\n Height=MaxHeight - WinTitle -WinBorder\n PosX=LeftPadding\n PosY=TopPadding\n move_active(PosX,PosY,Width,Height)\n raise_window(\":ACTIVE:\")", "def HideHint(self):\r\n\r\n # hides a transparent window hint if there is one\r\n if self._hint_window:\r\n self._hint_window.Hide()\r\n\r\n self._hint_fadetimer.Stop()\r\n self._last_hint = wx.Rect()", "def toggle_window_visibility(self):\r\n if self.isHidden():\r\n self.show_window()\r\n self.visibilityAction.setText(self.hideWindowString)\r\n else:\r\n self.hide_window()\r\n self.visibilityAction.setText(self.showWindowString)", "def hide(self):\n self.set_visible(False)", "def unHide(self):\n self.visible = True", "def minimize(self):\n lib.SDL_MinimizeWindow(self._ptr)", "def minimize_to_tray(self, widget, event, data=None):\r\n\t\tif event.changed_mask & gtk.gdk.WINDOW_STATE_ICONIFIED:\r\n\t\t\tif event.new_window_state & gtk.gdk.WINDOW_STATE_ICONIFIED:\r\n\t\t\t\tlogging.debug(\"minimize to tray\")\r\n\t\t\t\tself.window_state = self.check_window_state()\r\n\t\t\t\tself.statusicon.set_visible(True)\r\n\t\t\t\tself.window.hide_all()", "def on_window1_window_state_event(self, widget, event, *user_params):\n\t\tif not self.settings[\"min_icon\"]: return\n\t\tif (event.changed_mask == 
gtk.gdk.WINDOW_STATE_ICONIFIED):\t\t\t\t\t\t\t# minimize button clicked\n\t\t\tif ( (event.new_window_state == gtk.gdk.WINDOW_STATE_ICONIFIED) or\n\t\t\t (event.new_window_state == gtk.gdk.WINDOW_STATE_ICONIFIED | gtk.gdk.WINDOW_STATE_MAXIMIZED) ):\t# going to iconify\n\t\t\t\t#self.window1.iconify()\t\t\t# for smooth change with compiz\n\t\t\t\t#while gtk.events_pending():\r\n\t\t\t\t#\tgtk.main_iteration()\r\n\t\t\t\tself.stateico.set_visible(True)\n\t\t\t\tself.window1.set_property('visible', False)", "def remove(self):\n self.hide()\n self.deleteLater()", "def OnClose(self, event = None):\n ##Close.\n self.Hide()\n self.Destroy()", "def autoResize(self):\n\t\t#self.infoLabelBox.set_size_request(1,1)\n\t\timgSize = [self.currentPixbuf.get_width() * self.scaleFactor, self.currentPixbuf.get_height() * self.scaleFactor]\n\t\timgSize = map(lambda x: max(int(x), 1), imgSize)\n\t\tif not self.fullscreenToggle:\n\t\t\tself.resize(imgSize[0], imgSize[1])\n\t\t\tposition = ( int(0.5 * (self.get_screen().get_width() - imgSize[0])),\n\t\t\t\tint(0.5 * (self.get_screen().get_height() - imgSize[1])))\n\t\t\tself.move(position[0], position[1])\n\t\t\tself.fixed.move(self.imgDisplay, 0, 0)\n\t\t\tif not self.hideTransparent and self.imgTrans.bgOn:\n\t\t\t\tself.imgTrans.set_size_request(imgSize[0], imgSize[1])\n\t\t\t# make eventbox the same size as image\n\t\t\t# this will not be correct when infoLabelBox is visible\n\t\t\tself.eventBox.set_size_request(imgSize[0], imgSize[1])\n\t\telse:\n\t\t\tself.fixed.move(self.imgDisplay, max(0, int((self.get_size()[0] - imgSize[0]) / 2)),\n\t\t\t\tmax(0, int((self.get_size()[1] - imgSize[1]) / 2)))\n\t\t\tif not self.hideTransparent and self.imgTrans.bgOn:\n\t\t\t\tself.imgTrans.set_size_request(int(self.get_size()[0]), int(self.get_size()[1]))\n\t\t\t# make eventbox the same size as screen\n\t\t\tself.eventBox.set_size_request(self.get_size()[0],self.get_size()[1])", "def main(self): \n self.window.hide_all()\n self.window.isHide = True\n \n gtk.main()", "def resize_child_window(self):\n s = struct.pack('HHHH', 0, 0, 0, 0)\n x = fcntl.ioctl(0,termios.TIOCGWINSZ,s)\n fcntl.ioctl(self.child_fd,termios.TIOCSWINSZ,x)", "def onHelpButtonClicked(self, widget):\n self.getGtkTopObject().close()", "def _leadingIcons(self):", "def hide(self):\n self.window.run_command(\"hide_panel\", {\"panel\": self.full_name})", "def Icon(self, size, name):\n # ------------------------------------------------------------------------\n bitmap = self.Bitmap(size, name)\n if not bitmap:\n return None\n icon = wx.EmptyIcon()\n icon.CopyFromBitmap(bitmap)\n return icon", "def update_icon(self, _widget, _callback_data):\n\t\t\n\t\tprint \"in update_icon for \", self.name\n\t\tself.icon = self.__window.get_icon()\n\t\tself.icon.save(self.imgpath, \"png\")\n\t\tif not self.pile is None:\n\t\t\tself.pile.update_child_icon(self)\n\t\treturn", "def collapse(layout, key):\n return sg.pin(sg.Column(layout, key=key, visible=False), shrink=False)", "def _hide_vol_bar(self):\n self.root.withdraw()", "def __window_close(self):\n pass", "def Command(self, id, msg):\n\n # If the user click on the \"Close\" item of the menu\n if id == self.ID_LEFT_MENU_FIRST_ITEM:\n self.Close()\n\n # If the user click on the bitmap button from the menu\n elif id == self.ID_RIGHT_MENU_SHOW_CONTENT:\n # Updates the stored value of the toggle state\n self.toogleState = not self.toogleState\n\n # Hides the element\n self.HideElement(self.ID_HIDDEN_GROUP, self.toogleState)\n\n # Notifies that the content of the parent group 
of the group we just hide has changed and need to be redrawn\n self.LayoutChanged(self.ID_MAIN_GROUP)\n\n return True", "def resizeToView(self): # The Tool Options widget is a nightmare to resize :)\r\n view = self.activeView()\r\n\r\n if view and self.widget.isVisible():\r\n # We start with the tool options sizeHint as a goal size and then\r\n # shrink it down if necessary to fit inside the view.\r\n containerSize = self.widget.widget().sizeHint()\r\n\r\n # I don't like all these magic numbers (And repeteition) but I honestly don't know what they\r\n # correspond to either. Margins, I suppose, but then why is one of the numbers 14\r\n # when the margins are all 4?\r\n\r\n if view.height() < containerSize.height() + self.btnHide.height() + 14:\r\n containerSize.setHeight(view.height() - self.btnHide.height() - 14)\r\n\r\n if view.width() < containerSize.width() + 8:\r\n containerSize.setWidth(view.width() - 8)\r\n \r\n self.container.setFixedSize(containerSize)\r\n\r\n # Once the tool options container is an appropriate size, resize the\r\n # Pad widget to it's appropriate sizes\r\n padSize = self.sizeHint()\r\n if view.height() < padSize.height():\r\n padSize.setHeight(view.height())\r\n\r\n if view.width() < padSize.width():\r\n padSize.setWidth(view.width())\r\n \r\n self.setFixedSize(padSize)\r\n\r\n elif not self.widget.isVisible():\r\n # Resize the widget to the size of the button + some extra height for the hidden widget I guess?\r\n\r\n # I just don't know what these numbers are, or why I can't use the \r\n # button's own sizeHint. The result also varies if something else\r\n # about the layout varies.\r\n self.setFixedSize(23, 54)", "def do_hf_hide(self, arg):\n self.show_hidden_frames = False\n self.refresh_stack()", "def HideWindows(self):\r\n \r\n for child in self._itemWithWindow:\r\n if not self.IsItemVisible(child):\r\n for column in xrange(self.GetColumnCount()):\r\n wnd = child.GetWindow(column)\r\n if wnd and wnd.IsShown():\r\n wnd.Hide()", "def _shrink_secondary(self, amt):\n self._resize_secondary(-amt)", "def _show_vol_bar(self):\n self.root.deiconify()", "def cut_main_screen(im):\n top = 0\n left = 0\n bottom = 980\n right = 1350\n return im[top:bottom, left:right].copy()", "def unmap(widget):\n result = False\n if widget and widget.winfo_exists() and widget.winfo_ismapped():\n result = True\n geom_mgr = widget.winfo_manager()\n if geom_mgr == \"grid\":\n widget.grid_forget()\n elif geom_mgr == \"pack\":\n widget.pack_forget()\n elif geom_mgr == \"place\":\n widget.place_forget()\n else:\n result = False\n return result", "def show_hide_toolbar(self):\n if self.showing: # hiding\n self.toolbar_frame.pack_forget()\n self.Toolbars.entryconfigure(1, label=\" Show toolbar \", command=self.show_hide_toolbar)\n self.showing = False\n else: # displaying\n self.paned_win.pack_forget()\n self.on_off_project_hierarchy.pack_forget()\n self.statusbar_frame.pack_forget()\n\n self.statusbar_frame.pack(fill='x', side='bottom')\n self.toolbar_frame.pack(fill='x', side='top')\n self.on_off_project_hierarchy.pack(fill='y', side='left', ipadx=3)\n self.paned_win.pack(fill='both', expand=1)\n\n self.Toolbars.entryconfigure(1, label=\" Hide toolbar \")\n self.showing = True", "def shrink(self):\n if self.focused == 0:\n self._shrink_main(self.change_ratio)\n elif len(self.clients) == 2:\n self._shrink_solo_secondary(self.change_ratio)\n else:\n self._shrink_secondary(self.change_size)\n self.group.layout_all()", "def hide(self):\n\n # if there is no view/dockable, then there's nothing to 
try and hide\n if not(self.view and self.dockable):\n return\n\n # hide the dockable, and drop references to the widgets\n self.dockable.hide()\n self.view = None\n self.dockable = None", "def showMaximized(self):\n self.usualSize = self.size()\n self.setWindowState(Qt.WindowMaximized)\n self.move(0, 0)\n self.setFixedSize(QSize(self.screenSize.width(), self.screenSize.height()))\n self.maximized = True\n QWidget.showMaximized(self)", "def OnFrameClose(self, event):\r\n\t\tself.Hide()", "def close(self):\n closeI1Display()", "def deiconify(self):\n self._frame._root.deiconify()" ]
[ "0.7197378", "0.6926259", "0.6514841", "0.6401315", "0.63795197", "0.6355315", "0.62768066", "0.6245441", "0.622365", "0.6119201", "0.6098737", "0.60097057", "0.59481025", "0.5944409", "0.58673495", "0.5827085", "0.5737087", "0.5717614", "0.5717614", "0.569668", "0.5652999", "0.5650052", "0.5650052", "0.5646667", "0.56439424", "0.56379604", "0.5634502", "0.5601975", "0.55844045", "0.5575749", "0.55690944", "0.55242723", "0.5508251", "0.5501937", "0.54934853", "0.54725397", "0.54697794", "0.5461674", "0.5460485", "0.5459265", "0.5454", "0.54476", "0.5441926", "0.5438925", "0.543389", "0.54113823", "0.5382017", "0.5378146", "0.53680545", "0.53619754", "0.5349647", "0.5342969", "0.5339154", "0.5339097", "0.5335509", "0.5331029", "0.5329287", "0.5324853", "0.5321851", "0.5315016", "0.53058654", "0.52970517", "0.5280406", "0.52762187", "0.52520925", "0.52494764", "0.5239078", "0.5232378", "0.52292186", "0.52236766", "0.52164847", "0.52119386", "0.5147716", "0.5146871", "0.51174885", "0.5116956", "0.5105657", "0.51027215", "0.50991464", "0.5087775", "0.50856036", "0.50740576", "0.505863", "0.5058213", "0.50573426", "0.50468427", "0.5045579", "0.5022815", "0.501928", "0.5017612", "0.49994445", "0.49959263", "0.4990276", "0.49829268", "0.49750635", "0.49734056", "0.49667686", "0.4965814", "0.49649042", "0.49644297" ]
0.5634438
27
Expands the window from an icon so that it is visible.
def deiconify(self): self._frame._root.deiconify()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def onMinimize(self, event):\n # if self.IsIconized():\n # self.Hide()\n self.Hide()", "def show_window(icon, item):\n icon.stop()\n app.after(0, app.deiconify())", "def iconify(self):\n if self.active:\n self.master.withdraw()\n self.active = False", "def __minimize_on_click(self):\n self.close()", "def hide(self):\n self.root.iconify() # self.root.withdraw()", "def maximize_app( appname,ui):\r\n ui=ui\r\n ui.doDefault_on_obj(appname, False, 'button')", "def MinimizeButton(self, visible=True):\r\n\r\n return self.SetFlag(self.buttonMinimize, visible)", "def activate_statusbar_icon_close():\n pass", "def __maximize_on_click(self):\n self.__dialog = Dialog(self.__list, self.__start_button)\n self.__dialog.close_signal.connect(self.__update_diagrams)\n self.__maximize_button.clearFocus()", "def set_minimize_to_tray(self):\r\n\t\tself.statusicon = gtk.StatusIcon()\r\n\t\ticon_path = os.path.realpath(\".\" + \"\\\\icon.png\")\r\n\t\tself.statusicon = load_status_icon(icon_path, 128, 128, gtk.STOCK_GOTO_TOP)\r\n\t\tself.statusicon.set_tooltip(\"Claimtracker\")\r\n\t\tself.statusicon.connect(\"activate\", self.return_from_tray)\r\n\t\tself.window.connect(\"window-state-event\", self.minimize_to_tray)\r\n\t\tself.statusicon.set_visible(False)", "def minimize(self):\n lib.SDL_MinimizeWindow(self._ptr)", "def set_expanded(control_instance, resize_button, state):\n # Change the icon depending on the button status\n icon = QtGui.QIcon()\n\n # Hide the control\n if not state:\n control_instance.hide()\n icon.addPixmap(\n QtGui.QPixmap(_fromUtf8(\":/soma_widgets_icons/nav_right\")),\n QtGui.QIcon.Normal, QtGui.QIcon.Off)\n\n # Show the control\n else:\n control_instance.show()\n icon.addPixmap(\n QtGui.QPixmap(_fromUtf8(\":/soma_widgets_icons/nav_down\")),\n QtGui.QIcon.Normal, QtGui.QIcon.Off)\n\n # Set the new button icon\n resize_button.setIcon(icon)", "def set_window_icon(widget, remove_help=True):\n icon = QIcon(get_resource_path('icons/Ducky.ico'))\n widget.setWindowIcon(icon)\n if remove_help:\n widget.setWindowFlags(Qt.Window |\n Qt.CustomizeWindowHint |\n Qt.WindowTitleHint |\n Qt.WindowCloseButtonHint |\n Qt.WindowStaysOnTopHint)", "def set_visible(self):\n\t\tself.hide()\n\t\tself.__sys_tray_icon.setVisible(True)", "def minimize_app(appname,ui):\r\n ui=ui\r\n ui.doDefault_on_obj('Minimize', False, 'button') \r\n time.sleep(WAIT)", "def iconify(self):\n self._frame._root.iconify()", "def show(self):\n self.root.update()\n self.root.deiconify()", "def on_window1_window_state_event(self, widget, event, *user_params):\n\t\tif not self.settings[\"min_icon\"]: return\n\t\tif (event.changed_mask == gtk.gdk.WINDOW_STATE_ICONIFIED):\t\t\t\t\t\t\t# minimize button clicked\n\t\t\tif ( (event.new_window_state == gtk.gdk.WINDOW_STATE_ICONIFIED) or\n\t\t\t (event.new_window_state == gtk.gdk.WINDOW_STATE_ICONIFIED | gtk.gdk.WINDOW_STATE_MAXIMIZED) ):\t# going to iconify\n\t\t\t\t#self.window1.iconify()\t\t\t# for smooth change with compiz\n\t\t\t\t#while gtk.events_pending():\r\n\t\t\t\t#\tgtk.main_iteration()\r\n\t\t\t\tself.stateico.set_visible(True)\n\t\t\t\tself.window1.set_property('visible', False)", "def reveal(self):\n self.root.deiconify()", "def minimap_show_hide(self):\n if self.minimap_flag == 0:\n self.code_minimap_frame.pack_forget()\n self.working_area.pack_forget()\n self.line_num_frame.pack(fill='y', side='left')\n self.working_area.pack(fill='both', side='left', expand=True)\n self.code_minimap_frame.pack(fill='both', side='left', pady=(22, 0))\n self.minimap_flag = 1\n 
self.View.entryconfigure(1, label=' Hide Code Minimap ', command=self.minimap_show_hide)\n else:\n self.code_minimap_frame.pack_forget()\n self.minimap_flag = 0\n self.View.entryconfigure(1, label='Show Minimap')", "def MaximizeButton(self, visible=True):\r\n \r\n return self.SetFlag(self.buttonMaximize, visible)", "def toggle_maximized(self):\n if self.isMaximized():\n self.showNormal()\n else:\n self.showMaximized()", "def open_imagingWindow(self):\n self.window = surveyWindow(self, imaging=True)\n self.hide()", "def window_info_toggle():\n window_info.hide() if window_info.showing else window_info.show()", "def show(self, nid =None):\r\n flags = NIF_ICON | NIF_MESSAGE\r\n if nid is None:\r\n nid = (self.hwnd, 0, flags, WM_USER+20, self.hicon)\r\n if self.visible:\r\n self.hide()\r\n Shell_NotifyIcon(NIM_ADD, nid)\r\n self.visible = 1", "def maximize_option():\n Width=MaxWidth\n Height=MaxHeight - WinTitle -WinBorder\n PosX=LeftPadding\n PosY=TopPadding\n move_active(PosX,PosY,Width,Height)\n raise_window(\":ACTIVE:\")", "def maximize(self):\n lib.SDL_MaximizeWindow(self._ptr)", "def main():\r\n original = SimpleImage(\"images/poppy.png\")\r\n original.show()\r\n # shrink function\r\n after_shrink = shrink('images/poppy.png')\r\n after_shrink.show()", "def show(self):\n # * displays the window, after using either the iconify or the withdraw methods\n self.wm_deiconify()\n # * this method can be called after the event which needs to happen before the window event\n self.wait_window()", "def on_stateico_clicked(self, *a):\n\t\tself.window1.set_property('visible', True)\n\t\tself.stateico.set_visible(False)\n\t\tself.window1.present()", "def toggle(self):\n if not self.hidden and not self.vimiv.commandline.entry.is_visible():\n self.bar.hide()\n else:\n self.bar.show()\n self.hidden = not self.hidden\n # Resize the image if necessary\n if not self.vimiv.image.user_zoomed and self.vimiv.paths and \\\n not self.vimiv.thumbnail.toggled:\n self.vimiv.image.zoom_to(0)", "def __add_icon_to_button(self):\n \n self.set_relief(gtk.RELIEF_NONE)\n \n icon_box = gtk.HBox(False, 0)\n image = gtk.Image()\n image.set_from_stock(gtk.STOCK_CLOSE, gtk.ICON_SIZE_MENU)\n \n settings = gtk.Widget.get_settings(self)\n width, height = gtk.icon_size_lookup_for_settings(settings, gtk.ICON_SIZE_MENU)\n gtk.Widget.set_size_request(self, width + 0, height + 2)\n \n icon_box.pack_start(image, True, False, 0)\n self.add(icon_box)\n \n image.show()\n icon_box.show()", "def resize(self):\r\n del self.win\r\n self.__create_win()", "def on_action_5_triggered(self):\n # TODO: not implemented yet\n print('最小化')\n self.showMinimized()", "def toggle_fullscreen(self):\n if self.isFullScreen(): # go back to normal\n set_icon(\"fullscreen.png\", self.actionFullscreen)\n self.paint_background(self, Qt.NoBrush)\n self.setGeometry(self.window_dimensions)\n self.showNormal()\n else: # go to fullscreen\n set_icon(\"closefullscreen.png\", self.actionFullscreen)\n self.window_dimensions = self.geometry() # save current window settings\n self.paint_background(self, Qt.black, True)\n self.showFullScreen()\n\n if not self.force_toolbar_display: self.toolBar.hide()\n\n self.update_image() # update the image to fit the fullscreen mode", "def maximise(self) -> None:\n if self.win is not None:\n try:\n if self.win.style_get_property(\"maximize_initially\"):\n self.win.unmaximise()\n else:\n self.win.maximise()\n except ValueError:\n print(\"error :(\")\n self.win.maximize()", "def setExpanded(self):", "def hide(self):\r\n if self.visible:\r\n 
nid = (self.hwnd, 0)\r\n Shell_NotifyIcon(NIM_DELETE, nid)\r\n self.visible = 0", "def Minimize(self):\r\n \r\n return self.SetFlag(self.optionMinimized, True)", "def close_launcher(self):\n self.misc.go_to_win(self.misc.bufwinnr(self.name))\n if self.misc.bufname() == self.name:\n vim.command('bd')\n self.misc.go_to_win(self.misc.bufwinnr(self.curr_buf.number))\n if self.nohidden_set:\n vim.command(\"set nohidden\")\n self.reset_launcher()", "def show_dialog(self):\n self.showMaximized()\n sys.exit(self.app.exec_())", "def on_closing():\n if not app.is_minimize_to_system_tray.get():\n app.save_setting()\n app.destroy()\n else:\n app.withdraw()\n plus_image = os.path.join(\"data\", \"plus.gif\")\n image = Image.open(plus_image)\n menus = menu(item(\"Quit\", quit_window), item(\"Show\", show_window, default=True))\n icon = pystray.Icon(\"name\", image, \"My System Tray Icon\", menus)\n icon.run()", "def activate_statusbar_icon_mode():\n pass", "def ev_windowminimized(self, event: WindowEvent) -> None:", "def quit_window(icon, item):\n icon.stop()\n app.destroy()", "def minimize(self):\n\t\tself.__window.minimize()\n\t\tself.update_minimization()\n\t\treturn", "def OnClose(self, event):\n self.OnIconize(event, True)", "def win_raise(self):\n self.raise_()\n self.activateWindow()", "def hide(self):\n self.root.withdraw()", "def restart_menu(self):\n self.__show_menu = True", "def show(self):\n # This function has to be placed here (and not in the user.py script)\n self.showMaximized()\n visapp.run()", "def hide(self):\r\n self.rect.center = (WINDOWWIDTH/2, WINDOWHEIGHT -2000)", "def onMinimize(self, eventDict = None):\n self.mainWindow.minimize()", "def showMaximized(self):\n self.usualSize = self.size()\n self.setWindowState(Qt.WindowMaximized)\n self.move(0, 0)\n self.setFixedSize(QSize(self.screenSize.width(), self.screenSize.height()))\n self.maximized = True\n QWidget.showMaximized(self)", "def minimize_to_tray(self, widget, event, data=None):\r\n\t\tif event.changed_mask & gtk.gdk.WINDOW_STATE_ICONIFIED:\r\n\t\t\tif event.new_window_state & gtk.gdk.WINDOW_STATE_ICONIFIED:\r\n\t\t\t\tlogging.debug(\"minimize to tray\")\r\n\t\t\t\tself.window_state = self.check_window_state()\r\n\t\t\t\tself.statusicon.set_visible(True)\r\n\t\t\t\tself.window.hide_all()", "def show(self, window):\r\n\r\n return", "def attachLeoIcon(self, window: Any) -> None:\n if self.appIcon:\n window.setWindowIcon(self.appIcon)", "def MacReopenApp(self):\n self.BringWindowToFront()", "def showBasic(self):\n self.setWindowIcon(QIcon(self.icon))\n self.setWindowTitle(self.title)\n self.setGeometry(*self.posXY, *self.windowSize)\n self.show()", "def on_show(self, event):\r\n self.frame.Show()\r\n self.frame.Raise()", "def close(self):\n self.parent.activate()", "def toggle(self):\n if self.is_visible():\n self.hide()\n self._app[\"main_window\"].grab_focus()\n self._app[\"statusbar\"].update_info()\n elif self._app.get_paths() and \\\n self._app.get_focused_widget() not in [\"lib\", \"thu\"]:\n if os.path.islink(self._app.get_path()):\n self._app[\"statusbar\"].message(\n \"Manipulating symbolic links is not supported\", \"warning\")\n elif not edit_supported(self._app.get_path()):\n self._app[\"statusbar\"].message(\n \"This filetype is not supported\", \"warning\")\n else:\n self.show()\n self._pixbuf = self._app[\"image\"].get_pixbuf_original()\n self.sliders[\"bri\"].grab_focus()\n self._app[\"statusbar\"].update_info()\n else:\n if self._app[\"thumbnail\"].toggled:\n self._app[\"statusbar\"].message(\n \"Manipulate 
not supported in thumbnail mode\", \"warning\")\n elif self._app[\"library\"].is_focus():\n self._app[\"statusbar\"].message(\n \"Manipulate not supported in library\", \"warning\")\n else:\n self._app[\"statusbar\"].message(\"No image open to edit\",\n \"warning\")", "def resize_child_window(self):\n s = struct.pack('HHHH', 0, 0, 0, 0)\n x = fcntl.ioctl(0,termios.TIOCGWINSZ,s)\n fcntl.ioctl(self.child_fd,termios.TIOCSWINSZ,x)", "def toggle_window_visibility(self):\r\n if self.isHidden():\r\n self.show_window()\r\n self.visibilityAction.setText(self.hideWindowString)\r\n else:\r\n self.hide_window()\r\n self.visibilityAction.setText(self.showWindowString)", "def icon(self):", "def maximize(self):\n\n self.driver.maximize_window(self.handle)", "def show(self):\n self.wid.show()", "def return_from_tray(self, event):\r\n\t\tlogging.debug(\"return from tray\")\r\n\t\tif self.window_state: # Was the window previously maximized?\r\n\t\t\tself.window.maximize()\r\n\t\tself.window.show_all()\r\n\t\tself.window.present()\r\n\t\tself.statusicon.set_visible(False)", "def update_information(self):\n if self._image_path is not None:\n self._open_button.Enable() \n else:\n self._open_button.Disable() \n \n self.Fit()\n self.GetSizer().SetSizeHints(self)", "def SetIcons (self):\n self.window.setWindowIcon(QIcon(win_icon))\n self.window.tabs.setTabIcon(0,QIcon(ui_data))\n self.window.tabs.setTabIcon(1,QIcon(ui_field))\n self.window.tabs.setTabIcon(2,QIcon(ui_query))", "def ev_windowmaximized(self, event: WindowEvent) -> None:", "def move_to_win(self):\n self.external_win = PlotWindow(plot=self.pw, parent=self)\n self.external_win.closeWin.connect(lambda: self.layout().takeAt(1))\n self.external_win.closeWin.connect(lambda: self.layout().insertWidget(1, self.pw))\n self.external_win.closeWin.connect(lambda: self.btn_open.setEnabled(True))\n self.external_win.show()", "def expand_or_collapse(control_instance, resize_button):\n # Hide the control\n if control_instance.isVisible():\n state = False\n # Show the control\n else:\n state = True\n ControllerControlWidget.set_expanded(control_instance, resize_button,\n state)", "def icon(self, new_icon):\r\n self.set({\"icon\": new_icon})", "def hide (self):\n \n self.top.withdraw()", "def show_window(self):\n self.show()", "def comprar(self):\n self.new_window = tk.Toplevel(self.menu)\n Comprar(self.new_window)", "def __expandAllSetup(self):\n btn = QtWidgets.QPushButton()\n self.__toolbar.addWidget(btn)\n btn.setIcon(QtGui.QIcon(\":down.png\"))\n btn.setFocusPolicy(QtCore.Qt.NoFocus)\n btn.setToolTip(\"Expand all groups\")\n btn.clicked.connect(self.__monitorCue.expandAll) # pylint: disable=no-member", "def hide( self, event=None ):\n self.visible = 0\n self.withdraw()", "def hide( self, event=None ):\n self.visible = 0\n self.withdraw()", "def update_ui(self):\r\n pass\r\n windowdata = self.window.get_data(self.__class__.__name__)\r\n windowdata['action_group'].get_action('UndoClose').set_sensitive(len(self.tabs_closed) > 0)\r\n windowdata['action_group'].get_action('CloseAll').set_sensitive(self.notebook.get_n_pages() > 0)\r\n windowdata['action_group'].get_action('CloseOthers').set_sensitive(self.notebook.get_n_pages() > 1)", "def hide(self, event=None):\r\n self.visible = 0\r\n self.withdraw()", "def hide(self, event=None):\r\n self.visible = 0\r\n self.withdraw()", "def hide(self):\n self.geometry(\"%dx%d%+d%+d\" % (0, 0, 0, 0))", "def _on_item_expanded(self, evt):\n self._model.expand(True, evt.GetItem())", "def details_window(self, instance: 
Union[Nobleman, Location]):\n window = tk.Toplevel()\n window.title(instance.name)\n window.protocol(\"WM_DELETE_WINDOW\",\n partial(self.close_details_window, instance))\n self.register_extra_window(instance, window)\n self.generate_window_content(instance, window)", "def ShowMe(self, event):\n self.Show(True)", "def hide_window2(self):\n self.ui_item.Unsplit(self.splitter_window_two.ui_item)", "def Maximize(self):\r\n\r\n return self.SetFlag(self.optionMaximized, True)", "def main(self): \n self.window.hide_all()\n self.window.isHide = True\n \n gtk.main()", "def open_transitWindow(self):\n self.window = surveyWindow(self, imaging=False)\n self.hide()", "def get_help_window(self):\n self.gui.active_window.hide()\n\n self.associated_window = help_window.HelpWindow(self.gui)\n self.gui.active_window = self.associated_window\n\n self.gui.active_window.show()", "def on_resize(self, _: int = 0) -> None:\n assert CursesMenu.stdscr is not None\n screen_rows, screen_cols = CursesMenu.stdscr.getmaxyx()\n curses.resizeterm(screen_rows, screen_cols)\n self.draw()", "def MinimizeMode(self, mode):\r\n \r\n self.minimize_mode = mode\r\n return self", "def toggleWindowVisibility(*args, **kwargs)->None:\n pass", "def hide(self, event=None):\n self.visible = 0\n self.withdraw()", "def OnClose(self, event):\n\t\tself.Show(False)", "def addButtonIcon(name):\n\n # Set parent to be the scroll layout\n global objectScroll\n cmds.setParent(objectScroll)\n\n # Instance object, with create flag set to True\n AssetIcon(name, True)", "def __window_close(self):\n pass", "def show(image):\n cv2.imshow('press ENTER to close', image)\n cv2.waitKey(0)", "def OnClose(self, event):\n self.Show(False)" ]
[ "0.71563345", "0.6827041", "0.67579377", "0.6734852", "0.6362016", "0.60919636", "0.6062259", "0.6049304", "0.6033592", "0.6020919", "0.59644955", "0.5946634", "0.59451485", "0.5897082", "0.58830255", "0.58652514", "0.58457947", "0.58319026", "0.57918", "0.5750798", "0.574958", "0.5733694", "0.5728968", "0.5655596", "0.5638253", "0.5635859", "0.5633175", "0.5631591", "0.56241727", "0.56070423", "0.5605291", "0.5591036", "0.5581069", "0.55781645", "0.5572684", "0.5567583", "0.5564603", "0.5564346", "0.55277324", "0.5484374", "0.54820216", "0.5473859", "0.5468286", "0.5449785", "0.5444457", "0.5423647", "0.54096574", "0.5406892", "0.53933007", "0.5387219", "0.5382227", "0.5382116", "0.5361937", "0.53508806", "0.5343714", "0.53424835", "0.5335421", "0.5330566", "0.53207546", "0.53132147", "0.5303893", "0.52865875", "0.52824664", "0.5282177", "0.5280596", "0.52635163", "0.52298874", "0.5217993", "0.5170635", "0.51697683", "0.51625127", "0.5162208", "0.514468", "0.5144333", "0.5144075", "0.5127832", "0.51221055", "0.5120402", "0.511138", "0.511138", "0.5108294", "0.5088759", "0.5088759", "0.5086123", "0.50734407", "0.50695664", "0.50665355", "0.5064471", "0.5060692", "0.50572", "0.5055711", "0.505374", "0.5050493", "0.50500745", "0.5043723", "0.5041161", "0.5037393", "0.50307095", "0.5026932", "0.502496", "0.5021851" ]
0.0
-1
Sets the maximum size for this window. Any attempt to resize a dimension beyond the maximum size will fail.
def setMaxSize(self,width,height): assert (type(width) == int), "width %s is not an int" % `width` assert (width > 0), "width %s is negative" % `width` assert (type(height) == int), "height %s is not an int" % `height` assert (height > 0), "height %s is negative" % `height` self._frame._root.maxsize(width,height)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def set_max_size(self, size):\n # The hard Qt limit is 16777215 (which is 2**24 - 1) and will\n # print warnings to the shell if we attemp to set a max size\n # over that amount. This can be attempted when a QtMainWindow\n # has a central widget size equal to max size, and it also has\n # a menu bar and other components. Clipping the max size like\n # this will not have an effect on layout computation and thus\n # is relatively safe.\n max_width, max_height = size\n max_width = min(max_width, 16777215)\n max_height = min(max_height, 16777215)\n self.widget.setMaximumSize(max_width, max_height)", "def set_max_size(self, width: int, height: int):\n self.tk_ref.maxsize(width=width, height=height)", "def setmaxsize(self, maxsize):\n self.maxsize = maxsize", "def resize_to_maximum(self):\n if self.initialized:\n max_size = self._compute_maximum_size()\n self.set_max_size(max_size)\n self.resize(max_size)", "def maximum_size(self, maximum_size):\n\n self._maximum_size = maximum_size", "def maxsize(self, maxsize):\n self.shape = (int(maxsize), ) + self.shape[1:]\n self.clear()", "def update_maximum_size(self):\n if self.initialized:\n max_size = self._compute_maximum_size()\n self.set_max_size(max_size)", "def SetWindowSize(self, size):\n self.WINDOW_SIZE = size", "def set_maxSize(self, maxSize):\n if self.__log:\n self.__logger.info(f\"Setting max size to {maxSize}\")\n self.__maxSize = maxSize # Set max size\n self.__handle_cache_size() # Adapt to new changes", "def _maximum_size_changed(self):\n self.update_maximum_size()", "def max_size(self):\n max_size = self.widget.maximumSize()\n return Size(max_size.width(), max_size.height())", "def _set_size(self):\n if self.width_key is not None:\n width = config.get(self.width_key)\n height = config.get(self.height_key)\n self.window.resize(width, height)", "def setWindowSize(self, value):\n return self._set(windowSize=value)", "def resize(self, size):\n self.widget.resize(*size)", "def MaxSize1(self, size):\r\n\r\n self.max_size = size\r\n return self", "def resize(self):\n h, w = self.win.getmaxyx()\n self.maxh, self.maxw = h, w\n if w == 0 or h == 2:\n return\n self.win.resize(h, w)\n self.lpane.do_resize(h, w)\n self.rpane.do_resize(h, w)\n self.statusbar.resize(h, w)\n self.tabbar.resize(1,w)\n self.regenerate()\n self.display()", "def set_maxItemSize(self, maxItemSize):\n if self.__log:\n self.__logger.info(f\"Setting max item size to {maxItemSize}\")\n self.__maxItemSize = maxItemSize\n self.__handle_cache_size()", "def maximize(self):\n lib.SDL_MaximizeWindow(self._ptr)", "def SetMaxArea(self, *args):\n return _ShapeUpgrade.ShapeUpgrade_ShapeDivideArea_SetMaxArea(self, *args)", "def __window_resizeTo(self, iWidth, iHeight):\n pass", "def SetMaxArea(self, *args):\n return _ShapeUpgrade.ShapeUpgrade_FaceDivideArea_SetMaxArea(self, *args)", "def set_max_chunk_height(self, val=256):\n self._max_chunk_height = val", "def _set_maximum(self):\n self._level_gen.maximum_length = self._maximum_length_spinbox.value()\n self._refresh_view()", "def set_maximum(self, max_value):\n\n self._progress.setMaximum(max_value)", "def set_max_position_size(\n self,\n asset=None,\n max_shares=None,\n max_notional=None,\n on_error='fail'):\n control = MaxPositionSize(asset=asset,\n max_shares=max_shares,\n max_notional=max_notional,\n on_error=on_error)\n self.register_trading_control(control)", "def set_igv_window_size(self, width=800, height=600):\n self.set_igv_window_width(width)\n self.set_igv_window_height(height)", "def message_box_size_limit(self, 
message_box_size_limit: ConfigNodePropertyInteger):\n\n self._message_box_size_limit = message_box_size_limit", "def setMinSize(self,width,height):\n assert (type(width) == int), \"width %s is not an int\" % `width`\n assert (width > 0), \"width %s is negative\" % `width`\n assert (type(height) == int), \"height %s is not an int\" % `height`\n assert (height > 0), \"height %s is negative\" % `height`\n self._frame._root.minsize(width,height)", "def SetMinMaxSize(self, size: (int, int)):\r\n # TODO: if the resultset have less than 400px we don't want \r\n # the space need for the vertcal scrollbar\r\n sbh = wx.SystemSettings.GetMetric(wx.SYS_VSCROLL_X)\r\n self.SetMaxClientSize((size[0] - sbh, size[1]))\r\n self.SetMinClientSize((size[0] - sbh, size[1]))", "def setMaxValue(self, max_value):\r\n\t\tself.MaxValue = max_value", "def setMaxValue(self, max_value):\r\n\t\tself.MaxValue = max_value", "def DoSetSize(self, x, y, width, height, flags=wx.SIZE_AUTO):\r\n\r\n self._rect = wx.Rect(x, y, max(1, width), max(1, height))\r\n self.DoSizing()", "def set_max(self, max):\n self.set_val((self.val[0], max))", "def maximise(self) -> None:\n if self.win is not None:\n try:\n if self.win.style_get_property(\"maximize_initially\"):\n self.win.unmaximise()\n else:\n self.win.maximise()\n except ValueError:\n print(\"error :(\")\n self.win.maximize()", "def window_size(self, window_size):\n\n self._window_size = window_size", "def set_write_queue_max_size(self, val): \n self.j_pump.setWriteQueueMaxSize(val)\n return self", "def change_window_size(self, size):\n value = 0\n try:\n value = int(size)\n except ValueError:\n raise ValueError(\"Please type in a valid number.\")\n\n if value >= 0:\n self.__window_size = value\n else:\n raise ValueError(\"Please type in a valid positive number.\")", "def maximize(self):\n\n self.driver.maximize_window(self.handle)", "def set_max_sentence_length(self):\n new_max = int(self.set_max_sentence.get())\n cur_min = self.min_sentence_length\n\n if new_max > cur_min:\n self.max_sentence_length = new_max\n else:\n old_max = self.max_sentence_length\n old_max_var = tk.StringVar(self.master)\n old_max_var.set(str(old_max))\n self.set_max_sentence.config(textvariable=old_max_var)", "def save_my_size(self):\n if not settings.get_bool('maximized', False):\n width, height = self.get_size()\n settings.set('width', width)\n settings.set('height', height)", "def SetSize(*args, **kwargs):\n return _gdi_.Bitmap_SetSize(*args, **kwargs)", "def maximum_volume_size(self, value: typing.Union[str, int, None]):\n self._properties[\"maximumVolumeSize\"] = _types.integer_or_string(value)", "def setwinsize(self, rows, cols):", "def setSize(self, width, height):\n frameWidth = width\n frameHeight = height\n repaint()", "def set_episode_size(self, episode_size):\n self.max_episode_steps = episode_size", "def set_max(self, val):\n self._max = val", "def __set_max_value(self, value: int) -> None:\n self.__max_value = value * 2000\n half_value = self.__max_value // 2\n\n self.__x_spinbox.configure(from_=-half_value, to=half_value)\n self.__x_scale.configure(from_=half_value, to=-half_value)\n self.__y_spinbox.configure(from_=-half_value, to=half_value)\n self.__y_scale.configure(from_=-half_value, to=half_value)\n self.__z_spinbox.configure(from_=-half_value, to=half_value)\n self.__z_scale.configure(from_=half_value, to=-half_value)", "def MaxSize2(self, x, y):\r\n\r\n self.max_size.Set(x,y)\r\n return self", "def set_write_queue_max_size(self, size):\n self.java_obj.setWriteQueueMaxSize(size)\n 
return self", "def set_max_vm_mem(self, nMemSize):\n\t\tcall_sdk_function('PrlDispCfg_SetMaxVmMem', self.handle, nMemSize)", "def max_facet_width(self, max_facet_width):\n\n self._max_facet_width = max_facet_width", "def max_panel_height(self, height):\n self.command(\"maxPanelHeight %(height)s\" % locals())", "def set_is_max(self, is_max):\n self.__is_max = is_max", "def _set_max_steps(self, mx):\n self._max = max(0, mx)\n\n if self._max:\n self._step_width = Helper.len(str(self._max))\n else:\n self._step_width = 4", "def set_size(self, width, height):\n cairo.cairo_xcb_surface_set_size(self._pointer, width, height)\n self._check_status()", "def adjust_screen_size(self) -> None:\n if self.screen:\n max_row, max_cols = self.screen.getmaxyx()\n if max_row < MIN_SIZE + len(self.all_items):\n self.screen.resize(self.menu_height, max_cols)\n self.draw()", "def ev_windowmaximized(self, event: WindowEvent) -> None:", "def setMaxInputLength(self, value):\n return self._set(maxInputLength=value)", "def set_xmax(self, xmax):\n self.__xmax = xmax", "def _SetSize(self, pixels = None):\n if not pixels:\n pixels = self.GetClientSize()\n self.canvas.SetSize(pixels)\n self.figure.set_size_inches(pixels[0]/self.figure.get_dpi(),\n pixels[1]/self.figure.get_dpi())", "def setHeight(self, *args):\n return _libsbml.Dimensions_setHeight(self, *args)", "def DoSetSize(self, x, y, width, height, sizeFlags=wx.SIZE_AUTO):\r\n \r\n parent_size = self.GetParent().GetClientSize()\r\n if x + width > parent_size.x:\r\n width = max(0, parent_size.x - x)\r\n if y + height > parent_size.y:\r\n height = max(0, parent_size.y - y)\r\n\r\n wx.PyControl.DoSetSize(self, x, y, width, height, sizeFlags)", "def Maximize(self):\r\n\r\n return self.SetFlag(self.optionMaximized, True)", "def max_pool_size(self, max_pool_size: ConfigNodePropertyInteger):\n\n self._max_pool_size = max_pool_size", "def resize(self, width, height):\n\n\t\tself._window.resize(width, height)", "def setMaxLength(self, value):\n return self._set(maxLength=value)", "def setMaxLength(self, value):\n return self._set(maxLength=value)", "def setMaxLength(self, value):\n return self._set(maxLength=value)", "def setMaxLength(self, value):\n return self._set(maxLength=value)", "def max(self, max):\n\n self._max = max", "def max(self, max):\n\n self._max = max", "def resizeEvent(self, event):\n self.autosize()\n super().resizeEvent(event)", "def set_max_edges(self, edges):\n self.max_edges = edges", "def set_widget_size(self, widget_size):\n v = self.viewport\n v.projection.widget_rect = Rect(\n mins=[0, 0],\n maxes=[widget_size[0], widget_size[1]])\n v.view.widget_size = v.projection.widget_rect.sizes", "def setMaxEventQueueSize(self, eventQueueSize):\n internals.blpapi_SessionOptions_setMaxEventQueueSize(\n self.__handle,\n eventQueueSize)", "def maxSize():\n rect = pf.app.desktop().availableGeometry()\n maxh,maxw = rect.width(),rect.height()\n return maxh,maxw", "def setPixelsPerInchShrinkToFit(self,value):\n self.PDFreactorConfiguration.in1[\"pixelsPerInchShrinkToFit\"] = value", "def set_size(self, w, h):\n\t\tpass", "def fl_set_form_maxsize(ptr_flform, width, height):\n _fl_set_form_maxsize = library.cfuncproto(\n library.load_so_libforms(), \"fl_set_form_maxsize\", \\\n None, [cty.POINTER(xfdata.FL_FORM), xfdata.FL_Coord,\n xfdata.FL_Coord], \\\n \"\"\"void fl_set_form_maxsize(FL_FORM * form, FL_Coord w,\n FL_Coord h) \"\"\")\n library.check_if_flinitialized()\n library.verify_flformptr_type(ptr_flform)\n i_width = library.convert_to_FL_Coord(width)\n 
i_height = library.convert_to_FL_Coord(height)\n library.keep_elem_refs(ptr_flform, width, i_width, height, i_height)\n _fl_set_form_maxsize(ptr_flform, i_width, i_height)", "def set_canvas_size(self, width, height):\n self.canvas.config(width = int(width), height = int(height))", "def set_size(self, size):\n \n self.width = size[0]\n self.height = size[1]", "def setMaxOutputLength(self, value):\n return self._set(maxOutputLength=value)", "def setMaxOutputLength(self, value):\n return self._set(maxOutputLength=value)", "def setMaxOutputLength(self, value):\n return self._set(maxOutputLength=value)", "def media_images_height_max(self, media_images_height_max):\n\n self._media_images_height_max = media_images_height_max", "def resize(self, width: int, height: int):\n pass", "def max_size(self):\n raise NotImplementedError()", "def resize_to(self, width, height):\n\n self.driver.resize_window_to(self.handle, width, height)", "def resize(self):\n pass", "def set_maxVal(self, val):\n self.maxVal = val", "def setSize(self, y, h):\n if (h <= 0.0):\n self.ovflRect.hide()\n self.canvas.setHeight(y)\n else:\n self.ovflRect.setRect(0, y, self.mainWidth, h)\n self.ovflRect.show()\n self.canvas.setHeight(y + h)", "def setMaximumValue(self, value: int):\n self.ui.progress.setMaximum(value)", "def showMaximized(self):\n self.usualSize = self.size()\n self.setWindowState(Qt.WindowMaximized)\n self.move(0, 0)\n self.setFixedSize(QSize(self.screenSize.width(), self.screenSize.height()))\n self.maximized = True\n QWidget.showMaximized(self)", "def setMaxWindowLen(self, length):\n return self._set(maxWindowLen=length)", "def setMaxWindowLen(self, length):\n return self._set(maxWindowLen=length)", "def setErrorMax(self, error_max):\n\t\tself.error_max = error_max", "def changeSize(self, value):\n self.layer.brush_size = value", "def max_value(self, max_value):\n\n self._max_value = max_value", "def max_value(self, max_value):\n\n self._max_value = max_value", "def max_value(self, max_value):\n\n self._max_value = max_value" ]
[ "0.8343203", "0.808209", "0.7743762", "0.7440697", "0.7391813", "0.71794146", "0.7152232", "0.70025915", "0.6995238", "0.69787806", "0.6910393", "0.6805439", "0.67991096", "0.6738618", "0.67204857", "0.66982836", "0.667508", "0.66253215", "0.6608448", "0.6564544", "0.65028757", "0.64696825", "0.6448357", "0.6434331", "0.641322", "0.64088196", "0.6398256", "0.6396454", "0.63957804", "0.6385469", "0.6385469", "0.6382714", "0.638013", "0.63685805", "0.6356337", "0.63555765", "0.635307", "0.634295", "0.63387686", "0.6311151", "0.62933934", "0.628817", "0.6277122", "0.6271136", "0.62691927", "0.6267085", "0.626653", "0.6255888", "0.6252962", "0.6251516", "0.62515014", "0.6246031", "0.6222223", "0.62175137", "0.62101734", "0.6171249", "0.61454767", "0.6139117", "0.6132396", "0.61291456", "0.6123264", "0.6123181", "0.6120559", "0.61197233", "0.6100601", "0.6093389", "0.6093389", "0.6093389", "0.6093389", "0.608961", "0.608961", "0.6061776", "0.60413194", "0.6039902", "0.6031243", "0.6025294", "0.60242593", "0.6018137", "0.59982324", "0.5996723", "0.5991808", "0.59912485", "0.59912485", "0.59912485", "0.598939", "0.59697527", "0.5968272", "0.5959228", "0.59525335", "0.5948877", "0.59453845", "0.59381574", "0.5936495", "0.5935158", "0.5935158", "0.59315014", "0.59263164", "0.59191144", "0.59191144", "0.59191144" ]
0.81691533
1
Sets the minimum size for this window. Any attempt to resize a dimension below the minimum size will fail.
def setMinSize(self,width,height):
    assert (type(width) == int), "width %s is not an int" % `width`
    assert (width > 0), "width %s is negative" % `width`
    assert (type(height) == int), "height %s is not an int" % `height`
    assert (height > 0), "height %s is negative" % `height`
    self._frame._root.minsize(width,height)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def SetMinimumPaneSize(self, minSize):\n self._minimumPaneSize = minSize", "def set_min_size(self, size):\n self.widget.setMinimumSize(*size)", "def set_min_size(self, width: int, height: int):\n self.tk_ref.minsize(width=width, height=height)", "def SetMinSize(self, s):\r\n\r\n self.min_size = wx.Size(*s)", "def minimum_size(self, minimum_size):\n\n self._minimum_size = minimum_size", "def setMinimumWidth( self, value ):\n self._minimumWidth = value", "def resize_to_minimum(self):\n if self.initialized:\n min_size = self._compute_minimum_size()\n self.set_min_size(min_size)\n self.resize(min_size)", "def update_minimum_size(self):\n if self.initialized:\n min_size = self._compute_minimum_size()\n self.set_min_size(min_size)", "def _minimum_size_changed(self):\n self.update_minimum_size()", "def MinSize1(self, size):\r\n self.min_size = size\r\n return self", "def _set_size(self):\n if self.width_key is not None:\n width = config.get(self.width_key)\n height = config.get(self.height_key)\n self.window.resize(width, height)", "def min_size(self, size):\n\n self._min_size = size\n self._is_min_size_stale = False", "def setMinimumHeight( self, value ):\n self._minimumHeight = value", "def min_size(self):\n min_size = self.widget.minimumSize()\n return Size(min_size.width(), min_size.height())", "def min_pixels(self, value) -> 'Size':\n raise_not_number(value)\n self.minimum = '{}px'.format(value)\n return self", "def minimumSizeHint(self):\n return QSize(1490, 800)", "def MinSize2(self, x, y):\r\n\r\n self.min_size = wx.Size(x, y)\r\n return self", "def SetWindowSize(self, size):\n self.WINDOW_SIZE = size", "def SetMinArea(self, *args):\n return _ShapeUpgrade.ShapeUpgrade_RemoveInternalWires_SetMinArea(self, *args)", "def GetMinimumPaneSize(self):\n return self._minimumPaneSize", "def minimumSizeHint(self):\n height = self._dayu_size * 1.2\n return QtCore.QSize(height, height / 2)", "def GetMinSize(self):\r\n\r\n return self.min_size", "def setMinW(self, w):\n return self._set(minW=w)", "def SetInitialSize(self, size=None):\n\n if size is None:\n size = wx.DefaultSize\n\n wx.Control.SetInitialSize(self, size)", "def _compute_minimum_size(self):\n # If the user has supplied an explicit minimum size, use that.\n computed_width, computed_height = self.minimum_size\n if computed_width != -1 and computed_height != -1:\n return Size(computed_width, computed_height)\n \n # Otherwise, try to compute a default from the central widget.\n widget = self.central_widget\n if widget is not None:\n\n # If the central widget is a container, we have it compute\n # the minimum size for us, otherwise, we use the size hint\n # of the widget as the value.\n if isinstance(widget, Container):\n min_width, min_height = widget.compute_min_size()\n else:\n min_width, min_height = widget.size_hint()\n\n # If the hug and resist clip policies of the widget are\n # weaker than the resize strength of the window, then\n # we ignore its value in that direction.\n if ((widget.hug_width not in STRONGER_THAN_RESIZE) and\n (widget.resist_clip_width not in STRONGER_THAN_RESIZE)):\n min_width = -1\n \n if ((widget.hug_height not in STRONGER_THAN_RESIZE) and\n (widget.resist_clip_height not in STRONGER_THAN_RESIZE)):\n min_height = -1 \n\n if computed_width == -1:\n computed_width = min_width\n\n if computed_height == -1:\n computed_height = min_height\n \n # We use the last resort values to replace any remaining \n # -1 values. 
This ensures the return value will be >= 0 \n # in both width and height\n if computed_width == -1 or computed_height == -1:\n default_width, default_height = self.minimum_size_default\n if computed_width == -1:\n computed_width = default_width\n if computed_height == -1:\n computed_height = default_height\n \n return Size(computed_width, computed_height)", "def SetMinMaxSize(self, size: (int, int)):\r\n # TODO: if the resultset have less than 400px we don't want \r\n # the space need for the vertcal scrollbar\r\n sbh = wx.SystemSettings.GetMetric(wx.SYS_VSCROLL_X)\r\n self.SetMaxClientSize((size[0] - sbh, size[1]))\r\n self.SetMinClientSize((size[0] - sbh, size[1]))", "def defaultWindowSize(self):\n self.resize(self.defaultWindowWidth, self.defaultWindowHeight)", "def setMaxSize(self,width,height):\n assert (type(width) == int), \"width %s is not an int\" % `width`\n assert (width > 0), \"width %s is negative\" % `width`\n assert (type(height) == int), \"height %s is not an int\" % `height`\n assert (height > 0), \"height %s is negative\" % `height`\n self._frame._root.maxsize(width,height)", "def set_max_size(self, size):\n # The hard Qt limit is 16777215 (which is 2**24 - 1) and will\n # print warnings to the shell if we attemp to set a max size\n # over that amount. This can be attempted when a QtMainWindow\n # has a central widget size equal to max size, and it also has\n # a menu bar and other components. Clipping the max size like\n # this will not have an effect on layout computation and thus\n # is relatively safe.\n max_width, max_height = size\n max_width = min(max_width, 16777215)\n max_height = min(max_height, 16777215)\n self.widget.setMaximumSize(max_width, max_height)", "def minimum_size(self):\n return self._minimum_size", "def _set_minimum(self):\n self._level_gen.minimum_length = self._minimum_length_spinbox.value()\n self._refresh_view()", "def UpdateSizing(self):\n def closure(pane):\n pane.MinSize(self.GetBestSize())\n self._PaneInfoOperation(closure)", "def min_size(self):\n raise NotImplementedError()", "def set_minimum(self, min_value):\n\n self._progress.setMinimum(min_value)", "def fl_set_form_minsize(ptr_flform, width, height):\n _fl_set_form_minsize = library.cfuncproto(\n library.load_so_libforms(), \"fl_set_form_minsize\", \\\n None, [cty.POINTER(xfdata.FL_FORM), xfdata.FL_Coord,\n xfdata.FL_Coord],\n \"\"\"void fl_set_form_minsize(FL_FORM * form, FL_Coord w,\n FL_Coord h) \"\"\")\n library.check_if_flinitialized()\n library.verify_flformptr_type(ptr_flform)\n i_width = library.convert_to_FL_Coord(width)\n i_height = library.convert_to_FL_Coord(height)\n library.keep_elem_refs(ptr_flform, width, i_width, height, i_height)\n _fl_set_form_minsize(ptr_flform, i_width, i_height)", "def minimumWidth( self ):\n return self._minimumWidth", "def MinSize(self, arg1=None, arg2=None):\r\n \r\n if isinstance(arg1, wx.Size):\r\n ret = self.MinSize1(arg1)\r\n elif isinstance(arg1, types.TupleType):\r\n ret = self.MinSize1(wx.Size(*arg1))\r\n else:\r\n ret = self.MinSize2(arg1, arg2)\r\n\r\n return ret", "def change_window_size(self, size):\n value = 0\n try:\n value = int(size)\n except ValueError:\n raise ValueError(\"Please type in a valid number.\")\n\n if value >= 0:\n self.__window_size = value\n else:\n raise ValueError(\"Please type in a valid positive number.\")", "def ev_windowminimized(self, event: WindowEvent) -> None:", "def window_size(self, window_size):\n\n self._window_size = window_size", "def resize(self, size):\n self.widget.resize(*size)", "def 
min_auto(self) -> 'Size':\n self.minimum = 'auto'\n return self", "def minimize(self):\n\t\tself.__window.minimize()\n\t\tself.update_minimization()\n\t\treturn", "def set_min_vm_mem(self, nMemSize):\n\t\tcall_sdk_function('PrlDispCfg_SetMinVmMem', self.handle, nMemSize)", "def min_width(self):\n ...", "def setSize_0(self, size):\n self.setSize(size.getWidth(), size.getHeight())", "def min(self, min):\n\n self._min = min", "def min(self, min):\n\n self._min = min", "def min_facet_width(self, min_facet_width):\n\n self._min_facet_width = min_facet_width", "def __window_resizeTo(self, iWidth, iHeight):\n pass", "def set_max_size(self, width: int, height: int):\n self.tk_ref.maxsize(width=width, height=height)", "def resizePreview(self):\n ratio = float(self.qIma.width()) / float(self.qIma.height())\n if self.qIma.width() > self.qIma.height():\n width = 300\n height = int(float(width) / ratio)\n else:\n height = 170\n width = int(float(height) / ratio)\n if 'prodManager' in os.path.basename(self._ima):\n width = 300\n height = 170\n self.lPreview.setMinimumSize(width, height)\n self.lPreview.setMaximumSize(width, height)", "def minimum_size(self):\n return self.fwhm*2.", "def setwinsize(self, rows, cols):", "def setWindowSize(self, value):\n return self._set(windowSize=value)", "def set_xmin(self, xmin):\n self.__xmin = xmin", "def set_size(self, width, height):\n # Combine the height and width to single string to be passed to root\n set_str = '{}x{}'.format(str(width), str(height))\n self.root.geometry(set_str)", "def onSize(self, event): \n\t\tw, h = self.GetClientSizeTuple()\n\t\tself.tree.SetDimensions(0, 0, w, h)", "def setRect( self, rect ):\n mwidth = self.minimumWidth()\n mheight = self.minimumHeight()\n \n if ( rect.width() < mwidth ):\n rect.setWidth(mwidth)\n if ( rect.height() < mheight ):\n rect.setHeight(mheight)\n \n return super(XNode, self).setRect(rect)", "def resizeEvent(self, event):\n self.autosize()\n super().resizeEvent(event)", "def UpdateHintWindowSize(self):\r\n\r\n size = self.CalculateNewSplitSize()\r\n\r\n # the placeholder hint window should be set to this size\r\n info = self._mgr.GetPane(\"dummy\")\r\n \r\n if info.IsOk(): \r\n info.MinSize(size)\r\n info.BestSize(size)\r\n self._dummy_wnd.SetSize(size)", "def setMinValue(self, min_value):\r\n\t\tself.MinValue = min_value", "def setMinValue(self, min_value):\r\n\t\tself.MinValue = min_value", "def set_min(self, min):\n self.set_val((min, self.val[1]))", "def resize(self):\n h, w = self.win.getmaxyx()\n self.maxh, self.maxw = h, w\n if w == 0 or h == 2:\n return\n self.win.resize(h, w)\n self.lpane.do_resize(h, w)\n self.rpane.do_resize(h, w)\n self.statusbar.resize(h, w)\n self.tabbar.resize(1,w)\n self.regenerate()\n self.display()", "def on_user_resize(self, event):\n self.resize_scaled(drag_rootx=event.x_root + self._mouse_drag_offset)", "def min_value(self, min_value):\n\n self._min_value = min_value", "def min_value(self, min_value):\n\n self._min_value = min_value", "def min_value(self, min_value):\n\n self._min_value = min_value", "def ev_windowminimized(self, event: tcod.event.WindowEvent) -> T | None:", "def adjust_screen_size(self) -> None:\n if self.screen:\n max_row, max_cols = self.screen.getmaxyx()\n if max_row < MIN_SIZE + len(self.all_items):\n self.screen.resize(self.menu_height, max_cols)\n self.draw()", "def Minimize(self):\r\n \r\n return self.SetFlag(self.optionMinimized, True)", "def _resize_image(self, event):\n self.window_width = event.width\n self.window_height = event.height", "def 
DoSetSize(self, x, y, width, height, flags=wx.SIZE_AUTO):\r\n\r\n self._rect = wx.Rect(x, y, max(1, width), max(1, height))\r\n self.DoSizing()", "def _set_x_size(self):\n self._level_gen.size = (self._level_size_x_spinbox.value(),\n self._level_gen.size[Y],\n self._level_gen.size[Z])\n self._refresh_view()", "def resize(self, auto_layout = False, **kwds):\n\t\told_auto_layout = self.auto_layout\n\t\ttry:\n\t\t\tself.auto_layout = auto_layout\n\t\t\tself.set(**kwds)\n\t\tfinally:\n\t\t\tself.auto_layout = old_auto_layout", "def OnSize(self, event):\r\n \r\n self.UpdateHintWindowSize()\r\n event.Skip()", "def setSize(self, width, height):\n dw = (width - self.width()) / 2.0\n dh = (height - self.height()) / 2.0\n rect = self.sceneRect()\n rect.adjust(-dw, -dh, dw, dh)\n self.setSceneRect(rect)", "def setMinSentenceSize(self, value):\n return self._set(minSentenceSize=value)", "def OnSize(self, event):\r\n \r\n x, y = self.GetClientSize()\r\n realize = False\r\n\r\n if x > y:\r\n self.SetOrientation(wx.HORIZONTAL)\r\n else:\r\n self.SetOrientation(wx.VERTICAL)\r\n\r\n if (x >= y and self._absolute_min_size.x > x) or (y > x and self._absolute_min_size.y > y):\r\n \r\n # hide all flexible items\r\n for item in self._items:\r\n if item.sizer_item and item.proportion > 0 and item.sizer_item.IsShown():\r\n item.sizer_item.Show(False)\r\n item.sizer_item.SetProportion(0)\r\n\r\n if self._originalStyle & AUI_TB_OVERFLOW:\r\n if not self.GetOverflowVisible():\r\n self.SetOverflowVisible(True)\r\n realize = True\r\n \r\n else:\r\n\r\n if self._originalStyle & AUI_TB_OVERFLOW and not self._custom_overflow_append and \\\r\n not self._custom_overflow_prepend:\r\n if self.GetOverflowVisible():\r\n self.SetOverflowVisible(False)\r\n realize = True\r\n\r\n # show all flexible items\r\n for item in self._items:\r\n if item.sizer_item and item.proportion > 0 and not item.sizer_item.IsShown():\r\n item.sizer_item.Show(True)\r\n item.sizer_item.SetProportion(item.proportion)\r\n \r\n self._sizer.SetDimension(0, 0, x, y)\r\n\r\n if realize:\r\n self.Realize()\r\n else:\r\n self.Refresh(False)\r\n \r\n self.Update()", "def EnableMinFrameSize(self):\n\t\treturn self._get_attribute('enableMinFrameSize')", "def OnSize(self, event):\r\n\r\n self.Layout()", "def minimumSizeHint(self):\n return self.sizeHint()", "def minimize(self):\n lib.SDL_MinimizeWindow(self._ptr)", "def setSize(self, width, height):\n frameWidth = width\n frameHeight = height\n repaint()", "def sizeHint( self ):\n return self.window_size", "def set_igv_window_size(self, width=800, height=600):\n self.set_igv_window_width(width)\n self.set_igv_window_height(height)", "def set_size(self, w, h):\n\t\tpass", "def resize(self, rows, cols, minecount, event=None):\n self.clearFrame()\n #reset relevant instance variables\n self.rows = rows\n self.cols = cols\n self.numMines = minecount\n self.numChecked = 0\n self.numFlags = 0\n self.minesArmed = False\n self.startTime = None\n\n #re-add all elements on the board\n self.setUpFrame()\n self.addTiles(rows,cols,minecount)\n\n #resize window to fit the new board size\n windowWidth = str(20*cols+40)\n windowHeight = str(20*rows+60)\n self.parent.minsize(windowWidth, windowHeight)\n self.parent.maxsize(windowWidth, windowHeight)\n self.parent.geometry(windowWidth+'x'+windowHeight)", "def _set_x_block_size(self):\n self._scene_gen.block_dimensions = (self._block_size_x_spinbox.value(),\n self._scene_gen.block_dimensions[Y],\n self._scene_gen.block_dimensions[Z])\n self._refresh_view()", "def 
set_min_sentence_length(self):\n new_min = int(self.set_min_sentence.get())\n cur_max = self.max_sentence_length\n\n if new_min < cur_max:\n self.min_sentence_length = new_min\n else:\n old_min = self.min_sentence_length\n old_min_var = tk.StringVar(self.master)\n old_min_var.set(str(old_min))\n self.set_min_sentence.config(textvariable=old_min_var)", "def setWidth(self, *args):\n return _libsbml.Dimensions_setWidth(self, *args)", "def set_size(self, size):\n \n self.width = size[0]\n self.height = size[1]", "def size_with_window(self, size_with_window):\n\n self.container['size_with_window'] = size_with_window", "def SetMinTolerance(self, *args):\n return _ShapeUpgrade.ShapeUpgrade_Tool_SetMinTolerance(self, *args)", "def resizeEvent(self, event):\n self.resized.emit()\n return super(PiWndow, self).resizeEvent(event)", "def resize(self, rows, cols, mines):\n if self.menuVar.get() != 4: self.checkVar.set(0)\n self.myBoard.resize(rows, cols, mines)", "def OnSize(self, event):\r\n\r\n if self._owner_mgr and self._send_size:\r\n self._owner_mgr.OnFloatingPaneResized(self._pane_window, event.GetSize())", "def __ev_resize(self, event):\n\n new_size = event.dict['size']\n surface_size = self.__screen.get_size()\n old_center = self.__screen.get_rect().center\n if new_size != surface_size:\n self.__screen = pygame.display.set_mode(new_size,\n self.__screen.get_flags(),\n self.__screen.get_bitsize())\n self.init(offset=vect_diff(self.__screen.get_rect().center,\n old_center))\n self.__screen_width, self.__screen_height = self.__screen.get_size()", "def resize(self):\r\n del self.win\r\n self.__create_win()" ]
[ "0.82620406", "0.8125616", "0.7964996", "0.7790341", "0.7647176", "0.7619907", "0.74496925", "0.7417188", "0.73141354", "0.72229654", "0.70047134", "0.6895434", "0.68838006", "0.6880474", "0.67831236", "0.6776431", "0.67747736", "0.6735547", "0.66872287", "0.66674685", "0.6595648", "0.6565349", "0.6561743", "0.65322226", "0.64010483", "0.63585526", "0.63574594", "0.6317511", "0.6278679", "0.62656796", "0.6221268", "0.6216851", "0.62102455", "0.6191779", "0.61905867", "0.61898094", "0.6175591", "0.61599463", "0.61353797", "0.6132427", "0.6132195", "0.612173", "0.6107337", "0.6101902", "0.6100541", "0.6078329", "0.6072862", "0.6072862", "0.60669905", "0.60627055", "0.6060072", "0.59993213", "0.59860694", "0.5973332", "0.59693944", "0.5960584", "0.59369826", "0.59278053", "0.5914971", "0.5885618", "0.5884909", "0.5866504", "0.5866504", "0.58646554", "0.58599824", "0.5858454", "0.5842654", "0.5842654", "0.5842654", "0.58424604", "0.58414716", "0.5837247", "0.58333373", "0.5786669", "0.5783789", "0.5782925", "0.5774766", "0.5772488", "0.57666063", "0.57576877", "0.5739522", "0.57363546", "0.57291865", "0.5723536", "0.5723244", "0.572018", "0.571294", "0.5695867", "0.56874764", "0.56865495", "0.56827533", "0.5682432", "0.5681977", "0.5680938", "0.5674436", "0.56709665", "0.56579983", "0.56537884", "0.5646655", "0.56449485" ]
0.81438893
1
Unsupported method for compatibility
def flush(self): pass
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _platform_compatible():\r\n raise NotImplementedError", "def __call__(self):\n raise NotImplementedError()", "def __call__(self):\n raise NotImplementedError", "def __upgrade(self):", "def support(self):", "def __call__(self):\r\n raise NotImplementedError('override me')", "def curvetype(self):\n\t\traise Exception(NotImplemented)", "def __call__(self) -> None:", "def __call__(self, *args, **kwargs):\r\n raise NotImplementedError", "def __call__(self, *args, **kwargs):\r\n raise NotImplementedError", "def __call__(self, *args, **kwargs):\r\n raise NotImplementedError", "def __call__(self, *args, **kwargs):\n msg = '{0} is not usable in pure-python'.format(self)\n raise NotImplementedError(msg)", "def to_legacy(self) -> object:\n pass", "def _patch_implementation(self, original, *args, **kwargs):\n pass", "def __call__(self, *args, **kwargs):\n raise NotImplementedError", "def __call__(self, *args, **kwargs):\n raise NotImplementedError", "def __call__(self, f):\n raise NotImplementedError()", "def __call__(self, *args, **kwargs) -> None:\n raise NotImplementedError()", "def _raise_not_supported(self):\n raise CpoNotSupportedException(\"Method '{}' is not available in solver agent '{}' ({}).\"\n .format(inspect.stack()[1][3], self.context.agent, type(self)))", "def _not_implemented(self, *args, **kwargs):\n raise NotImplementedError", "def checkCompatibility(self, *args):\n return _libsbml.SBase_checkCompatibility(self, *args)", "def test(self):\n raise NotImplementedError", "def __call__(self):\n pass", "def __call__(self):\n pass", "def __call__(self, *args, **kwargs): # real signature unknown\n pass", "def is_compatible(self, function, arguments):", "def version(self):\r\n raise NotImplementedError()", "def __call__():", "def __call__():", "def __call__():", "def __call__():", "def __call__():", "def __call__(self, **kwargs):\n raise NotImplementedError", "def __call__(self, a, b):\n # STUDENT CODE HERE\n raise NotImplementedError", "def __call__(self, a, b):\n # STUDENT CODE HERE\n raise NotImplementedError", "def __call__(self, a, b):\n # STUDENT CODE HERE\n raise NotImplementedError", "def extension (self):\n assert False, \"To be implemented by child\"", "def bad(self):\n raise NotImplementedError", "def bad(self):\n raise NotImplementedError", "def _GetOpener(self):\n raise NotImplementedError()", "def check_supported_features(self):", "def interface(self):\n raise exceptions.NotImplementedError()", "def func(*args, **kwargs): # pragma: no cover\n raise NotImplementedError(\"{name} not ported from upstream\"\n .format(name=name))", "def override(self):\n return None", "def function(self):\n raise NotImplementedError", "def __call__(self):\n\t\treturn", "def check(self):\n raise NotImplementedError", "def _create_impl(self):", "def _GetOpener(self):\r\n raise NotImplementedError()", "def check_stability(self):", "def __call__(self):\r\n raise self", "def __call__(self):\r\n raise self", "def warning(self, *args, **kwargs): # real signature unknown\n pass", "def fallback(self):\n pass", "def fallback(self):\n pass", "def method(self):\n return None", "def __call__( self ):\n pass", "def __call__(object):", "def _different_curvatures_not_supported(self):\n if self._extension_cls_directions != self._extension_cls_second:\n raise NotImplementedError(\n \"Different extensions for (directions, second) not supported.\"\n )", "def f_get(self):\n raise NotImplementedError(\"Should have implemented this.\")", "def base(self):\n raise NotImplementedError()", "def patch_sdk():", "def 
patch_sdk():", "def patch_sdk():", "def __nonzero__ ( self ) :\n raise AbstractMethodException( self , \"__nonzero__\" )", "def __call__(self, req):\n raise NotImplementedError(\"%s.__call__()\" % self.__class__.__name__)", "def __call__(self, req):\n raise NotImplementedError(\"%s.__call__()\" % self.__class__.__name__)", "def __int__(self):\n pass", "def get(self):\n raise NotImplementedError", "def available(self):\n\t\traise NotImplementedError", "def test_4_4_1_1(self):\n pass", "def downgrade():\n raise NotImplementedError(\"Downgrade is not supported\")", "def downgrade():\n raise NotImplementedError(\"Downgrade is not supported\")", "def downgrade():\n raise NotImplementedError(\"Downgrade is not supported\")", "def protocol(self):\n raise UnsupportedCall(f\"'{self.__class__.__name__}' object has no attribute 'protocol'\")", "def lint(self):\n raise NotImplementedError()", "def unsupported(self) -> Union[object, Sequence]:\n return self._unsupported", "def __init__(self):\n raise NotImplementedError()", "def _get_version(self):", "def offering(self):\r\n raise NotImplementedError()", "def retinanet(self, *args, **kwargs):\n raise NotImplementedError('retinanet method not implemented.')", "def available(self):\n raise ClixxException(\"Not implemented.\")", "def function(self, *args):\n raise NotImplemented", "def dl():\n raise NotImplementedError()", "def version(self):\n raise NotImplementedError", "def version(self):\n raise NotImplementedError", "def version(self):\n raise NotImplementedError", "def version(self):\n raise NotImplementedError", "def upgrade(self):", "def upgrade(self):", "def onJT808Operation(self):\n pass", "def default(self):\n raise NotImplementedError", "def _to_be_wrapped(self) -> None:", "def method(self):", "def test_method_not_supported(self):\n result = self.app.get('/api/v1.0/documents/convert')\n # assert the status code of the response 405 (method not allowed)\n self.assertEqual(result.status_code, 405)", "def __tr_getattr__(self, name):\n raise AttributeError(name)", "def __tr_getattr__(self, name):\n raise AttributeError(name)", "def method_get_version(self) -> str: # pragma: nocover\n raise NotImplementedError", "def __call__(self):", "def __call__(self):", "def original(self) -> Any:\n raise NotImplementedError" ]
[ "0.7279129", "0.68142307", "0.6761746", "0.6669215", "0.661067", "0.6566959", "0.6410402", "0.62955695", "0.6209175", "0.6209175", "0.6209175", "0.6150251", "0.6138908", "0.6107612", "0.6102898", "0.6102898", "0.60230196", "0.60135454", "0.59595454", "0.59073716", "0.59036094", "0.588363", "0.58825576", "0.58825576", "0.58590126", "0.58429396", "0.582205", "0.5804422", "0.5804422", "0.5804422", "0.5804422", "0.5804422", "0.5799619", "0.57945853", "0.57945853", "0.57945853", "0.57794803", "0.57730615", "0.57730615", "0.5769253", "0.57617915", "0.5761162", "0.57336277", "0.56964874", "0.56950104", "0.5678023", "0.5673142", "0.56418127", "0.56072664", "0.5604688", "0.5604218", "0.5604218", "0.5600337", "0.55896837", "0.55896837", "0.5588693", "0.55734813", "0.5561598", "0.55558157", "0.5546809", "0.55339056", "0.55225104", "0.55225104", "0.55225104", "0.55130005", "0.5511499", "0.5511499", "0.5511015", "0.5509951", "0.5495512", "0.5488144", "0.5480076", "0.5480076", "0.5480076", "0.5479503", "0.5475619", "0.54702616", "0.5457303", "0.5456054", "0.5447579", "0.5441189", "0.5438457", "0.54357326", "0.54073244", "0.5403553", "0.5403553", "0.5403553", "0.5403553", "0.5397426", "0.5397426", "0.53834", "0.538178", "0.5377456", "0.5369526", "0.53668934", "0.5359533", "0.5359533", "0.5356159", "0.5351877", "0.5351877", "0.5348911" ]
0.0
-1
Unsupported method for compatibility
def stroke(self, path, clr): pass
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _platform_compatible():\r\n raise NotImplementedError", "def __call__(self):\n raise NotImplementedError()", "def __call__(self):\n raise NotImplementedError", "def __upgrade(self):", "def support(self):", "def __call__(self):\r\n raise NotImplementedError('override me')", "def curvetype(self):\n\t\traise Exception(NotImplemented)", "def __call__(self) -> None:", "def __call__(self, *args, **kwargs):\r\n raise NotImplementedError", "def __call__(self, *args, **kwargs):\r\n raise NotImplementedError", "def __call__(self, *args, **kwargs):\r\n raise NotImplementedError", "def __call__(self, *args, **kwargs):\n msg = '{0} is not usable in pure-python'.format(self)\n raise NotImplementedError(msg)", "def to_legacy(self) -> object:\n pass", "def _patch_implementation(self, original, *args, **kwargs):\n pass", "def __call__(self, *args, **kwargs):\n raise NotImplementedError", "def __call__(self, *args, **kwargs):\n raise NotImplementedError", "def __call__(self, f):\n raise NotImplementedError()", "def __call__(self, *args, **kwargs) -> None:\n raise NotImplementedError()", "def _raise_not_supported(self):\n raise CpoNotSupportedException(\"Method '{}' is not available in solver agent '{}' ({}).\"\n .format(inspect.stack()[1][3], self.context.agent, type(self)))", "def _not_implemented(self, *args, **kwargs):\n raise NotImplementedError", "def checkCompatibility(self, *args):\n return _libsbml.SBase_checkCompatibility(self, *args)", "def test(self):\n raise NotImplementedError", "def __call__(self):\n pass", "def __call__(self):\n pass", "def __call__(self, *args, **kwargs): # real signature unknown\n pass", "def is_compatible(self, function, arguments):", "def version(self):\r\n raise NotImplementedError()", "def __call__():", "def __call__():", "def __call__():", "def __call__():", "def __call__():", "def __call__(self, **kwargs):\n raise NotImplementedError", "def __call__(self, a, b):\n # STUDENT CODE HERE\n raise NotImplementedError", "def __call__(self, a, b):\n # STUDENT CODE HERE\n raise NotImplementedError", "def __call__(self, a, b):\n # STUDENT CODE HERE\n raise NotImplementedError", "def extension (self):\n assert False, \"To be implemented by child\"", "def bad(self):\n raise NotImplementedError", "def bad(self):\n raise NotImplementedError", "def _GetOpener(self):\n raise NotImplementedError()", "def check_supported_features(self):", "def interface(self):\n raise exceptions.NotImplementedError()", "def func(*args, **kwargs): # pragma: no cover\n raise NotImplementedError(\"{name} not ported from upstream\"\n .format(name=name))", "def override(self):\n return None", "def function(self):\n raise NotImplementedError", "def __call__(self):\n\t\treturn", "def check(self):\n raise NotImplementedError", "def _create_impl(self):", "def _GetOpener(self):\r\n raise NotImplementedError()", "def check_stability(self):", "def __call__(self):\r\n raise self", "def __call__(self):\r\n raise self", "def warning(self, *args, **kwargs): # real signature unknown\n pass", "def fallback(self):\n pass", "def fallback(self):\n pass", "def method(self):\n return None", "def __call__( self ):\n pass", "def __call__(object):", "def _different_curvatures_not_supported(self):\n if self._extension_cls_directions != self._extension_cls_second:\n raise NotImplementedError(\n \"Different extensions for (directions, second) not supported.\"\n )", "def f_get(self):\n raise NotImplementedError(\"Should have implemented this.\")", "def base(self):\n raise NotImplementedError()", "def patch_sdk():", "def 
patch_sdk():", "def patch_sdk():", "def __nonzero__ ( self ) :\n raise AbstractMethodException( self , \"__nonzero__\" )", "def __call__(self, req):\n raise NotImplementedError(\"%s.__call__()\" % self.__class__.__name__)", "def __call__(self, req):\n raise NotImplementedError(\"%s.__call__()\" % self.__class__.__name__)", "def __int__(self):\n pass", "def get(self):\n raise NotImplementedError", "def available(self):\n\t\traise NotImplementedError", "def test_4_4_1_1(self):\n pass", "def downgrade():\n raise NotImplementedError(\"Downgrade is not supported\")", "def downgrade():\n raise NotImplementedError(\"Downgrade is not supported\")", "def downgrade():\n raise NotImplementedError(\"Downgrade is not supported\")", "def protocol(self):\n raise UnsupportedCall(f\"'{self.__class__.__name__}' object has no attribute 'protocol'\")", "def lint(self):\n raise NotImplementedError()", "def unsupported(self) -> Union[object, Sequence]:\n return self._unsupported", "def __init__(self):\n raise NotImplementedError()", "def _get_version(self):", "def offering(self):\r\n raise NotImplementedError()", "def retinanet(self, *args, **kwargs):\n raise NotImplementedError('retinanet method not implemented.')", "def available(self):\n raise ClixxException(\"Not implemented.\")", "def function(self, *args):\n raise NotImplemented", "def dl():\n raise NotImplementedError()", "def version(self):\n raise NotImplementedError", "def version(self):\n raise NotImplementedError", "def version(self):\n raise NotImplementedError", "def version(self):\n raise NotImplementedError", "def upgrade(self):", "def upgrade(self):", "def onJT808Operation(self):\n pass", "def default(self):\n raise NotImplementedError", "def _to_be_wrapped(self) -> None:", "def method(self):", "def test_method_not_supported(self):\n result = self.app.get('/api/v1.0/documents/convert')\n # assert the status code of the response 405 (method not allowed)\n self.assertEqual(result.status_code, 405)", "def __tr_getattr__(self, name):\n raise AttributeError(name)", "def __tr_getattr__(self, name):\n raise AttributeError(name)", "def method_get_version(self) -> str: # pragma: nocover\n raise NotImplementedError", "def __call__(self):", "def __call__(self):", "def original(self) -> Any:\n raise NotImplementedError" ]
[ "0.7279129", "0.68142307", "0.6761746", "0.6669215", "0.661067", "0.6566959", "0.6410402", "0.62955695", "0.6209175", "0.6209175", "0.6209175", "0.6150251", "0.6138908", "0.6107612", "0.6102898", "0.6102898", "0.60230196", "0.60135454", "0.59595454", "0.59073716", "0.59036094", "0.588363", "0.58825576", "0.58825576", "0.58590126", "0.58429396", "0.582205", "0.5804422", "0.5804422", "0.5804422", "0.5804422", "0.5804422", "0.5799619", "0.57945853", "0.57945853", "0.57945853", "0.57794803", "0.57730615", "0.57730615", "0.5769253", "0.57617915", "0.5761162", "0.57336277", "0.56964874", "0.56950104", "0.5678023", "0.5673142", "0.56418127", "0.56072664", "0.5604688", "0.5604218", "0.5604218", "0.5600337", "0.55896837", "0.55896837", "0.5588693", "0.55734813", "0.5561598", "0.55558157", "0.5546809", "0.55339056", "0.55225104", "0.55225104", "0.55225104", "0.55130005", "0.5511499", "0.5511499", "0.5511015", "0.5509951", "0.5495512", "0.5488144", "0.5480076", "0.5480076", "0.5480076", "0.5479503", "0.5475619", "0.54702616", "0.5457303", "0.5456054", "0.5447579", "0.5441189", "0.5438457", "0.54357326", "0.54073244", "0.5403553", "0.5403553", "0.5403553", "0.5403553", "0.5397426", "0.5397426", "0.53834", "0.538178", "0.5377456", "0.5369526", "0.53668934", "0.5359533", "0.5359533", "0.5356159", "0.5351877", "0.5351877", "0.5348911" ]
0.0
-1
Unsupported method for compatibility
def fill(self, path, clr): pass
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _platform_compatible():\r\n raise NotImplementedError", "def __call__(self):\n raise NotImplementedError()", "def __call__(self):\n raise NotImplementedError", "def __upgrade(self):", "def support(self):", "def __call__(self):\r\n raise NotImplementedError('override me')", "def curvetype(self):\n\t\traise Exception(NotImplemented)", "def __call__(self) -> None:", "def __call__(self, *args, **kwargs):\r\n raise NotImplementedError", "def __call__(self, *args, **kwargs):\r\n raise NotImplementedError", "def __call__(self, *args, **kwargs):\r\n raise NotImplementedError", "def __call__(self, *args, **kwargs):\n msg = '{0} is not usable in pure-python'.format(self)\n raise NotImplementedError(msg)", "def to_legacy(self) -> object:\n pass", "def _patch_implementation(self, original, *args, **kwargs):\n pass", "def __call__(self, *args, **kwargs):\n raise NotImplementedError", "def __call__(self, *args, **kwargs):\n raise NotImplementedError", "def __call__(self, f):\n raise NotImplementedError()", "def __call__(self, *args, **kwargs) -> None:\n raise NotImplementedError()", "def _raise_not_supported(self):\n raise CpoNotSupportedException(\"Method '{}' is not available in solver agent '{}' ({}).\"\n .format(inspect.stack()[1][3], self.context.agent, type(self)))", "def _not_implemented(self, *args, **kwargs):\n raise NotImplementedError", "def checkCompatibility(self, *args):\n return _libsbml.SBase_checkCompatibility(self, *args)", "def test(self):\n raise NotImplementedError", "def __call__(self):\n pass", "def __call__(self):\n pass", "def __call__(self, *args, **kwargs): # real signature unknown\n pass", "def is_compatible(self, function, arguments):", "def version(self):\r\n raise NotImplementedError()", "def __call__():", "def __call__():", "def __call__():", "def __call__():", "def __call__():", "def __call__(self, **kwargs):\n raise NotImplementedError", "def __call__(self, a, b):\n # STUDENT CODE HERE\n raise NotImplementedError", "def __call__(self, a, b):\n # STUDENT CODE HERE\n raise NotImplementedError", "def __call__(self, a, b):\n # STUDENT CODE HERE\n raise NotImplementedError", "def extension (self):\n assert False, \"To be implemented by child\"", "def bad(self):\n raise NotImplementedError", "def bad(self):\n raise NotImplementedError", "def _GetOpener(self):\n raise NotImplementedError()", "def check_supported_features(self):", "def interface(self):\n raise exceptions.NotImplementedError()", "def func(*args, **kwargs): # pragma: no cover\n raise NotImplementedError(\"{name} not ported from upstream\"\n .format(name=name))", "def override(self):\n return None", "def function(self):\n raise NotImplementedError", "def __call__(self):\n\t\treturn", "def check(self):\n raise NotImplementedError", "def _create_impl(self):", "def _GetOpener(self):\r\n raise NotImplementedError()", "def check_stability(self):", "def __call__(self):\r\n raise self", "def __call__(self):\r\n raise self", "def warning(self, *args, **kwargs): # real signature unknown\n pass", "def fallback(self):\n pass", "def fallback(self):\n pass", "def method(self):\n return None", "def __call__( self ):\n pass", "def __call__(object):", "def _different_curvatures_not_supported(self):\n if self._extension_cls_directions != self._extension_cls_second:\n raise NotImplementedError(\n \"Different extensions for (directions, second) not supported.\"\n )", "def f_get(self):\n raise NotImplementedError(\"Should have implemented this.\")", "def base(self):\n raise NotImplementedError()", "def patch_sdk():", "def 
patch_sdk():", "def patch_sdk():", "def __nonzero__ ( self ) :\n raise AbstractMethodException( self , \"__nonzero__\" )", "def __call__(self, req):\n raise NotImplementedError(\"%s.__call__()\" % self.__class__.__name__)", "def __call__(self, req):\n raise NotImplementedError(\"%s.__call__()\" % self.__class__.__name__)", "def __int__(self):\n pass", "def get(self):\n raise NotImplementedError", "def available(self):\n\t\traise NotImplementedError", "def test_4_4_1_1(self):\n pass", "def downgrade():\n raise NotImplementedError(\"Downgrade is not supported\")", "def downgrade():\n raise NotImplementedError(\"Downgrade is not supported\")", "def downgrade():\n raise NotImplementedError(\"Downgrade is not supported\")", "def protocol(self):\n raise UnsupportedCall(f\"'{self.__class__.__name__}' object has no attribute 'protocol'\")", "def lint(self):\n raise NotImplementedError()", "def unsupported(self) -> Union[object, Sequence]:\n return self._unsupported", "def __init__(self):\n raise NotImplementedError()", "def _get_version(self):", "def offering(self):\r\n raise NotImplementedError()", "def retinanet(self, *args, **kwargs):\n raise NotImplementedError('retinanet method not implemented.')", "def available(self):\n raise ClixxException(\"Not implemented.\")", "def function(self, *args):\n raise NotImplemented", "def dl():\n raise NotImplementedError()", "def version(self):\n raise NotImplementedError", "def version(self):\n raise NotImplementedError", "def version(self):\n raise NotImplementedError", "def version(self):\n raise NotImplementedError", "def upgrade(self):", "def upgrade(self):", "def onJT808Operation(self):\n pass", "def default(self):\n raise NotImplementedError", "def _to_be_wrapped(self) -> None:", "def method(self):", "def test_method_not_supported(self):\n result = self.app.get('/api/v1.0/documents/convert')\n # assert the status code of the response 405 (method not allowed)\n self.assertEqual(result.status_code, 405)", "def __tr_getattr__(self, name):\n raise AttributeError(name)", "def __tr_getattr__(self, name):\n raise AttributeError(name)", "def method_get_version(self) -> str: # pragma: nocover\n raise NotImplementedError", "def __call__(self):", "def __call__(self):", "def original(self) -> Any:\n raise NotImplementedError" ]
[ "0.7279129", "0.68142307", "0.6761746", "0.6669215", "0.661067", "0.6566959", "0.6410402", "0.62955695", "0.6209175", "0.6209175", "0.6209175", "0.6150251", "0.6138908", "0.6107612", "0.6102898", "0.6102898", "0.60230196", "0.60135454", "0.59595454", "0.59073716", "0.59036094", "0.588363", "0.58825576", "0.58825576", "0.58590126", "0.58429396", "0.582205", "0.5804422", "0.5804422", "0.5804422", "0.5804422", "0.5804422", "0.5799619", "0.57945853", "0.57945853", "0.57945853", "0.57794803", "0.57730615", "0.57730615", "0.5769253", "0.57617915", "0.5761162", "0.57336277", "0.56964874", "0.56950104", "0.5678023", "0.5673142", "0.56418127", "0.56072664", "0.5604688", "0.5604218", "0.5604218", "0.5600337", "0.55896837", "0.55896837", "0.5588693", "0.55734813", "0.5561598", "0.55558157", "0.5546809", "0.55339056", "0.55225104", "0.55225104", "0.55225104", "0.55130005", "0.5511499", "0.5511499", "0.5511015", "0.5509951", "0.5495512", "0.5488144", "0.5480076", "0.5480076", "0.5480076", "0.5479503", "0.5475619", "0.54702616", "0.5457303", "0.5456054", "0.5447579", "0.5441189", "0.5438457", "0.54357326", "0.54073244", "0.5403553", "0.5403553", "0.5403553", "0.5403553", "0.5397426", "0.5397426", "0.53834", "0.538178", "0.5377456", "0.5369526", "0.53668934", "0.5359533", "0.5359533", "0.5356159", "0.5351877", "0.5351877", "0.5348911" ]
0.0
-1
Unsupported method for compatibility
def write(self, fname): pass
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _platform_compatible():\r\n raise NotImplementedError", "def __call__(self):\n raise NotImplementedError()", "def __call__(self):\n raise NotImplementedError", "def __upgrade(self):", "def support(self):", "def __call__(self):\r\n raise NotImplementedError('override me')", "def curvetype(self):\n\t\traise Exception(NotImplemented)", "def __call__(self) -> None:", "def __call__(self, *args, **kwargs):\r\n raise NotImplementedError", "def __call__(self, *args, **kwargs):\r\n raise NotImplementedError", "def __call__(self, *args, **kwargs):\r\n raise NotImplementedError", "def __call__(self, *args, **kwargs):\n msg = '{0} is not usable in pure-python'.format(self)\n raise NotImplementedError(msg)", "def to_legacy(self) -> object:\n pass", "def _patch_implementation(self, original, *args, **kwargs):\n pass", "def __call__(self, *args, **kwargs):\n raise NotImplementedError", "def __call__(self, *args, **kwargs):\n raise NotImplementedError", "def __call__(self, f):\n raise NotImplementedError()", "def __call__(self, *args, **kwargs) -> None:\n raise NotImplementedError()", "def _raise_not_supported(self):\n raise CpoNotSupportedException(\"Method '{}' is not available in solver agent '{}' ({}).\"\n .format(inspect.stack()[1][3], self.context.agent, type(self)))", "def _not_implemented(self, *args, **kwargs):\n raise NotImplementedError", "def checkCompatibility(self, *args):\n return _libsbml.SBase_checkCompatibility(self, *args)", "def test(self):\n raise NotImplementedError", "def __call__(self):\n pass", "def __call__(self):\n pass", "def __call__(self, *args, **kwargs): # real signature unknown\n pass", "def is_compatible(self, function, arguments):", "def version(self):\r\n raise NotImplementedError()", "def __call__():", "def __call__():", "def __call__():", "def __call__():", "def __call__():", "def __call__(self, **kwargs):\n raise NotImplementedError", "def __call__(self, a, b):\n # STUDENT CODE HERE\n raise NotImplementedError", "def __call__(self, a, b):\n # STUDENT CODE HERE\n raise NotImplementedError", "def __call__(self, a, b):\n # STUDENT CODE HERE\n raise NotImplementedError", "def extension (self):\n assert False, \"To be implemented by child\"", "def bad(self):\n raise NotImplementedError", "def bad(self):\n raise NotImplementedError", "def _GetOpener(self):\n raise NotImplementedError()", "def check_supported_features(self):", "def interface(self):\n raise exceptions.NotImplementedError()", "def func(*args, **kwargs): # pragma: no cover\n raise NotImplementedError(\"{name} not ported from upstream\"\n .format(name=name))", "def override(self):\n return None", "def function(self):\n raise NotImplementedError", "def __call__(self):\n\t\treturn", "def check(self):\n raise NotImplementedError", "def _create_impl(self):", "def _GetOpener(self):\r\n raise NotImplementedError()", "def check_stability(self):", "def __call__(self):\r\n raise self", "def __call__(self):\r\n raise self", "def warning(self, *args, **kwargs): # real signature unknown\n pass", "def fallback(self):\n pass", "def fallback(self):\n pass", "def method(self):\n return None", "def __call__( self ):\n pass", "def __call__(object):", "def _different_curvatures_not_supported(self):\n if self._extension_cls_directions != self._extension_cls_second:\n raise NotImplementedError(\n \"Different extensions for (directions, second) not supported.\"\n )", "def f_get(self):\n raise NotImplementedError(\"Should have implemented this.\")", "def base(self):\n raise NotImplementedError()", "def patch_sdk():", "def 
patch_sdk():", "def patch_sdk():", "def __nonzero__ ( self ) :\n raise AbstractMethodException( self , \"__nonzero__\" )", "def __call__(self, req):\n raise NotImplementedError(\"%s.__call__()\" % self.__class__.__name__)", "def __call__(self, req):\n raise NotImplementedError(\"%s.__call__()\" % self.__class__.__name__)", "def __int__(self):\n pass", "def get(self):\n raise NotImplementedError", "def available(self):\n\t\traise NotImplementedError", "def test_4_4_1_1(self):\n pass", "def downgrade():\n raise NotImplementedError(\"Downgrade is not supported\")", "def downgrade():\n raise NotImplementedError(\"Downgrade is not supported\")", "def downgrade():\n raise NotImplementedError(\"Downgrade is not supported\")", "def protocol(self):\n raise UnsupportedCall(f\"'{self.__class__.__name__}' object has no attribute 'protocol'\")", "def lint(self):\n raise NotImplementedError()", "def unsupported(self) -> Union[object, Sequence]:\n return self._unsupported", "def __init__(self):\n raise NotImplementedError()", "def _get_version(self):", "def offering(self):\r\n raise NotImplementedError()", "def retinanet(self, *args, **kwargs):\n raise NotImplementedError('retinanet method not implemented.')", "def available(self):\n raise ClixxException(\"Not implemented.\")", "def function(self, *args):\n raise NotImplemented", "def dl():\n raise NotImplementedError()", "def version(self):\n raise NotImplementedError", "def version(self):\n raise NotImplementedError", "def version(self):\n raise NotImplementedError", "def version(self):\n raise NotImplementedError", "def upgrade(self):", "def upgrade(self):", "def onJT808Operation(self):\n pass", "def default(self):\n raise NotImplementedError", "def _to_be_wrapped(self) -> None:", "def method(self):", "def test_method_not_supported(self):\n result = self.app.get('/api/v1.0/documents/convert')\n # assert the status code of the response 405 (method not allowed)\n self.assertEqual(result.status_code, 405)", "def __tr_getattr__(self, name):\n raise AttributeError(name)", "def __tr_getattr__(self, name):\n raise AttributeError(name)", "def method_get_version(self) -> str: # pragma: nocover\n raise NotImplementedError", "def __call__(self):", "def __call__(self):", "def original(self) -> Any:\n raise NotImplementedError" ]
[ "0.7279129", "0.68142307", "0.6761746", "0.6669215", "0.661067", "0.6566959", "0.6410402", "0.62955695", "0.6209175", "0.6209175", "0.6209175", "0.6150251", "0.6138908", "0.6107612", "0.6102898", "0.6102898", "0.60230196", "0.60135454", "0.59595454", "0.59073716", "0.59036094", "0.588363", "0.58825576", "0.58825576", "0.58590126", "0.58429396", "0.582205", "0.5804422", "0.5804422", "0.5804422", "0.5804422", "0.5804422", "0.5799619", "0.57945853", "0.57945853", "0.57945853", "0.57794803", "0.57730615", "0.57730615", "0.5769253", "0.57617915", "0.5761162", "0.57336277", "0.56964874", "0.56950104", "0.5678023", "0.5673142", "0.56418127", "0.56072664", "0.5604688", "0.5604218", "0.5604218", "0.5600337", "0.55896837", "0.55896837", "0.5588693", "0.55734813", "0.5561598", "0.55558157", "0.5546809", "0.55339056", "0.55225104", "0.55225104", "0.55225104", "0.55130005", "0.5511499", "0.5511499", "0.5511015", "0.5509951", "0.5495512", "0.5488144", "0.5480076", "0.5480076", "0.5480076", "0.5479503", "0.5475619", "0.54702616", "0.5457303", "0.5456054", "0.5447579", "0.5441189", "0.5438457", "0.54357326", "0.54073244", "0.5403553", "0.5403553", "0.5403553", "0.5403553", "0.5397426", "0.5397426", "0.53834", "0.538178", "0.5377456", "0.5369526", "0.53668934", "0.5359533", "0.5359533", "0.5356159", "0.5351877", "0.5351877", "0.5348911" ]
0.0
-1
The heading of this turtle in degrees. Heading is measured counterclockwise from due east.
def heading(self): return float(self._turtle.heading())
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def heading(self):\n x, y = self._orient\n result = round(math.atan2(y, x)*180.0/math.pi, 10) % 360.0\n result /= self._degreesPerAU\n return (self._angleOffset + self._angleOrient*result) % self._fullcircle", "def raw_heading(self):\n\n self._heading = math.atan2(self._mag[X], self._mag[Y])\n\n if self._heading < 0:\n self._heading += 2*math.pi\n if self._heading > 2*math.pi:\n self._heading -= 2*math.pi\n\n self._heading_degrees = round(math.degrees(self._heading),2)\n\n return self._heading_degrees", "def heading(self) -> float:\n return self._state[2]", "def heading_idx(self):\n if self.heading > 0:\n idx = self.heading * 180\n else:\n idx = 360 + self.heading * 180\n return int(idx - 1)", "def heading(self):\n\n self.update()\n\n truncate = [0,0,0]\n for i in range(X, Z+1):\n truncate[i] = math.copysign(min(math.fabs(self._accel[i]), 1.0), self._accel[i])\n try:\n pitch = math.asin(-1*truncate[X])\n roll = math.asin(truncate[Y]/math.cos(pitch)) if abs(math.cos(pitch)) >= abs(truncate[Y]) else 0\n # set roll to zero if pitch approaches -1 or 1\n\n self._tiltcomp[X] = self._mag[X] * math.cos(pitch) + self._mag[Z] * math.sin(pitch)\n self._tiltcomp[Y] = self._mag[X] * math.sin(roll) * math.sin(pitch) + \\\n self._mag[Y] * math.cos(roll) - self._mag[Z] * math.sin(roll) * math.cos(pitch)\n self._tiltcomp[Z] = self._mag[X] * math.cos(roll) * math.sin(pitch) + \\\n self._mag[Y] * math.sin(roll) + \\\n self._mag[Z] * math.cos(roll) * math.cos(pitch)\n self._tilt_heading = math.atan2(self._tiltcomp[Y], self._tiltcomp[X])\n\n if self._tilt_heading < 0:\n self._tilt_heading += 2*math.pi\n if self._tilt_heading > 2*math.pi:\n self._heading -= 2*math.pi\n\n self._tilt_heading_degrees = round(math.degrees(self._tilt_heading),2)\n return self._tilt_heading_degrees\n\n except Exception:\n return None", "def current_heading():\n global current_pose\n while current_pose is None:\n pass\n x = current_pose.pose.orientation.x\n y = current_pose.pose.orientation.y\n z = current_pose.pose.orientation.z\n w = current_pose.pose.orientation.w\n\n t3 = +2.0 * (w * z + x * y)\n t4 = +1.0 - 2.0 * (y * y + z * z)\n yaw_z = math.atan2(t3, t4)\n heading = math.degrees(yaw_z) - 90\n if heading < 0:\n heading += 360\n return heading", "def heading_at(self, longitudinal: float) -> float:\n raise NotImplementedError()", "def getH(self):\n\t\thAngle = (math.atan2(self.y,self.x))/(2*math.pi)\n\t\tif self.y < 0:\n\t\t\thAngle = 1 + hAngle\t\n\t\treturn hAngle", "def getPosHeading(self) :\n\t\treturn (self.avatarNP.getX(), self.avatarNP.getY(), \\\n\t\t\tself.avatarNP.getZ(), (self.avatarNP.getHpr()[0])%360)", "def getHeadingTime(self) -> float:\n return self.timestep_cached_heading_tm", "def __get_heading(self, robot_position, robot_yaw):\n abs_heading = math.atan2(self.pinger_loc[1] - robot_position[1],\n self.pinger_loc[0] - robot_position[0])\n return self.normalize(\n abs_heading - robot_yaw + random.gauss(0, self.noise))", "def get_EUL_Heading(self):\n eul_raw = self.i2c.mem_read(2, self.addr, OUT_EUL_HEADING_LSB)\n eul_heading = self.sign_val(((eul_raw[1]<<8) + eul_raw[0]))/16.0\n return eul_heading\n #print(eul_heading)", "def get_heading(hunter_position, target_position):\n hunter_x, hunter_y = hunter_position\n target_x, target_y = target_position\n heading = atan2(target_y - hunter_y, target_x - hunter_x)\n heading = angle_trunc(heading)\n return heading", "def get_heading(hunter_position, target_position):\n hunter_x, hunter_y = hunter_position\n target_x, target_y = target_position\n heading = atan2(target_y 
- hunter_y, target_x - hunter_x)\n heading = angle_trunc(heading)\n return heading", "def Get_Heading(x1, y1, x2, y2):\n\n heading = 0\n dx = x2 - x1\n dy = y2 - y1\n\n if dx != 0:\n heading = (90 - math.degrees(math.atan2(dy,dx)) + 360) % 360\n\n elif dy > 0: heading = 0\n\n elif dy < 0: heading = 180\n\n return heading", "def heading_difference(self, other_heading):\n diff = abs(self.heading - other_heading)\n if diff > 180:\n diff = 360 - diff\n return diff", "def Get_Heading_Change(heading_last, heading_current):\n r = heading_current - heading_last + 180\n return (r % 360) - 180", "def theta_deg(self):\n return self.theta * 180 / np.pi", "def course(self) -> float:\n crab_angle = np.arctan2(self.velocity[1], self.velocity[0])\n return self.heading + crab_angle", "def calculate_heading(self):\r\n self.radius = Bullet.side # for collision detection\r\n self.angle += self.boss.turretAngle\r\n self.mass = Bullet.mass\r\n self.vel = Bullet.vel\r\n\r\n # Designing a Bullet\r\n image = pygame.Surface((Bullet.side * 2, Bullet.side))\r\n image.fill((128, 128, 128))\r\n pygame.draw.rect(image, (252, 65, 3), (0, 0, int(Bullet.side * 1.5), Bullet.side))\r\n pygame.draw.circle(image, self.color, (int(self.side * 1.5), self.side // 2), self.side // 2)\r\n image.set_colorkey((128, 128, 128))\r\n\r\n # Converting bullet surface to image\r\n self.image0 = image.convert_alpha()\r\n self.image = pygame.transform.rotate(self.image0, self.angle)\r\n self.rect = self.image.get_rect()\r\n\r\n # Positioning of bullet\r\n self.dx = math.cos(degrees_to_radians(self.boss.turretAngle)) * self.vel\r\n self.dy = math.sin(degrees_to_radians(-self.boss.turretAngle)) * self.vel", "def get_heading(self):\n return self.__heading", "def get_heading(self):\n return self.__heading", "def hp(self):\n return float(self.hp_angle)", "def degrees(self) -> float:\n return math.degrees(self.radians)", "def heading(self):\n return self._heading", "def orientationToHeading(orientation):\n res = [0, 0, 0, 0]\n res[0] = orientation.x\n res[1] = orientation.y\n res[2] = orientation.z\n res[3] = orientation.w\n return tf.transformations.euler_from_quaternion(res)[2]", "def getHeading(self, request, context):\n \n return droneconnect_pb2.Heading(heading = float(self.vehicle.heading))", "def GetHeading(self):\n if self._imu_data == None:\n # No data yet.\n return 0\n\n if self._thread_state != 0:\n # IMU down.\n return None\n\n return self._imu_data['fusionPose'][2]", "def _calc_cycle_delta_heading(self):\n first_frame = self._frames[0]\n last_frame = self._frames[-1]\n\n rot_start = self.get_frame_root_rot(first_frame)\n rot_end = self.get_frame_root_rot(last_frame)\n inv_rot_start = transformations.quaternion_conjugate(rot_start)\n drot = transformations.quaternion_multiply(rot_end, inv_rot_start)\n cycle_delta_heading = motion_util.calc_heading(drot)\n\n return cycle_delta_heading", "def angle(self) -> float:\n ...", "def angle(self):\n return 0", "def heading(yaw):\n q = euler2quat(0.0, 0.0, yaw)\n quat = Quaternion()\n quat.w = q[0]\n quat.x = q[1]\n quat.y = q[2]\n quat.z = q[3]\n return quat", "def angle(self) -> float:\n return self._angle", "def angle(self) -> float:\n return self._angle", "def turn_heading(self, heading, inc):\n return (heading + inc) % 4", "def get_heading(self):\n return self.heading[0]", "def compute_heading_from_quaternion(r):\n # quaternion - np.quaternion unit quaternion\n # Real world rotation\n direction_vector = np.array([0, 0, -1]) # Forward vector\n heading_vector = 
quaternion_rotate_vector(r.inverse(), direction_vector)\n\n phi = -np.arctan2(heading_vector[0], -heading_vector[2]).item()\n return phi", "def safe_north_point(self):\n ifMutexAcquire(self.use_mutex)\n try:\n x, y, z = self.read_magnetometer()\n except:\n x, y, z = 0,0,0\n finally:\n ifMutexRelease(self.use_mutex)\n\n # using the x and z axis because the sensor is mounted vertically\n # the sensor's top face is oriented towards the front of the robot\n\n heading = -atan2(-x, z) * 180 / pi\n\n # adjust it to 360 degrees range\n\n if heading < 0:\n heading += 360\n elif heading > 360:\n heading -= 360\n\n return heading", "def heading_to(self, p1):\n # Turn them all into radians\n phi1 = math.radians(self.lat)\n lambda0 = math.radians(self.long)\n phi = math.radians(p1.lat)\n lambda_ = math.radians(p1.long)\n\n ldiff = lambda_ - lambda0\n cosphi = math.cos(phi)\n\n bearing = math.atan2(cosphi * math.sin(ldiff),\n (math.cos(phi1) * math.sin(phi) - math.sin(phi1) * cosphi * math.cos(ldiff)))\n bearing_deg = math.degrees(bearing)\n if bearing_deg < 0:\n bearing_deg += 360\n\n return bearing_deg", "def get_angle(self):\n return self.__angle", "def heading_callback(self, data):\n heading = data.data\n self.heading_vector = [math.cos(heading), math.sin(heading)]", "def estimated_heading(self):\n return self._estimates[2].item(0)", "def angle(self):\r\n return self.model.angle", "def angle(self) -> int:", "def get_heading_pwm(self, goal):\n hdiff = goal.target_heading - self.curr_heading\n # handle 0/360 change at magnetic north\n if abs(hdiff) > 180:\n if hdiff < 0:\n hdiff += 360\n else:\n hdiff -= 360\n # p-control\n hout = hdiff * self.heading_p\n # limit output if necassary\n if abs(hout) > self.heading_pmax:\n if hout < 0:\n hout = -self.heading_pmax\n else:\n hout = self.heading_pmax\n hout += self.pwm_center\n return hout", "def phi_deg(self):\n return self.phi * 180 / np.pi", "def convert_heading(self, in_heading):\n\n headings = [\"North\", \"North East\",\n \"East\", \"South East\",\n \"South\", \"South West\",\n \"West\", \"North West\",\n \"North\"]\n\n nb_headings = len(headings)-1 # North is listed twice\n heading_index = int(round(in_heading/(360.0/nb_headings),0))\n # sometimes the IMU will return a in_heading of -1000 and higher.\n if heading_index < 0:\n heading_index = 0\n # print(\"heading {} index {}\".format(in_heading, heading_index))\n # print(\" {} \".format( headings[heading_index]))\n return(headings[heading_index])", "def angle(self):\n return self._angle", "def angle(self):\n return self._angle", "def angle(self):\n return self._angle", "def direction(self):\r\n return 180 - atan2(self.x, self.y)*180/pi", "def horiz_angle(time, data):\n\n # TODO What should 0deg be? Set it to inline w/ target? facing target?\n\n # direction of the sun. 
measured in degrees counted clockwise from north.\n azimuth = data[time]['azimuth']\n\n h_angle = (azimuth / 2 - 90)\n\n # returns answer between -180 and 180 degrees\n return round(((h_angle + 180) % 360) - 180, 4)", "def get_total_heading(self, heading_unit_vector_list):\n if heading_unit_vector_list == []:\n # no neighbour is seen\n # go as you wish\n h = self.heading_vector\n # go to north\n # h = [1,0]\n return h\n # sum all the virtual heading values\n total_heading_vector = [0, 0]\n for heading_unit_vector in heading_unit_vector_list:\n total_heading_vector[0] += heading_unit_vector[0]\n total_heading_vector[1] += heading_unit_vector[1]\n # normalize total heading to be a unit vector\n h = [(total_heading_vector[0]/linalg.norm(total_heading_vector)),\n (total_heading_vector[1]/linalg.norm(total_heading_vector))]\n return h", "def hp(self):\n minute_int, second = divmod(self.minute, 1)\n if self.positive:\n return self.degree + (minute_int / 100) + (second * 0.006)\n else:\n return -(self.degree + (minute_int / 100) + (second * 0.006))", "def getHeadAngles(self):\n\n\t\trobot_head_yaw, robot_head_pitch = self.motion.getAngles(\"Head\", False)\n\n\t\t# return adjusted robot head angles\n\t\treturn [robot_head_yaw, -robot_head_pitch]", "def phi_up(self):\n return 0.5 * (self.phi + 10 * (self.phi / 30.0) ** 2) / 180.0 * np.pi", "def hp(self):\n if self.positive:\n return self.degree + (self.minute / 100) + (self.second / 10000)\n else:\n return -(self.degree + (self.minute / 100) + (self.second / 10000))", "def getHeading(q):\n yaw = math.atan2(2 * (q.x * q.y + q.w * q.z),\n q.w * q.w + q.x * q.x - q.y * q.y - q.z * q.z)\n return yaw", "def interior_angle(self):\n if self.interior_angle_l is not None:\n return self.interior_angle_l\n else:\n self.interior_angle_l = ((self.vert_count - 2)*180)/math.pi\n return self.interior_angle_l", "def set_heading(self, heading):\n self._kernel.set_heading(float(heading))", "def get_heading(self):\n raise NotImplementedError()", "def lunar_phase(cls, tee):\n return mod(cls.lunar_longitude(tee) - cls.hindu_solar_longitude(tee), 360)", "def hpa(self):\n return HPAngle(self.hp())", "def hpa(self):\n return HPAngle(self.hp())", "def hpa(self):\n return HPAngle(self.hp())", "def TransformHeading(transform, heading):\n x1, y1 = np.cos(heading), np.sin(heading)\n\n # Transform the unit ray.\n unit_ray = np.array([x1, y1, 0.0, 1.0])\n transform_no_shift = CopyTransform(transform)\n transform_no_shift[0, 3] = 0\n transform_no_shift[1, 3] = 0\n transformed_ray = np.matmul(transform_no_shift, unit_ray)\n x2, y2 = transformed_ray[0:2]\n\n # Use arctan2 to compute the new rotation angle; note that arctan2 takes 'y'\n # and then 'x'.\n new_heading = np.arctan2(y2, x2)\n return new_heading", "def direction_angle(self):\n return math.atan2(self.velocity, self.velocity)", "def degree(self):\n return self._deg", "def getFinalLarmorAngle(self):\n return np.degrees(self.theta_L_array[-1])", "def getAngle(self):\n return self._angle", "def turn_heading(self, heading, inc):\n return turn_heading(heading, inc)", "def get_orientation_degrees(game_object: GameObject) -> float:\n return CommonQuaternion.to_degrees(CommonObjectLocationUtils.get_orientation(game_object))", "def get_turn_degrees(self):\n self.turn_degrees = 360/self.num_protrusions\n return self.turn_degrees", "def hpa(self):\n return HPAngle(gon2hp(self.gon_angle))", "def angle(self):\n return arccos(dot((self.a - self.o) / self.r, (self.b - self.o) / self.r))", "def angle(self):\n return 
math.degrees(math.atan2(self[1], self[0]))", "def start_angle(self):\n return self._start_angle", "def start_angle(self):\n return self._start_angle", "def steps_to_angle():\n pass", "def getAngle(self):\n return self.vector.angle", "def theta(self):\n return float(np.arctan2(self.y, self.x))", "def get_angle(self, t_step, degree=True):\n v_vector = np.array([self.s_velocity[t_step], self.d_velocity[t_step]])\n u_v1 = self._get_unit_vector(v_vector)\n rad = np.arccos(np.clip(np.dot(self.ref_vector, u_v1), -1.0, 1.0))\n if degree:\n return np.rad2deg(rad) * np.sign(self.d_velocity[t_step])\n else:\n return rad", "def get_angle(self) -> NumberType:\n return self._angle", "def theta(self):\n return atan2(self.y, self.x)", "def degrees(x):\n return 0.0", "def avl_angle(self):\n dif_height = (self.heights[5] - self.heights[7])\n dif_position = (self.positions[0][7] - self.positions[0][5])\n angle = atan(dif_height / dif_position) / 1.5 * 180 / pi\n return angle", "def speed(self):\n return self._turtle.speed()", "def speed(self):\n return self._turtle.speed()", "def h(self) -> float:\n return self._h", "def theta(self):\n return self._theta", "def theta(self):\n return self._theta", "def get_desired_heading(self, h, p):\n heading_vector = [self.alpha*h[0] + self.beta*p[0],\n self.alpha*h[1] + self.beta*p[1]]\n a = [(heading_vector[0]/linalg.norm(heading_vector)),\n (heading_vector[1]/linalg.norm(heading_vector))]\n return a", "def dir(self):\n return unit_vector(normalize_angle(self.angle))", "def setheading(self, rot):\n if(self._gridmode):\n rot = round(rot/90)*90\n \n self._rotation = round(rot, 2)\n self._appendCurrentState()", "def rotation_angle(self):\n return self.container['rotation_angle']", "def phase(self):\n return -self.attrs['RFphase']*2*np.pi", "def get_theta(self):\n return self.theta", "def hp(self):\n return dec2hp(self.dec_angle)", "def _rad_center(self):\n return ((self.rad_hi + self.rad_lo) / 2).to(\"deg\")", "def get_degree_to(self, position):\n dx = abs(self.left - position.left)\n dy = abs(self.top - position.top)\n if dx == 0:\n return 90\n return math.atan(dy / dx) / math.pi * 180" ]
[ "0.85323304", "0.7825337", "0.77631426", "0.7465228", "0.7281472", "0.7165995", "0.70578945", "0.6940017", "0.6924136", "0.6804612", "0.6803771", "0.677972", "0.6762503", "0.6762503", "0.67530805", "0.6701139", "0.66272557", "0.65986854", "0.6573453", "0.6512578", "0.65064764", "0.65064764", "0.64914626", "0.6480391", "0.6439444", "0.64054114", "0.63982445", "0.6391681", "0.6342559", "0.62989235", "0.62915313", "0.62629783", "0.62604123", "0.62604123", "0.62315863", "0.62219316", "0.62119204", "0.6208607", "0.61810595", "0.6155384", "0.6149425", "0.61415845", "0.61127716", "0.6109154", "0.60842276", "0.60823584", "0.60636103", "0.6061499", "0.6061499", "0.6061499", "0.60119605", "0.60119396", "0.6003686", "0.59743226", "0.59639466", "0.59516233", "0.5948388", "0.5918282", "0.58904994", "0.5871402", "0.5870014", "0.5848375", "0.58427685", "0.58427685", "0.58427685", "0.58280444", "0.582502", "0.5823509", "0.57987744", "0.5798552", "0.57943827", "0.5788252", "0.5782603", "0.57785153", "0.57776314", "0.573645", "0.57160074", "0.57160074", "0.5714432", "0.569393", "0.56865937", "0.5677814", "0.5675643", "0.5670794", "0.5654902", "0.56438726", "0.56276166", "0.56276166", "0.56246513", "0.5623583", "0.5623583", "0.5621815", "0.56203806", "0.5618453", "0.55997086", "0.55975467", "0.55814636", "0.5577025", "0.5568441", "0.55540466" ]
0.87159824
0
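Several of the negative snippets in the record above share one pattern: compute a heading with `atan2` and wrap the result into [0, 360). A minimal self-contained sketch of that common pattern (the function name `vector_heading` is illustrative and not taken from any snippet above):

```python
import math

def vector_heading(x, y):
    """Heading in degrees for the direction vector (x, y), wrapped into [0, 360)."""
    # atan2 returns radians in (-pi, pi]; the modulo maps negatives into range.
    return math.degrees(math.atan2(y, x)) % 360.0

print(vector_heading(0, 1))   # 90.0  (+y axis)
print(vector_heading(-1, 0))  # 180.0 (-x axis)
print(vector_heading(0, -1))  # 270.0 (-y axis)
```

Note that this uses the mathematical convention (0 degrees along +x, counterclockwise); several snippets above instead use the compass convention (0 degrees at north, clockwise), which effectively swaps the arguments to `atan2`.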
The animation speed of this turtle. The speed is an integer from 0 to 10. Speed = 0 means that no animation takes place; the methods forward/back make the turtle jump, and likewise left/right make the turtle turn instantly. Speeds from 1 to 10 enforce increasingly faster animation of line drawing and turtle turning: 1 is the slowest animated speed, while 10 is the fastest (non-instantaneous) speed.
def speed(self): return self._turtle.speed()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def speed(self, speed=None):\n speeds = {'fastest':0, 'fast':10, 'normal':6, 'slow':3, 'slowest':1 }\n if speed is None:\n return self._speed\n if speed in speeds:\n speed = speeds[speed]\n elif 0.5 < speed < 10.5:\n speed = int(round(speed))\n else:\n speed = 0\n self.pen(speed=speed)", "def set_animation_speed(self, speed):\n self.m_animation_speed = self.calculate_animation_speed(speed)", "def speed(n):\n turtleTmp.speed(max(1, min(n, 10)))", "def calculate_animation_speed(self, speed):\n speed = float(speed)\n\n self.m_scl_pause = True if speed == 0 else False\n\n calc_speed = int(-1715 * pow(abs(speed), 3) + 4121 * pow(abs(speed), 2) - 3735 * abs(speed) + 1332)\n\n return calc_speed if speed >= 0 else -calc_speed", "def set_speed(self, speed=0):\n speed = clamp(speed)\n self._state.speed = speed\n self.send_command(Command.SET_SPEED, [int(speed)])", "def set_speed(self, speed):\n self._set_sub_text('speed', text=str(speed))\n return self", "def speed(self, speed: int, time: int = 0, /) -> None:", "def speed(self, speed):\n self._speed = speed\n self._rotspeed = speed", "def set_speed(self, speed):\n self.speed = speed", "def set_motor_speed(self, speed=0.0):\r\n self.target_speed = speed", "def set_speed(self, speed):\n assert isinstance(speed, float), \"Must be a float\"\n \n if speed < 0.0:\n raise ValueError(\"Negative speeds not supported\")\n \n self.speed = speed", "def _nextSpeed(self, position, speed, action):\n next_speed = speed + self.integration_step * self._speedDiff(position, speed, action)\n\n # Check if you reach a terminal state\n if abs(next_speed) > 3:\n self.stuck = True\n return next_speed", "def set_speed(self,speed):\n self.speed = speed", "def movespeed(self, speed):\n self._speed = speed", "def change_motor_speed(self, speed=0.0):\r\n if not self.enabled:\r\n self.set_neutral(braked=False)\r\n return\r\n\r\n # logging.info(\"{} Motor Speed: {}\".format(self.motor_name, speed))\r\n self.current_speed = speed # Store current set speed\r\n\r\n # If speed is < 0.0, we are driving in reverse.\r\n self.forward = True\r\n if speed < 0.0:\r\n # Normalise speed value to be in range [0, 100]\r\n speed = -speed\r\n # Store direction\r\n self.forward = False\r\n\r\n # Apply a factor to the speed to limit speed\r\n speed *= self.speed_factor\r\n\r\n # Set motor directional pins\r\n if self.forward:\r\n if self.a_pin >= 0:\r\n self.GPIO.output(self.a_pin, 1)\r\n if self.b_pin >= 0:\r\n self.GPIO.output(self.b_pin, 0)\r\n else:\r\n if self.a_pin >= 0:\r\n self.GPIO.output(self.a_pin, 0)\r\n if self.b_pin >= 0:\r\n self.GPIO.output(self.b_pin, 1)\r\n\r\n # Convert speed into PWM duty cycle\r\n # and clamp values to min/max ranges.\r\n dutycycle = speed\r\n if dutycycle < 0.0:\r\n dutycycle = 0.0\r\n elif dutycycle > self.max_speed:\r\n dutycycle = self.max_speed\r\n\r\n # Change the PWM duty cycle based on fabs() of speed value.\r\n self.PWM.ChangeDutyCycle(dutycycle)", "def move_turtle(self):\n self.forward(self.move_speed)", "def set_speed(self, speed):\r\n speed = float(speed)\r\n speed = int(round(speed * 27.7778))\r\n return self.send_command('speed %s' % speed)", "def set_speed(self, speed: str) -> None:\n self.wink.set_state(True, speed)", "def speed(self, s=0):", "def set_speed(self, speed):\n return self.bot_client.send_command(_Command.SetSpeed, speed)", "def move_set_speed(self, speed):\n # self.motor_set_speed(MOTOR_LEFT, speed)\n # self.motor_set_speed(MOTOR_RIGHT, speed)\n self.move_speed = speed\n print(\"move_speed is now:\", self.move_speed)", "def 
set_speed(speed):\n if speed >255:\n speed =255\n elif speed <0:\n speed =0\n set_left_speed(speed)\n #time.sleep(.1)\n set_right_speed(speed)", "def speed(self, value: float):\n self._speed = value", "def speed(self) -> int:", "def speed(self) -> int:", "def set_speed(self, speed):\n self.device.set_speed(speed)\n return \"OK\"", "def speed(self) -> int:\n return self._speed", "def speed(self) -> int:\n return self._speed", "def set_speed(self,speed):\n self.speed_p = speed", "def set_speed(self, level):\n speed = self.SPEED + (self.SPEED_INCREMENT * level)\n\n if self.lane % 2:\n # Move to the right\n self.velocity = (speed, 0)\n else:\n # Move to the left\n self.velocity = (-speed, 0)", "def speed(self):\n return self._speed.value", "def move_forward(self, speed):\n\n # Clamp the speed\n speed = clamp(delta_unit(speed), 0, delta_unit(Car.max_speed))\n\n # Appends the speed according to the direction\n rad = np.radians(self.direction)\n self.fx += speed * np.cos(rad)\n self.fy += speed * np.sin(rad)\n\n # Set marker to move\n self.moved = True", "def set_speed(self, speed):\n self._kernel.set_speed(float(speed))", "def set_speed(self, speed):\n # create the MAV_CMD_DO_CHANGE_SPEED command\n msg = self.message_factory.command_long_encode(0, 0,mavutil.mavlink.MAV_CMD_DO_CHANGE_SPEED,0,0,speed,0, 0, 0, 0, 0)\n\n # send command to vehicle\n self.send_mavlink(msg)\n self.flush()", "def speed(self) -> float:\n return self._speed", "def speed(self) -> float:\n return self._speed", "def turtle_movement(turtle_shape, bg_color, turtle_color, turtle_speed):\n turtle_name = initialize(turtle_shape, bg_color,\n turtle_color, turtle_speed)\n\n for i in range(36):\n for i in range(4):\n turtle_name.forward(200)\n turtle_name.right(90)\n turtle_name.right(10)", "def forward(self, speed):\n self.pwm_backward.ChangeDutyCycle(0)\n self.pwm_forward.ChangeDutyCycle(speed)", "def forward(self, speed):\n self.pwm_backward.ChangeDutyCycle(0)\n self.pwm_forward.ChangeDutyCycle(speed)", "def speed(self, value: int, /) -> None:", "def set_joystick_speed(self, speed):\n if self.table_ready:\n command = self.build_command(self.device, (\"set_joy_speed\", str(speed)))\n self.vcw.write(self.device, command)", "def setVoiceSpeed(self, speed):\n\n try:\n assert speed >= 50 and speed <= 400\n\n except AssertionError:\n self.logger.warning(\"incorrect voice speed, resesting to the default speed\")\n speed = 100\n\n self.tts.setParameter(\"speed\", speed)", "def increment_speed(self):\n self.speed += 0.0004", "def speed(self):\n return 1 # speed system not implemented yet", "def speed(self) -> str:\n return self._current_speed", "def set_speed(self, speed: str) -> None:\n if speed == SPEED_HIGH:\n self._bond.setSpeed(self._deviceId, self._speed_high)\n elif speed == SPEED_MEDIUM:\n self._bond.setSpeed(self._deviceId, self._speed_medium)\n elif speed == SPEED_LOW:\n self._bond.setSpeed(self._deviceId, self._speed_low)\n self._attributes['current_speed'] = speed", "def set_speed(self, speed, ports='ABCD'):\n\n speed += self.avg_speed\n if self.inverted:\n speed = -speed\n\n if speed > self.margin:\n speed = self.margin\n elif speed < -self.margin:\n speed = self.margin\n\n for p in ports:\n if self.motors[p].connected:\n self.motors[p].run_forever(speed_sp=speed, speed_regulation=True)\n else:\n print(\"Cant run motor on\", p, \"- not connected\")", "def set_speed(self, axis, speed):\n #log.info(f\"set speed {axis} {speed}\")\n self.cmd_axis_speed[axis] = speed", "def forward(self, speed):\n self.controller.forward(speed)", 
"def walk(self):\n self.speed = self.speed + (0.2 * self.legs)", "def speed(self) -> str:\n return self._attributes.get(\"current_speed\")", "def turnspeed(self, rotspeed):\n self._rotspeed = rotspeed", "def set_speed(self, speed=None, auto=False, adaptive=False):\n if speed is None:\n speed = 0\n elif not util.is_natural(speed):\n raise TypeError('Expected positive number for speed, given %s.' % speed)\n elif speed > self.MAX_JTAG_SPEED:\n raise ValueError('Given speed exceeds max speed of %d.' % self.MAX_JTAG_SPEED)\n elif speed < self.MIN_JTAG_SPEED:\n raise ValueError('Given speed is too slow. Minimum is %d.' % self.MIN_JTAG_SPEED)\n\n if auto:\n speed = speed | self.AUTO_JTAG_SPEED\n\n if adaptive:\n speed = speed | self.ADAPTIVE_JTAG_SPEED\n\n self._dll.JLINKARM_SetSpeed(speed)\n\n return None", "def forward(self, speed):\n vrep.simxSetJointTargetVelocity(self.client_id, self.handles['rollingJoint_rr' + self.postfix], -speed,\n ONE_SHOT_MODE)\n vrep.simxSetJointTargetVelocity(self.client_id, self.handles['rollingJoint_rl' + self.postfix], -speed,\n ONE_SHOT_MODE)\n vrep.simxSetJointTargetVelocity(self.client_id, self.handles['rollingJoint_fr' + self.postfix], -speed,\n ONE_SHOT_MODE)\n vrep.simxSetJointTargetVelocity(self.client_id, self.handles['rollingJoint_fl' + self.postfix], -speed,\n ONE_SHOT_MODE)", "def _speed_action(i, speed, mi):\n result = mi\n # Need to avoid speeds of 1, -1 resulting in zero movement\n if i % (abs(speed) + 1) != 0:\n if speed > 0:\n result = (mi + 1) % NUM_STEPPER_STATES\n elif speed < 0:\n result = (mi - 1) % NUM_STEPPER_STATES\n\n return result", "def set_speed():\n pass", "def speed(self):\n return sqrt(self.velocity_x ** 2 + self.velocity_y ** 2)", "def change_speed(self, action):\r\n if action == \"faster\":\r\n self.speed += 1\r\n else:\r\n if self.speed > 1:\r\n self.speed -= 1", "def set_speed(self, speed, motor):\n self.driver.set_speed(speed, motor)\n self.last_control = time.time()", "def get_speed(self):\n return self._speed", "def set_cmd_velocity(self, speed):\n self.gripper_io.set_signal_value(\"speed_mps\", speed)", "def set_move_speed(cls, quad):\n\n\t\tspeed = cls.get_address_value(quad.result)\n\t\treturn speed/1000.0", "def set_speed(self, new_speed):\n self.__x_speed, self.__y_speed = new_speed", "def set_flywheel_speeds(self, speed):\n\n # Set the flywheel speeds\n self.fmt.set_speed(speed)\n self.fmb.set_speed(speed)", "def settle(self):\n if (self.angle >= self.max_angle) or (\n self.angle <= -self.max_angle\n ): # time to reverse\n print(\"reverse\", self.angle, self.max_angle)\n self.speed *= -0.9 # damped\n self.max_angle *= 0.9\n if self.speed > 0:\n self.angle = self.max_angle\n else:\n self.angle = -self.max_angle\n\n self.angle += radians(self.speed)\n print(self.angle, self.max_angle, self.speed)\n self.x = self.cx + self.length * sin(self.angle)\n self.y = self.cy + self.length * cos(self.angle)", "def move(self, speed=1):\n self.set_motor(self.left_motor, 'left', speed)\n self.set_motor(self.right_motor, 'right', speed)\n time.sleep(0.5)", "def adjustSpeed(self, speed):\n\t\tif self.timeout <= 0:\n\t\t\tself.speed = max(self.minimumSpeed, min(self.maximumSpeed, self.speed + speed))", "def setspeed(speed):\n if speed is None:\n click.echo(\"speed value is required\")\n raise click.Abort()\n\n for fan in range(_wrapper_get_num_fans()):\n status = _wrapper_set_fan_speed(fan, speed)\n if not status:\n click.echo(\"Failed\")\n sys.exit(1)\n\n click.echo(\"Successful\")", "def setMotorSpeed(self,motorID,speed):\n speed = 
max(min(speed,1.0),-1.0) #range limit\n direction = speed < 0 # set reverse direction bit if speed less than 0\n bit8speed = self.params[1] & 1 #first bit of paramter 1 can be used to determin if its in 8 bit speed mode\n speedMultiplyer = 127 # speed is between 0-127 for 7bit speed mode\n if bit8speed:\n speedMultiplyer = 255 #speed is between 0-255 for 8bit speed mode\n speedByte = int(abs(speed)*speedMultiplyer)# covert floating speed to scaled byte\n \n cmd = speedByte >= 128 # bit 0 of command is used for 8th bit of speedbyte as speedbyte can only use 7 bits\n \n speedByte &= 127 #clear the 8th bit of the speedbyte as it can only use 7 bits\n \n cmd |= direction << 1 #shift direction into bit 1\n cmd |= motorID << 2 #shift motor id into bit 2\n cmd |= 1 << 3 # just set bit 3\n\n #send the speed command\n self.driver.sendReceive([0xaa,self.id,cmd,speedByte],0)", "def speed(self):\n self.convert_window(\"Speed\", \"meters/second\", [\"Mach number\", \"Nm/24hr\", \"centimeters/minute\", \"centimeters/second\", \"feet/hour\", \"feet/minute\", \"feet/second\", \"inches/minute\", \"inches/second\", \"kilometers/hour\", \"kilometers/second\", \"knots\", \"meters/hour\", \"meters/minute\", \"meters/second\", \"miles/hour\", \"miles/minute\", \"miles/second\", \"nautical miles/hour\", \"speed of light\", \"speed of sound\", \"yards/hour\", \"yards/minute\", \"yards/second\"])", "def move_forward(self, speed):\n\t\t# You should modify the bias of 4 wheels depending on your hardware.\n\t\tself._front_left_wheel.anticlockwise_rotate(speed + LEFT_FR_BIAS + LEFT_RIGHT_BIAS)\n\t\tself._front_right_wheel.clockwise_rotate(speed + RIGHT_FR_BIAS)\n\t\tself._rear_left_wheel.anticlockwise_rotate(speed + LEFT_RIGHT_BIAS)\n\t\tself._rear_right_wheel.clockwise_rotate(speed)", "def set_speed(self,value):\n if (value>self.get_max_speed()):\n print \"asked to set the speed to %f but the max speed is %f\\n\" % (value,self.get_max_speed())\n else:\n return self.put_par(\"slew_speed\",value)", "def increase_speed(self, character):\n character.speed = min(character.max_steps/4, character.speed * 1.25)", "def get_speed(self):\n raise NotImplementedError", "def get_speed(self):\n raise NotImplementedError", "def setSpeedEngine1(speed: int):\n pass", "def _get_forward_speed(self):\n\n velocity = self._vehicle.get_velocity()\n transform = self._vehicle.get_transform()\n vel_np = np.array([velocity.x, velocity.y, velocity.z])\n pitch = np.deg2rad(transform.rotation.pitch)\n yaw = np.deg2rad(transform.rotation.yaw)\n orientation = np.array([np.cos(pitch) * np.cos(yaw), np.cos(pitch) * np.sin(yaw), np.sin(pitch)])\n speed = np.dot(vel_np, orientation)\n return speed", "def speed(self):\n return self._dll.JLINKARM_GetSpeed()", "def get_speed(self):\r\n return self.__x_speed, self.__y_speed", "def set_speed (self, dx = None, dy = None) :\n if dx != None :\n self.speed[0] = dx\n if dy != None :\n self.speed[1] = dy", "def step(self, speed):\n\n obstacle_speed_double = ctypes.c_double(speed[0])\n agent_x_speed_double = ctypes.c_double(speed[1])\n agent_y_speed_double = ctypes.c_double(speed[2])\n\n self.wrapper.step(self.instance, obstacle_speed_double, agent_x_speed_double, agent_y_speed_double)", "def movement_speed(self) -> Union[int, float]:\n return self.type_data.proto.movement_speed", "def movement_speed(self) -> Union[int, float]:\n return self.type_data.proto.movement_speed", "def drive(self, speed=300):\n\t\tself.direction = self.find_direction()\n\t\tio.set_bit(OUTPUT.MOTORDIR, 
self.direction)\n\t\tio.write_analog(OUTPUT.MOTOR, 2048+4*abs(config.SPEED))\n\t\tself.moving = True", "def increase_speed(self):\n self.target_speed *= self.speedup_scale\n self.bullet_speed_factor *= self.speedup_scale", "def step(self):\n if self.change_rate != 0:\n self.speed += stats.norm(loc=0, scale=self.change_rate).rvs()\n\n if self.speed < 0.5 * self._initial_speed:\n self.speed = 0.5 * self._initial_speed\n if self.speed > 2.0 * self._initial_speed:\n self.speed = 2.0 * self._initial_speed\n else:\n pass", "def target_speed(self):\n return self._target_speed.value", "def send_tspeed(self):\n return self.shell.terminal_speed", "async def set_fan_speed(self, speed):\n\n if speed not in self.fan_speeds:\n raise ValueError(f\"Invalid fan speed: {speed}\")\n keys = self._get_cmd_keys(CMD_STATE_WIND_STRENGTH)\n speed_value = self.model_info.enum_value(keys[2], DHumFanSpeed[speed].value)\n await self.set(keys[0], keys[1], key=keys[2], value=speed_value)", "def get_speed(self):\n raise NotImplementedError()", "def setMotorSpeed(self, idMotor=0, sense=0, speed=0, board=0):\n msg = [idMotor, sense, int(speed / 256.0), speed % 256]\n return self.callModule('motors', board, 0, 'setvelmtr', msg)", "def setSpeedEngine2(speed: int):\n pass", "def setSpeedEngine4(speed: int):\n pass", "def GetSpeed(self):\n pass", "def set_speed(self, speed, motor = 'both'):\n #easily handle setting both or a single motor\n motors = ['speed'] if motor == 'both' else [motor]\n outputs = []\n for motor in motors:\n output = self._send_command(\"%s %s\" % (motor, speed))\n outputs.append(output.strip())\n\n return \", \".join(outputs)", "async def speed(self, value=None):\n return self.extract(await self._rpc.speed()) if value is None else (await self._rpc.speed(self.extend(value)))", "def speed(self) -> str:\n current_wink_speed = self.wink.current_fan_speed()\n if SPEED_AUTO == current_wink_speed:\n return SPEED_AUTO\n if SPEED_LOWEST == current_wink_speed:\n return SPEED_LOWEST\n if SPEED_LOW == current_wink_speed:\n return SPEED_LOW\n if SPEED_MEDIUM == current_wink_speed:\n return SPEED_MEDIUM\n if SPEED_HIGH == current_wink_speed:\n return SPEED_HIGH\n return None", "def set_speed(self, ratio):\n self._speed = ratio", "def acceleration(self):\n # speed is by formula: x axis speed: by cos of the heading and y\n # axis by sine of the heading\n self.x_speed += math.cos(math.radians(self.degrees))\n self.y_speed += math.sin(math.radians(self.degrees))" ]
[ "0.7406827", "0.6900128", "0.68084306", "0.674762", "0.6742498", "0.65431887", "0.6535551", "0.651261", "0.64920443", "0.64284843", "0.64099866", "0.64064616", "0.6403609", "0.63961345", "0.63856757", "0.6369697", "0.63394564", "0.6332096", "0.6319833", "0.6256491", "0.6254903", "0.62447685", "0.62365156", "0.61812925", "0.61812925", "0.6114333", "0.6080267", "0.6080267", "0.606338", "0.6051716", "0.6048129", "0.60312235", "0.60272855", "0.5998844", "0.5996013", "0.5996013", "0.5966423", "0.59604585", "0.59604585", "0.595613", "0.5944324", "0.59384793", "0.59382486", "0.5929409", "0.59258807", "0.5899523", "0.58959746", "0.5855007", "0.5854416", "0.5853371", "0.5825588", "0.5816898", "0.58159333", "0.5787815", "0.5756396", "0.57493097", "0.5747357", "0.57433784", "0.57401305", "0.5733244", "0.5702421", "0.5701651", "0.5684754", "0.56818074", "0.5667961", "0.5657898", "0.5650163", "0.5645091", "0.56365716", "0.56332684", "0.559866", "0.55907804", "0.5581376", "0.55806035", "0.55806035", "0.55794054", "0.5576034", "0.5571822", "0.55687565", "0.5566014", "0.55627066", "0.55570644", "0.55570644", "0.55533344", "0.5551019", "0.5547676", "0.5528538", "0.5516416", "0.54955864", "0.54879344", "0.54699886", "0.5460077", "0.54578245", "0.54509455", "0.5441958", "0.54409635", "0.54272324", "0.5420331", "0.54138124" ]
0.7135669
2
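The query above specifies the contract for the `speed` attribute: an integer from 0 (no animation) to 10 (fastest animated speed). A minimal sketch of a wrapper property enforcing that contract, delegating to an underlying object as in the positive document (the explicit validation is an assumption; the standard-library `turtle` module silently coerces out-of-range values instead of raising):

```python
class Turtle:
    """Sketch of a turtle wrapper with a validated speed property."""

    def __init__(self, turtle):
        self._turtle = turtle  # underlying object exposing speed()/speed(value)

    @property
    def speed(self):
        """Animation speed: 0 disables animation, 1 is slowest, 10 is fastest."""
        return self._turtle.speed()

    @speed.setter
    def speed(self, value):
        if not isinstance(value, int):
            raise TypeError('speed must be an int, not %s' % type(value).__name__)
        if not 0 <= value <= 10:
            raise ValueError('speed must be in 0..10, got %d' % value)
        self._turtle.speed(value)
```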
The color of this turtle. All subsequent draw commands (forward/back) draw using this color. If the color changes, it only affects future draw commands, not past ones.
def color(self): return self._color
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def color(self):\n assert False, 'Pen does not have a color; use pencolor or fillcolor'", "def get_color(self):\r\n return self.__color", "def color(self):\n return self.__color", "def get_color(self):\n return self.color", "def get_color(self):\r\n if self.color:\r\n return \"RED\"\r\n else:\r\n return \"BLACK\"", "def get_color(self):\n\n return self.color", "def get_color(self):\r\n return self._color", "def set_color(color):\r\n global _current_color\r\n _current_color = color", "def get_color(self):\n return self._color", "def get_color(self):\n return self._color", "def setColor(color):\n turtleTmp.color = color\n turtleTmp.penColor(color)", "def set_color(self, new_color):\n self.color = new_color", "def get_color(self):\n\n return self._color", "def set_color(self, color):\n pass", "def set_color(self, color):\n\t\tpass", "def _set_color(self, r):\n c = COLORS[self.color]\n r.setLineColor(c[0], c[1], c[2])\n r.setColor(c[0], c[1], c[2])", "def getColor(self):\n return self.__color", "def getColor(self):\n return self.__color", "def getColor(self):\n return self.__color", "def getColor(self):\r\n return self.color", "def getColor(self):\n return self.color", "def refresh_color(self):\n self.color = max(0, int(math.sqrt(self.vx ** 2\n + self.vy ** 2)) + 100)", "def color(self):\n return self.COLOR", "def color(self, color):\n #self._color = color\n new_color = \"{0}{1}{2}\".format(hex(int(color[0]))[2:].zfill(2),\n hex(int(color[1]))[2:].zfill(2),\n hex(int(color[2]))[2:].zfill(2))\n #self.log.info(\"RASPLes.color(%s : %s -> %s)\" % (self.number, color, new_color))\n #print(\"color(%s -> %s)\" % (self.number, new_color))\n try:\n self.current_color = new_color\n #self.strip.setPixelColor(int(self.number), self.current_color)\n self.strip.setPixelColorRGB(int(self.number), color[0], color[1], color[2])\n\n self.strip.updated = True\n except Exception as e:\n self.log.error(\"led update error\" + str(e))", "def highlight_color(self):\n return curses.color_pair(4) if self.cycling else curses.color_pair(2)", "def penColor( self ):\n return self._penColor", "def color(self):\n if self._simplecell:\n self.fetch()\n return self._color", "def set_color(self, color):\n self.color = color", "def next_color(self):\n if self._color_cycle is None:\n return self._theme.color\n return next(self._color_cycle)['color']", "def get_color(self):\n return \"yellow\"", "def changeColor( self ):\n\t\t\n\t\tx, y = self.position.xy\n\t\tself.color = ( int((x / WINDOW_X) * 128), int((x / WINDOW_X) * 128) + int((y / WINDOW_Y) * 128 ), int((y / WINDOW_Y) * 128))", "def _update_color(self, color):\n self.color = color", "def color(self):\n return self['color']", "def color(self):\n return 0x2f3136", "def getColor(self):\n return self._l[2]", "def change_color(self, color):\n self.color = color", "def get_color(self) -> str:\n return self.color", "def set_green(self):\n self.fill= Cell.FILLED_COLOR_BG\n self.draw()", "def reset_color():\n global CURRENT_COLOR\n CURRENT_COLOR = 0", "def pencolor(self, *args):\n if args:\n color = self._colorstr(args)\n if color == self._pencolor:\n return\n self.pen(pencolor=color)\n else:\n return self._color(self._pencolor)", "def get_color(self) -> str:\r\n return self.color", "def base_color(self):\n return curses.color_pair(3) if self.cycling else curses.color_pair(1)", "def pencolor(self):\n return self._pencolor", "def setColor(self, color):\n self.__color = color", "def getCurrentColor(self):\n if self.__currentnode__ is None:\n return None\n else:\n return 
self.__currentnode__.getPlayer().getColor()", "def color(self, color=0):\n if color not in [0, 1, 2, 3, 4, 5, 6, 7]:\n raise ValueError('color must be a positive integer less than and 8 or 0')\n else:\n self._write(self.__class__.__ESC + 'r' + chr(color))", "def set_color(self, color):\n self._color = color", "def get_colour(self):\n return self.colour", "def brighter_switch(turtle, color):\n turtle.fillcolor(color + \"1\")", "def color(self, *args):\n if args:\n l = len(args)\n if l == 1:\n pcolor = fcolor = args[0]\n elif l == 2:\n pcolor, fcolor = args\n elif l == 3:\n pcolor = fcolor = args\n pcolor = self._colorstr(pcolor)\n fcolor = self._colorstr(fcolor)\n self.pen(pencolor=pcolor, fillcolor=fcolor)\n else:\n return self._color(self._pencolor), self._color(self._fillcolor)", "def getColor(self):\n return self.side_color", "def _set_color(color):\r\n\r\n return FontColor.get_color(color, add_reset=False)", "def getTweenColor(self, factor):\n\n pass", "def _get_color(self):\n return self.__color", "def _get_color(self):\n return self.__color", "def _get_color(self):\n return self.__color", "def _get_color(self):\n return self.__color", "def set_color(self, color):\n self.light_color = color\n for f in self.color_change_cb:\n f(self)", "def dimmer_switch(turtle, color):\n turtle.fillcolor(color + \"4\")", "def resetColor(self):\n self.setColor(255, 255, 255 ,255)", "def update_color(self):\r\n \r\n \r\n colorset = self.colorset\r\n \r\n self.grfx[0].colorset = colorset\r\n pass", "def get_color(self, point):\n return self._color.dup()", "def change_color(self, color):\r\n if color == \"black\":\r\n self.color = \"white\"\r\n self.canvas.itemconfig(self.ball, fill='white')\r\n else:\r\n self.color = \"black\"\r\n self.canvas.itemconfig(self.ball, fill='black')", "def color(self):\n if \"color\" in self._prop_dict:\n return self._prop_dict[\"color\"]\n else:\n return None", "def setNewColor(self, color: QColor):\n self.drawNewColor = color", "def GetColour(self):\r\n\r\n return self._colour", "def color(self) -> Optional[str]:\n return self.colour", "def color(self, value: tuple) -> None:\n if value in Color.PALETTE:\n self._color = value", "def getColor(self, visited):\r\n if visited == \"n\":\r\n button_color = [0.4, 0.6, 0, 1]\r\n else:\r\n button_color = [0.4, 0.7, 0.9, 1]\r\n return button_color", "def setColor(self, color):\n self.point_color = color\n self.side_color = color\n self.area_color = color", "def _updateColor(self, color):\n primitive = self._getScenePrimitive()\n if len(primitive.children) != 0:\n primitive.children[0].setAttribute('color', color)", "def color(self):\n return self.container['color']", "def get_color(self):\n return COLOR_DICT[self.element]", "def _color(self):\n # get my renderer\n renderer = self.renderer\n # sign on\n yield \"\"\n yield renderer.commentLine(\"color support\")\n\n # sniff the terminal type\n yield renderer.commentLine(\"initialize the TERM environment variable\")\n yield from renderer.setu(name=\"TERM\", value=\"dumb\")\n\n # build a conditional assignment block so we can turn color off on terminals that\n # don't understand ANSI control sequences\n yield from renderer.ifeq(\n op1=renderer.value(var=\"TERM\"),\n op2=renderer.builtin(\n func=\"findstring\",\n args=[renderer.value(var=\"TERM\"), self._ansiTerminals],\n ),\n onTrue=self._ansiCSI(),\n onFalse=self._dumbCSI(),\n )\n\n # render the color database\n # basic colors\n yield \"\"\n yield renderer.commentLine(\"basic colors\")\n yield from renderer.set(\n 
name=\"palette.normal\", value=renderer.call(func=\"csi3\", args=[\"0\"])\n )\n yield from renderer.set(\n name=\"palette.black\", value=renderer.call(func=\"csi3\", args=[\"0;30\"])\n )\n yield from renderer.set(\n name=\"palette.red\", value=renderer.call(func=\"csi3\", args=[\"0;31\"])\n )\n yield from renderer.set(\n name=\"palette.green\", value=renderer.call(func=\"csi3\", args=[\"0;32\"])\n )\n yield from renderer.set(\n name=\"palette.brown\", value=renderer.call(func=\"csi3\", args=[\"0;33\"])\n )\n yield from renderer.set(\n name=\"palette.blue\", value=renderer.call(func=\"csi3\", args=[\"0;34\"])\n )\n yield from renderer.set(\n name=\"palette.purple\", value=renderer.call(func=\"csi3\", args=[\"0;35\"])\n )\n yield from renderer.set(\n name=\"palette.cyan\", value=renderer.call(func=\"csi3\", args=[\"0;36\"])\n )\n yield from renderer.set(\n name=\"palette.light-gray\", value=renderer.call(func=\"csi3\", args=[\"0;37\"])\n )\n\n # bright colors\n yield \"\"\n yield renderer.commentLine(\"bright colors\")\n yield from renderer.set(\n name=\"palette.dark-gray\", value=renderer.call(func=\"csi3\", args=[\"1;30\"])\n )\n yield from renderer.set(\n name=\"palette.light-red\", value=renderer.call(func=\"csi3\", args=[\"1;31\"])\n )\n yield from renderer.set(\n name=\"palette.light-green\", value=renderer.call(func=\"csi3\", args=[\"1;32\"])\n )\n yield from renderer.set(\n name=\"palette.yellow\", value=renderer.call(func=\"csi3\", args=[\"1;33\"])\n )\n yield from renderer.set(\n name=\"palette.light-blue\", value=renderer.call(func=\"csi3\", args=[\"1;34\"])\n )\n yield from renderer.set(\n name=\"palette.light-purple\", value=renderer.call(func=\"csi3\", args=[\"1;35\"])\n )\n yield from renderer.set(\n name=\"palette.light-cyan\", value=renderer.call(func=\"csi3\", args=[\"1;36\"])\n )\n yield from renderer.set(\n name=\"palette.white\", value=renderer.call(func=\"csi3\", args=[\"1;37\"])\n )\n\n # pretty colors\n yield \"\"\n yield renderer.commentLine(\"pretty colors\")\n yield from renderer.set(\n name=\"palette.amber\",\n value=renderer.call(func=\"csi24\", args=[\"38\", \"255\", \"191\", \"0\"]),\n )\n yield from renderer.set(\n name=\"palette.lavender\",\n value=renderer.call(func=\"csi24\", args=[\"38\", \"192\", \"176\", \"224\"]),\n )\n yield from renderer.set(\n name=\"palette.sage\",\n value=renderer.call(func=\"csi24\", args=[\"38\", \"176\", \"208\", \"176\"]),\n )\n yield from renderer.set(\n name=\"palette.steel-blue\",\n value=renderer.call(func=\"csi24\", args=[\"38\", \"70\", \"130\", \"180\"]),\n )\n\n # diagnostics\n yield \"\"\n yield renderer.commentLine(\"diagnostics\")\n yield from renderer.set(\n name=\"palette.info\", value=renderer.call(func=\"csi8\", args=[\"38\", \"28\"])\n )\n yield from renderer.set(\n name=\"palette.warning\", value=renderer.call(func=\"csi8\", args=[\"38\", \"214\"])\n )\n yield from renderer.set(\n name=\"palette.error\", value=renderer.call(func=\"csi8\", args=[\"38\", \"196\"])\n )\n yield from renderer.set(\n name=\"palette.debug\", value=renderer.call(func=\"csi8\", args=[\"38\", \"75\"])\n )\n yield from renderer.set(\n name=\"palette.firewall\", value=renderer.value(var=\"palette.light-red\")\n )\n\n # the default theme\n yield \"\"\n yield renderer.commentLine(\"the default theme\")\n yield from renderer.set(\n name=\"palette.asset\", value=renderer.value(var=\"palette.steel-blue\")\n )\n yield from renderer.set(\n name=\"palette.action\", value=renderer.value(var=\"palette.lavender\")\n )\n yield from renderer.set(\n 
name=\"palette.attention\", value=renderer.value(var=\"palette.purple\")\n )\n\n # all done\n return", "def red(self) -> float:\n return self._red", "def _get_color(self, r, g, b):\n clr = (r, g, b)\n return clr", "def _set_color(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name=\"color\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='uint32', is_config=False)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"color must be of a type compatible with uint32\"\"\",\n 'defined-type': \"uint32\",\n 'generated-type': \"\"\"YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name=\"color\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='uint32', is_config=False)\"\"\",\n })\n\n self.__color = t\n if hasattr(self, '_set'):\n self._set()", "def _set_color(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name=\"color\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='uint32', is_config=False)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"color must be of a type compatible with uint32\"\"\",\n 'defined-type': \"uint32\",\n 'generated-type': \"\"\"YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name=\"color\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='uint32', is_config=False)\"\"\",\n })\n\n self.__color = t\n if hasattr(self, '_set'):\n self._set()", "def _set_color(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name=\"color\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='uint32', is_config=False)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"color must be of a type compatible with uint32\"\"\",\n 'defined-type': \"uint32\",\n 'generated-type': \"\"\"YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name=\"color\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='uint32', is_config=False)\"\"\",\n 
})\n\n self.__color = t\n if hasattr(self, '_set'):\n self._set()", "def _set_color(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name=\"color\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='uint32', is_config=False)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"color must be of a type compatible with uint32\"\"\",\n 'defined-type': \"uint32\",\n 'generated-type': \"\"\"YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name=\"color\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='uint32', is_config=False)\"\"\",\n })\n\n self.__color = t\n if hasattr(self, '_set'):\n self._set()", "def color(self):\n return self.settings['color']", "def set_color(self, c, color, draw=True):\n \n if c == self.maze.get_start_cell() or c == self.maze.get_end_cell():\n return\n self.cvs.itemconfig(self.cvs_cells[c], fill=color)\n\n if draw: self.draw()", "def set_color(self, color: str):\n self.color = color", "def set_trace_color(self, color): #py:UR.set_trace_color\n RUR._UR.set_trace_color_(self.body, color)", "def color(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"color\")", "def color(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"color\")", "def color(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"color\")", "def color(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"color\")", "def color(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"color\")", "def color(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"color\")", "def color(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"color\")", "def color(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"color\")", "def color(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"color\")", "def color(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"color\")", "def color(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"color\")", "def color(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"color\")", "def color(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"color\")", "def color(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"color\")", "def color(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"color\")" ]
[ "0.69709", "0.6779695", "0.6763572", "0.672866", "0.6717508", "0.67131394", "0.67123073", "0.6694104", "0.6650042", "0.6650042", "0.6635534", "0.66264987", "0.66009176", "0.6586705", "0.65372753", "0.65127057", "0.6490231", "0.6490231", "0.6490231", "0.6489274", "0.6463869", "0.6455739", "0.6454981", "0.64528877", "0.6451804", "0.64098674", "0.6397187", "0.6397054", "0.6390269", "0.63690245", "0.6355759", "0.6354927", "0.63419545", "0.63322735", "0.6323056", "0.6304752", "0.6282236", "0.62769055", "0.6274787", "0.62686175", "0.6265358", "0.62584025", "0.6256934", "0.6246539", "0.62299275", "0.62186694", "0.6208475", "0.62009037", "0.61730236", "0.6166651", "0.6160597", "0.6144013", "0.6143887", "0.612921", "0.612921", "0.612921", "0.612921", "0.6115715", "0.61080337", "0.6099286", "0.6078236", "0.6077497", "0.6059883", "0.60532546", "0.6051233", "0.60355824", "0.6033163", "0.60174865", "0.60116667", "0.59930533", "0.59623986", "0.59536475", "0.595259", "0.59498465", "0.594472", "0.59302294", "0.59244615", "0.59244615", "0.59244615", "0.59244615", "0.59112304", "0.5903672", "0.58977747", "0.58962995", "0.58930874", "0.58930874", "0.58930874", "0.58930874", "0.58930874", "0.58930874", "0.58930874", "0.58930874", "0.58930874", "0.58930874", "0.58930874", "0.58930874", "0.58930874", "0.58930874", "0.58930874" ]
0.6654484
9
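The query documents a `color` attribute whose changes affect only future draw commands. A minimal sketch under that reading: the setter records the value (mirroring the `_color` attribute in the positive document) and re-points the pen, so lines already drawn keep their original color. Apart from `_color`, the names here are illustrative:

```python
class Turtle:
    """Sketch of a color property that only affects subsequent strokes."""

    def __init__(self, turtle, color='red'):
        self._turtle = turtle
        self._color = color
        self._turtle.pencolor(color)

    @property
    def color(self):
        return self._color

    @color.setter
    def color(self, value):
        # Previously drawn segments keep their color; only the pen changes.
        self._color = value
        self._turtle.pencolor(value)
```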
Indicates whether the turtle's icon is visible. Drawing commands will still work while the turtle icon is hidden. There will just be no indication of the turtle's current location on the screen.
def visible(self): return self._turtle.isvisible()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def is_visible(self):", "def is_visible(self):\n return self._visible", "def isVisible( self ):\n layer = self.layer()\n if ( layer and not layer.isVisible() ):\n return False\n# \n# if ( self.isIsolateHidden() ):\n# return False\n# \n return self._visible", "def is_visible(self):\n return self.container['is_visible']", "def is_visible(self):\n return self.proto.display_type == DISPLAY_TYPE.Visible.value", "def is_ruler_visible(self):\n return self.container['is_ruler_visible']", "def is_visible(self):\n return self.rect.x < self.screen_rect.width", "def is_visible(self, path):\n return True", "def is_visible(self):\n return self.real > 0", "def isVisible(self):\n\t\treturn True", "def is_visible(self):\n return self._currently_shown", "def is_element_visible(self):\n if self.web_element.is_displayed():\n return True\n else:\n return False", "def set_visible(self):\n\t\tself.hide()\n\t\tself.__sys_tray_icon.setVisible(True)", "def is_visible(self, position, size=0):\n # return True\n size /= self.scale # size is in pixel\n in_x = (self.focus.x + self.offset.x / self.scale - size <=\n position.x <=\n self.focus.x - self.offset.x / self.scale + size)\n in_y = (self.focus.y + self.offset.y / self.scale - size <=\n position.y <=\n self.focus.y - self.offset.y / self.scale + size)\n # if name == \"earth\":\n # print(\"{:+e} {:+e} {}\".format(self.focus.y + self.offset2.y\n # , position.y, in_y))\n # print(\"{:+e} {:+e}\".format(self.focus.x, self.focus.y))\n return in_x and in_y", "def visible(self):\n return self._visible", "def visible(self):\n return self._visible", "def get_visible(self):\n return self._visible", "def is_visible(self):\n try:\n return self.element.is_displayed()\n except (NoSuchElementException,\n ElementNotVisibleException,\n StaleElementReferenceException):\n return False", "def isShown(self):\n return self.shown", "def inspectedNodeIsVisible(self):\n return self._inspected_node_is_visible", "def is_visible ( self ):\n return not self.is_hidden and (\n self.priority is None or self.priority >= 0\n )", "def is_alive(self):\r\n return self.visible", "def show(self):\r\n if self.visible == 1 and time() - self.lastMotion > self.delay:\r\n self.visible = 2\r\n if self.visible == 2:\r\n self.deiconify()", "def show(self):\r\n if self.visible == 1 and time() - self.lastMotion > self.delay:\r\n self.visible = 2\r\n if self.visible == 2:\r\n self.deiconify()", "def IsShown(self):\r\n\r\n return self._shown", "def show(self):\n if self.visible == 1 and time() - self.lastMotion > self.delay:\n self.visible = 2\n if self.visible == 2:\n self.deiconify()", "def is_hidden():\n return False", "def is_hidden():\n return False", "def is_visible(self, url=''):\n return bool(url)", "def IsHidden(self):\r\n\r\n return self._hidden", "def GetGripperVisible(self):\r\n\r\n return self._gripper_visible", "def is_hidden(self):\n return self.has_label(HIDDEN_LABEL)", "def is_visible(self):\n return self.window.active_panel() == self.full_name", "def visible(self):\n return ctypes.windll.user32.IsWindowVisible(self.hwnd)", "def is_indicator():\n return True", "def show( self ):\n if self.visible == 1:#ohnheiser hack and time() - self.lastMotion > self.delay:\n self.visible = 2\n if self.visible == 2:\n self.deiconify()", "def isVisible(self, p_int): # real signature unknown; restored from __doc__\n return False", "def isVisible(self, p_int): # real signature unknown; restored from __doc__\n return False", "def is_visible(self):\n if self._namespace and self._namespace.is_anonymous():\n return 
True\n return self._rawdoc.get_inherited_visibility() != DocType.none", "def is_visible(self, locator, timeout=15):\n try:\n ui.WebDriverWait(self.driver, timeout).until(EC.visibility_of_element_located((By.CSS_SELECTOR, locator)))\n return True\n except TimeoutException:\n return False", "def IsShown(self):\r\n \r\n return not self.HasFlag(self.optionHidden)", "def visible(self):\n return -PipePair.WIDTH < self.x < WIN_WIDTH", "def is_outline_shown(self):\n return self.container['is_outline_shown']", "def _is_visible(self, key) -> bool:\n return self._get_DecoSetting(key).visible", "def visible(self) -> bool:\n try:\n return bool(self.driver.wait_until_all_visible(*self.ROOT_LOCATOR))\n except WebDriverException:\n return False", "def is_ruler_visible(self, is_ruler_visible):\n\n self.container['is_ruler_visible'] = is_ruler_visible", "def is_visible(self, x, y) :\n\t\tres_x = (x > self.x_min) and (x < self.x_max)\n\t\t# print 'res_x : {0}, x : {1}, x_min : {2}, x_max:{3}'.format(res_x, x, self.x_min, self.x_max)\n\t\tres_y = (y > self.y_min) #and (y < self.y_max)\n\t\treturn res_x and res_y", "def visible(self, hipid):\n s = self.hip_stars[hipid]\n if s[3]<min(self.inner_dec, self.outer_dec): return False\n return s[3]<=max(self.inner_dec, self.outer_dec)", "def is_visible(self, timeout=None):\n try:\n self.visibility_of_element_located(timeout)\n except TimeoutException:\n return False\n return True", "def is_visible(self):\n return self.visible_date < timezone.now()", "def visible(self, show):", "def _is_visible(self, point):\n return point[0] > 0 and point[0] < 1 and point[1] > 0 and point[1] < 1", "def is_spinner_invisible(self):\n self.q(css='#spinner').first.click()\n self.wait_for_element_invisibility('#anim', 'Button Output is Visible')", "def XPIsWidgetVisible(inWidget):\n pass", "def __bool__(self):\n return _osgAnimation.mapVertexInfluence___bool__(self)", "def visible(self):\r\n return self.column.visible", "def is_visible(self, is_visible):\n\n self.container['is_visible'] = is_visible", "def hidden():\n return False", "def check_visibility(self):\r\n\r\n for gs in self.ground_stations:\r\n if self.visible ^ (elevation_dot_product(self.r_ecef,self.ground_stations[gs][1],self.earth) > 0.0):\r\n self.visible ^= 1\r\n self.gs_id = self.ground_stations[gs][0]\r\n return True", "def drawmode(self):\n return self._turtle.isdown()", "def IsHidden(self):\n return self._common_type.IsHidden()", "def IsVisible(self, *args):\n return _XCAFDoc.XCAFDoc_ColorTool_IsVisible(self, *args)", "def has_visible_entity(self):\n ret = False\n for e in self:\n if e.is_visible() == True:\n ret = True\n break\n return ret", "def public(self):\n return not self.hidden", "def show( self ):\n if self.visible == 1 and time() - self.lastMotion > self.delay:\n self.visible = 2\n if self.visible == 2 and self.msgVar.get()!='':\n self.deiconify()", "def isdrawn(self):\n return hasattr(self, 'drawn')", "def isstart(self) -> bool:\n if len(self._pile) != self._pos + 1:\n return False\n visible_count = 0\n hidden_count = 0\n for c_card in self._pile:\n if c_card.visible:\n visible_count += 1\n else:\n hidden_count += 1\n return hidden_count == self._pos and visible_count == 1", "def IsInstanceVisible(self, *args):\n return _XCAFDoc.XCAFDoc_ColorTool_IsInstanceVisible(self, *args)", "def is_visible(self, name):\n return self.q(css=\"div.{}\".format(name)).first.visible", "def hidden(self):\n return self._hidden", "def vis(self):\n \treturn self._vis", "def transparent(self):\r\n return not not 
self.prototype.transparent", "def is_displayed(self):\n return len(self._find_all_by_locator()) > 0", "def off_screen(self):\n return self._x < 0", "def ensure_visible(self):\n self.set_visible(True)", "def IsVisible(self, *args):\n return _XCAFDoc.XCAFDoc_LayerTool_IsVisible(self, *args)", "def is_shaded(self):\n\t\tif self.terrain == \"#\":\n\t\t\treturn True\n\t\treturn False", "def is_iconic(h_wnd):\n _is_iconic = WINDLL.user32.IsIconic\n _is_iconic.argtypes = [HWND]\n _is_iconic.restype = bool\n\n return _is_iconic(h_wnd)", "def transparent(self):\r\n return not not self.model.prototype.transparent", "def rf_btnsVisibility(self):\n self._setBtnVis(self.pIma, self.bImage)\n self._setBtnVis(self.pSeq, self.bSequence)\n self._setBtnVis(self.pMov, self.bMovie)\n self._setBtnVis(self.pXplor, self.bExplorer)\n self._setBtnVis(self.pXterm, self.bXterm)\n self._setBtnVis(self.pGraph, self.bGrapher)", "def hidden(self) -> bool:\n return False", "def hidden(self) -> bool:\n return False", "def IsVisible(self, item):\r\n\r\n # An item is only visible if it's not a descendant of a collapsed item\r\n parent = item.GetParent()\r\n\r\n while parent:\r\n \r\n if not parent.IsExpanded():\r\n return False\r\n \r\n parent = parent.GetParent()\r\n \r\n startX, startY = self.GetViewStart()\r\n clientSize = self.GetClientSize()\r\n\r\n rect = self.GetBoundingRect(item)\r\n \r\n if not rect:\r\n return False\r\n if rect.GetWidth() == 0 or rect.GetHeight() == 0:\r\n return False\r\n if rect.GetBottom() < 0 or rect.GetTop() > clientSize.y:\r\n return False\r\n if rect.GetRight() < 0 or rect.GetLeft() > clientSize.x:\r\n return False\r\n\r\n return True", "def set_visible(self, is_visible):\n self._data['is_visible'] = 1 if is_visible else 0", "def hidden(self):\n return self._hidden", "def is_element_display(self, selector):\n return True if self.get_element(selector).is_displayed() else False", "def is_hidden(self, path):\n return False", "def isdown(self):\n return self._drawing", "def is_on(self):\n return self.coordinator.data[self.info_type] == \"red\"", "def __showIndicator(self, view, pos):\n hit = view.page().hitTestContent(pos)\n \n if hit.isContentEditable() or not hit.linkUrl().isEmpty():\n return False\n \n jsSource = \"\"\"\n var out = {\n vertical:\n window.innerWidth > document.documentElement.clientWidth,\n horizontal:\n window.innerHeight > document.documentElement.clientHeight\n };\n out;\"\"\"\n \n res = view.page().execJavaScript(jsSource)\n if res is None:\n return False\n \n vertical = res[\"vertical\"]\n horizontal = res[\"horizontal\"]\n if not vertical and not horizontal:\n return False\n \n if vertical and horizontal:\n self.__indicator.setPixmap(\n UI.PixmapCache.getPixmap(\"scrollAll.png\"))\n elif vertical:\n self.__indicator.setPixmap(\n UI.PixmapCache.getPixmap(\"scrollVertical.png\"))\n else:\n self.__indicator.setPixmap(\n UI.PixmapCache.getPixmap(\"scrollHorizontal.png\"))\n \n self.__view = view\n p = QPoint(\n pos.x() - self.__indicator.pixmap().width() // 2,\n pos.y() - self.__indicator.pixmap().height() // 2\n )\n \n self.__indicator.setParent(self.__view)\n self.__indicator.move(p)\n self.__indicator.show()\n \n self.__scroller.setPage(view.page())\n \n self.__view.inputWidget().grabMouse()\n QApplication.setOverrideCursor(Qt.ArrowCursor)\n \n return True", "def show(self):\n if not self.shown and not self.flag:\n self.shown = True\n self.configure(image=Tile.images[self.count])\n return -1 if self.mine else 1\n return 0", "def is_gridlines_visible(self):\n return 
self.container['is_gridlines_visible']", "def top_visible(self) -> bool:\n return self.vertical_scroll == 0", "def ison(self):\n return bool(self.pin.state) if self.pinishigh else not bool(self.pin.state)", "def isObscuredBy(self, QGraphicsItem): # real signature unknown; restored from __doc__\n return False", "def displayed(self, locator, timeout=0):\n try:\n WebDriverWait(self.browser, timeout).until(EC.visibility_of_element_located(locator))\n return True\n except ex.TimeoutException:\n return False", "def on_fruit(self):\r\n if self.grid_pos in self.app.fruit:\r\n if int(self.pix_pos.x+TOP_BOTTOM_BUFFER//2) % self.app.cell_width == 0:\r\n if self.direction == vec(1, 0) or self.direction == vec(-1, 0):\r\n return True\r\n # in the x-direction \r\n\r\n if int(self.pix_pos.y+TOP_BOTTOM_BUFFER//2) % self.app.cell_height == 0:\r\n if self.direction == vec(0, 1) or self.direction == vec(0, -1):\r\n return True\r\n # in the y-direction\r\n\r\n return False", "def at(self) -> bool:\n\n return 'step_active' in self.__get_step_2_div().get_attribute(\"class\")", "def is_win_dispute_button_present(self):\n return self.is_element_present(self.win_dispute_button_locator)" ]
[ "0.71561986", "0.6992037", "0.69445086", "0.6929709", "0.689857", "0.6858378", "0.67312574", "0.6723745", "0.6712027", "0.66735023", "0.6668584", "0.66361713", "0.66052437", "0.6484988", "0.648333", "0.648333", "0.64757276", "0.6454804", "0.631289", "0.6297589", "0.62900037", "0.6249281", "0.6192523", "0.6192523", "0.6188453", "0.6182069", "0.61766773", "0.61766773", "0.6164323", "0.6163558", "0.61633116", "0.6158777", "0.61393315", "0.6121427", "0.6089364", "0.6054632", "0.6047071", "0.6047071", "0.60442865", "0.6038544", "0.6031064", "0.6027826", "0.60221237", "0.59724027", "0.5966418", "0.59336734", "0.5927681", "0.5915021", "0.5898448", "0.5895594", "0.5884855", "0.588016", "0.578859", "0.5787606", "0.57794523", "0.5776339", "0.5771948", "0.57587004", "0.5738426", "0.57342565", "0.5733804", "0.5721293", "0.57137686", "0.5710352", "0.5693452", "0.56694615", "0.5667519", "0.565011", "0.564942", "0.564556", "0.56208736", "0.56064177", "0.559015", "0.5567435", "0.5559694", "0.5555692", "0.55189157", "0.5517251", "0.55037636", "0.55031604", "0.5501845", "0.5501845", "0.5493766", "0.5485195", "0.54812187", "0.54464793", "0.5446087", "0.54420775", "0.5434061", "0.54315454", "0.542865", "0.541427", "0.54099184", "0.5390009", "0.53888714", "0.53839517", "0.5363535", "0.5363083", "0.535682" ]
0.7854587
1
Indicates whether the turtle is in draw mode. All drawing calls are active if and only if this mode is True.
def drawmode(self):
    return self._turtle.isdown()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def isdrawn(self):\n return hasattr(self, 'drawn')", "def setDrawingMode(self):\n pass", "def get_drawing_mode(self) -> int:\n return self._drawing_mode", "def draw(self, canvas) -> bool:\n return False", "def _set_draw_mode(draw_mode):\n###############################################################################\n global _draw_mode\n _draw_mode = draw_mode", "def conditionsAreMetForDrawing(self):\n\t\tcurrentController = self.controller.view().window().windowController()\n\t\tif currentController:\n\t\t\ttool = currentController.toolDrawDelegate()\n\t\t\ttextToolIsActive = tool.isKindOfClass_( NSClassFromString(\"GlyphsToolText\") )\n\t\t\thandToolIsActive = tool.isKindOfClass_( NSClassFromString(\"GlyphsToolHand\") )\n\t\t\tif not textToolIsActive and not handToolIsActive: \n\t\t\t\treturn True\n\t\treturn False", "def draw (self, screen):\n drew = bool(self.draw_fn(self, screen, self.dirty))\n self.dirty = False\n return drew", "def isdown(self):\n return self._drawing", "def draw2DOutlineEnabled(self):\n\n opts = self.opts\n overlay = self.overlay\n\n return ((overlay.trimesh is not None) and\n (opts.outline or opts.vertexData is not None))", "def GetDrawOption(self):\n return self._drawoption", "def draw(self):\n return self._draw", "def on_draw(self):\n return self._on_draw", "def setPrimDrawMode(self, primPath, drawMode):\n prim = self._stage.GetPrimAtPath(primPath)\n if not primPath.IsValid():\n return False\n\n if drawMode == self.DrawMode.inherit:\n prim.RemoveProperty(self.drawModeAttribute)\n return True\n if drawMode == self.DrawMode.geometry:\n prim.GetAttribute(self.drawModeAttribute).Clear()\n return True\n if drawMode == self.DrawMode.boundingBox:\n prim.GetAttribute(self.drawModeAttribute).Set(Vt.Token(\"bounds\"))\n return True\n\n return False", "def get_active(self):\n if hasattr(self, 'canvas'):\n return True\n else:\n return False", "def can_draw(self,point):\n if point <= 0:\n return False\n else:\n return True", "def _sketch_mode(self):\r\n self._mode_select(1)", "def check_mode(self):\n if self.proximity.check_press():\n self.cycle_mode()\n return self.mode", "def save_drawing_if_necessary(self):\n\n app_doc_data = AppDocData.instance()\n if app_doc_data.activeDrawing and app_doc_data.activeDrawing.modified:\n #if QMessageBox.Yes == QMessageBox.question(self, self.tr(\"Question\"),\n # self.tr(\"Do you want to save drawing?\"),\n # QMessageBox.Yes | QMessageBox.No):\n # self.actionSaveCliked()\n # return True\n if QMessageBox.Ignore == QMessageBox.question(self, self.tr('Continue?'),\n self.tr('Changes may not have been saved.'),\n QMessageBox.Ignore | QMessageBox.Cancel):\n return False\n return True", "def draw (self):\n screen = self.screen\n dirty = False\n for display in self.displays:\n dirty |= display.draw(screen)\n return dirty", "def is_graphic_driver(self):\n if self.class_id == \"0x03\":\n return True\n else:\n return False", "def _isoff(self):\n return self.dp.state()==PyTango.DevState.OFF", "def checkDraw(self) -> D:\n if self.board.positions.count(\" \") == 0:\n print(\"DRAW!\")\n return True", "def drawCells(self):\r\n self.drawing = not self.drawing\r\n if self.drawing:\r\n self.draw_button['text'] = \"No Draw\"\r\n else:\r\n self.draw_button['text'] = \"Draw\"", "def game_draw(self):\n pass", "def is_rendering_enabled():\n return _rendering_enabled", "def is_canvas(self):\n return self.canvas._isCanvas", "def toggle_draw_axes(self):\n if self.draw_axes:\n self.draw_axes = False\n else:\n self.draw_axes = True\n self.redraw()", "def 
toggle_draw_axes(self):\n if self.draw_axes:\n self.draw_axes = False\n else:\n self.draw_axes = True\n self.redraw()", "def draw(_user_id):\n _board = boards[_user_id]\n return _board.can_claim_draw()", "def draw(self):\n\n for row in self._board:\n for slot in row:\n if slot == 0:\n return False\n print \"It's a draw!\"\n return True", "def draw(self): # helper function to incorrect_guess()\n try:\n for draw_method in self.turtle_drawings[self.attempts]:\n method = getattr(Donatello, draw_method)\n method()\n if self.attempts > 10:\n raise Exception(KeyError)\n except KeyError:\n print(\"missing key in self.turtle_drawings\")\n return False", "def game_on(self):\n doc = self.documentation\n return (self.draw.accepted or doc[len(doc)-1].accepted) and (self.board.stones_set < self.board.max_nr_stones) and (self.board.score[opponent(self.draw.player)] > 0)", "def on_draw(self, da, ctx):\n self.referee.get_current_state().draw(ctx)", "def set_mode_point():\n global DRAW_MODE\n DRAW_MODE=\"point\"", "def on_draw(self, screen):\n raise NotImplemented(\"on_draw method should be implemented.\")", "def is_brush(self) -> bool:\n # get the brush mode context and return its value\n with self._is_brush.get_lock():\n return self._is_brush.value", "def visible(self):\n return self._turtle.isvisible()", "def visible(self):\n return self._turtle.isvisible()", "def _prepare_draw(self, view=None):\n return True", "def is_winning_state(self):\n return self.game.is_winning_state()", "def is_off(self):\n return self.value == OFF", "def draw(self, surface):\n checked_color = (0, 196, 0) if self.checked else pg.Color(\"white\")\n surface.fill(pg.Color(\"black\"), self.rect)\n surface.fill(self.color, self.rect.inflate(-2,-2))\n surface.fill(pg.Color(\"white\"), self.rect.inflate(-6,-6))\n surface.fill((205,205,205), self.rect.inflate(-8,-8))\n surface.fill(checked_color, self.select_rect)", "def draw(self, shape):\r\n if not self.s_flg:\r\n opengles.glEnable(GL_SCISSOR_TEST)\r\n opengles.glScissor(ctypes.c_int(int(0)), ctypes.c_int(self.y0),\r\n ctypes.c_int(self.ix), ctypes.c_int(1))\r\n self.s_flg = True\r\n shape.draw(shader=self.shader)", "def draw(self):\n return self._myCanvas.draw()", "def supportedDrawAPIs(self):\n return omr.MRenderer.kAllDevices", "def display_enabled(self):\n return self._display_mode == _LCD_DISPLAYON", "def draw():", "def paintGL(self):\n print \"Entereing paintGL\"\n if self.bDrawing == True:\n print \"Drawing was true so quit\"\n return\n \n \n self.bDrawing = True\n threadDrawGL = threading.Thread(target = self.drawGLScene)\n threadDrawGL.start()\n #self.drawGLScene()", "def isActive(self):\n return self.sides[0].isActive() and self.sides[1].isActive()", "def _draw(self):\r\n \r\n if self.active:\r\n self.surface = self.activeSurface # Set active surface to be displayed.\r\n else:\r\n self.surface = self.passiveSurface # Set passive surface to be displayed.\r", "def player_draw():\n print(\"\\nWhat would you like to do?\\n\")\n number_print(TURN_OPTIONS())\n user_decision = input(\"Enter the number of your decision: \")\n return user_decision == \"1\"", "def draw_animation(self, canvas, animation_tick) -> bool:\n return False", "def _is_device_active(self):\n return self.power_mode == STATE_ON", "def setup_draw(self):\n pass", "def is_on(self) -> bool:\n return self._zone.data[\"mode\"] == \"override\" and self._zone.data[\"setpoint\"]", "def needShader(self):\n return (self.threedee or\n (self.draw2DOutlineEnabled() and\n self.opts.vertexData is not None))", "def 
is_robot_in_canvas(self, robot):\n return robot in self.__robots", "def eco_mode_enabled(self) -> bool:\n return self._device_info[\"EcoMode\"] == \"on\"", "def draw (self):\n screen = self.screen\n dirty = False\n for z, displays in self.layers.iteritems():\n for display in displays:\n drew = display.draw(screen)\n # if made changes to the surface\n if drew:\n # set any displays that overlap this one dirty\n for d in display.overlapped:\n d.dirty = True\n dirty |= drew\n return dirty", "def isOnCanvas(self, x, y):\n return 0 <= x < self.width and 0 <= y < self.height", "def IsVisualMode(self):\n return self.mode == ViKeyHandler.VISUAL", "def is_on(self):\n if self._switch_type == \"record_motion\":\n return self._camera_data[\"recording_mode\"] == TYPE_RECORD_MOTION\n elif self._switch_type == \"record_always\":\n return self._camera_data[\"recording_mode\"] == TYPE_RECORD_ALWAYS\n elif self._switch_type == \"record_smart\":\n return self._camera_data[\"recording_mode\"] == TYPE_RECORD_SMARTDETECT\n elif self._switch_type == \"ir_mode\":\n return self._camera_data[\"ir_mode\"] == self._ir_on_cmd\n elif self._switch_type == \"hdr_mode\":\n return self._camera_data[\"hdr_mode\"] is True\n elif self._switch_type == \"high_fps\":\n return self._camera_data[\"video_mode\"] == TYPE_HIGH_FPS_ON\n else:\n return self._camera_data[\"status_light\"] == \"True\"", "def _stroke_mode(self):\r\n if not self.color:\r\n self._color_change_mode()\r\n self.input_scene.get_stk_color(self.color)\r\n self._mode_select(2)", "def __bool__(self):\n context, active_obj, actual_mode, mode = self.get_context()\n if not mode: return False\n \n if mode == 'OBJECT':\n return bool(context.selected_objects)\n elif mode == 'EDIT_MESH':\n mesh = active_obj.data\n if actual_mode == 'EDIT_MESH':\n return bool(mesh.total_vert_sel)\n else:\n return any(item.select for item in mesh.vertices)\n elif mode in {'EDIT_CURVE', 'EDIT_SURFACE'}:\n for spline in active_obj.data.splines:\n for item in spline.bezier_points:\n if (item.select_control_point or\n item.select_left_handle or\n item.select_right_handle):\n return True\n for item in spline.points:\n if item.select:\n return True\n elif mode == 'EDIT_METABALL':\n return bool(active_obj.data.elements.active)\n elif mode == 'EDIT_LATTICE':\n return any(item.select for item in active_obj.data.points)\n elif mode == 'EDIT_ARMATURE':\n return any(item.select_head or item.select_tail\n for item in active_obj.data.edit_bones)\n elif mode == 'POSE':\n return any(item.select for item in active_obj.data.bones)\n elif mode == 'PARTICLE':\n # Theoretically, particle keys can be selected,\n # but there seems to be no API for working with this\n pass\n else:\n pass # no selectable elements in other modes\n \n return False", "def texture_mode_enabled():\n for area in bpy.context.screen.areas:\n if area.type == \"VIEW_3D\":\n for space in area.spaces:\n if space.type == \"VIEW_3D\":\n if space.viewport_shade == \"TEXTURED\":\n return True\n elif (space.viewport_shade == \"SOLID\" and\n space.show_textured_solid):\n return True\n return False", "def _drawturtle(self):\n screen = self.screen\n shape = screen._shapes[self.Myturtle.shapeIndex]\n ttype = shape._type\n titem = self.Myturtle._item\n if self._shown and screen._updatecounter == 0 and screen._tracing > 0:\n self._hidden_from_screen = False\n tshape = shape._data\n if ttype == \"polygon\":\n if self._resizemode == \"noresize\": w = 1\n elif self._resizemode == \"auto\": w = self._pensize\n else: w =self._outlinewidth\n shape = 
self._polytrafo(self._getshapepoly(tshape))\n fc, oc = self._fillcolor, self._pencolor\n screen._drawpoly(titem, shape, fill=fc, outline=oc,\n width=w, top=True)\n elif ttype == \"image\":\n screen._drawimage(titem, self._position, tshape)\n elif ttype == \"compound\":\n for item, (poly, fc, oc) in zip(titem, tshape):\n poly = self._polytrafo(self._getshapepoly(poly, True))\n screen._drawpoly(item, poly, fill=self._cc(fc),\n outline=self._cc(oc), width=self._outlinewidth, top=True)\n else:\n if self._hidden_from_screen:\n return\n if ttype == \"polygon\":\n screen._drawpoly(titem, ((0, 0), (0, 0), (0, 0)), \"\", \"\")\n elif ttype == \"image\":\n screen._drawimage(titem, self._position,\n screen._shapes[\"blank\"]._data)\n elif ttype == \"compound\":\n for item in titem:\n screen._drawpoly(item, ((0, 0), (0, 0), (0, 0)), \"\", \"\")\n self._hidden_from_screen = True", "def draw(self, draw_surface):\n\n # The menu frame and how many surf (frame that appears in the middle\n # of the bottom of the screen).\n draw_surface.blit(self.menu_frame, (176, 112))\n draw_surface.blit(self.how_many_surf, (40, 115))\n\n if self.confirm_toss_response_dialogue is None and \\\n self.threw_away_dialogue is None:\n self.quantity_cursor.draw(draw_surface)\n\n # If on the trow away dialogue we don't need to draw anything else (it\n # is taken care of in the how many surf). Return so that cursor and\n # yes no surf are not drawn.\n if self.threw_away_dialogue is not None:\n return\n\n elif self.confirm_toss_response_dialogue is not None:\n draw_surface.blit(self.yes_no_surf, (195, 127))\n self.cursor.draw(draw_surface)", "def is_on(self):\n return self._light_on", "def ToggleDrawingTools(self, event):\n pass", "def is_off(self) -> bool:\n return not self.is_on", "def is_draw(board):\n\tif not is_game_won(board, user_player) and not is_game_won(board, server_player):\n\t\tcheck = [0, 1, 2, 3, 4, 5, 6, 7, 8]\n\n\t\tfor item in check:\n\n\t\t\tif item in board:\n\t\t\t\treturn False\n\n\t\treturn True\n\t\n\telse:\n\t\treturn False", "def is_on(self):\n return self.wink.state()", "def is_shaded(self):\n\t\tpass", "def is_on(self):\n return False", "def GetDebugMode(self):\n return bool(self.debug_mode)", "def is_on(self):\n return self._device.state", "def draw(self):\n pass", "def draw(self):\n pass", "def draw(self):\n pass", "def draw(self):\n pass", "def set_draw_callback(callback: DRAW_FUN) -> None:\n global ACTIVE_CALLBACK\n ACTIVE_CALLBACK = callback", "def is_on(self):\n return self._program.get(\"enabled\") is True", "def draw(self, force = False):\n\t\tpass", "def getImmediateRendering():\n\n\treturn False", "def is_on(self):\n camera = self.coordinator.data[self._camera_id]\n if self._switch_type == \"record_motion\":\n enabled = True if camera[\"recording_mode\"] == TYPE_RECORD_MOTION else False\n elif self._switch_type == \"record_always\":\n enabled = True if camera[\"recording_mode\"] == TYPE_RECORD_ALLWAYS else False\n else:\n enabled = True if camera[\"ir_mode\"] == self._ir_on_cmd else False\n return enabled", "def testSetMode(self):\n my_button = buttonsprite.ButtonSprite()\n my_button.setMode('inactive')\n self.assertEquals(my_button._draw_function, my_button._drawInactive)\n my_button.setMode('highlighted')\n self.assertEquals(my_button._draw_function, my_button._drawHighlighted)\n my_button.setMode('pressed')\n self.assertEquals(my_button._draw_function, my_button._drawPressed)\n my_button.setMode('normal')\n self.assertEquals(my_button._draw_function, my_button._drawNormal)", "def 
draw(self):\n if self.node:\n if self.async:\n if self.cancel_draw:\n self.after_cancel(self.cancel_draw)\n self.cancel_draw = self.after(3, self._draw)\n else: self._draw()", "def draw(self, surface):\n to_draw = list()\n full_screen = surface.get_rect()\n for state in self.active_states:\n to_draw.append(state)\n\n # if this state covers the screen\n # break here so lower screens are not drawn\n if (not state.transparent\n and state.rect == full_screen\n and not state.force_draw):\n break\n\n # draw from bottom up for proper layering\n for state in reversed(to_draw):\n state.draw(surface)", "def draw(self, mode, selection):\n if not self._linked:\n raise RuntimeError('Cannot draw program if code has not been set')\n # Init\n gl.check_error('Check before draw')\n mode = as_enum(mode)\n # Draw\n if len(selection) == 3:\n # Selection based on indices\n id_, gtype, count = selection\n if count:\n self._pre_draw()\n ibuf = self._parser.get_object(id_)\n ibuf.activate()\n gl.glDrawElements(mode, count, as_enum(gtype), None)\n ibuf.deactivate()\n else:\n # Selection based on start and count\n first, count = selection\n if count:\n self._pre_draw()\n gl.glDrawArrays(mode, first, count)\n # Wrap up\n gl.check_error('Check after draw')\n self._post_draw()", "def is_on(self):\n return self._cur != -1", "def is_on_ground(self):\n return bool(self.ground_sprites())", "def draw(self):\n # IMPLEMENT ME\n \"\"\"\n GRectangle(x=GAME_WIDTH/2,y=GAME_HEIGHT/2,\n width=GAME_WIDTH,height=GAME_HEIGHT,\n fillcolor=introcs.RGB(0,0,0)).draw(self.view)\n if self.getState() == STATE_INACTIVE:\n self.getText().draw(self.view)\n if self.getState() == STATE_PAUSED:\n self.getText().draw(self.view)\n if not self.getWave() is None:\n self.getWave().draw(self.view)\n if self.getState() == STATE_COMPLETE:\n self.getText().draw(self.view)\n if self.getState() == STATE_PAUSED or self.getState() == STATE_ACTIVE or self.getState() == STATE_COMPLETE:\n self.getText().draw(self.view)\n\n GRectangle(x=GAME_WIDTH/2,y=GAME_HEIGHT/2,\n width=GAME_WIDTH,height=GAME_HEIGHT,\n fillcolor=introcs.RGB(0,0,0)).draw(self.view)\"\"\"\n if not self.getText() is None:\n self.getText().draw(self.view)\n if not self.getWave() is None:\n self.getWave().draw(self.view)", "def is_outline_shown(self):\n return self.container['is_outline_shown']", "def GetGUIOpen(self):\n return bool(self.gui_open)", "def draw(self):\n\t\tpass", "def is_on(self) -> bool:\n return self._device.is_on", "def is_on(self) -> bool:\n return self._state", "def is_on(self) -> bool:\n raise NotImplementedError(\"Device subclass needs to implement this.\")", "def high_current_mode_bool(self):\n return self._high_current_mode_bool", "def draw(self, draw_surface):\n if not self._initial_prompt.is_over():\n self._initial_prompt.draw(draw_surface)\n elif self._response == 2 and not self._seeya_dialogue.is_over():\n self._seeya_dialogue.draw(draw_surface)\n elif self._response == 0 and not self._buy_menu.is_over():\n draw_surface.blit(self._money_surface, (2, 2))\n draw_surface.blit(self._help_surface, (0, 111))\n self._buy_menu.draw(draw_surface)\n elif self._response == 1 and not self._sell_menu.is_over():\n self._sell_menu.draw(draw_surface)" ]
[ "0.7248577", "0.6596341", "0.6579881", "0.6529058", "0.65262514", "0.64363253", "0.6411703", "0.6403673", "0.6296374", "0.62232405", "0.6160772", "0.60842884", "0.5986026", "0.5977863", "0.59161794", "0.5741559", "0.5738247", "0.56827796", "0.5679077", "0.56439203", "0.56351316", "0.5596153", "0.55085737", "0.5489354", "0.54806435", "0.5447787", "0.54397124", "0.54397124", "0.5431791", "0.54225755", "0.5404822", "0.5394855", "0.53697234", "0.5356719", "0.5342971", "0.53151685", "0.5315139", "0.5315139", "0.53122926", "0.52940834", "0.5278881", "0.5249475", "0.52465457", "0.52395624", "0.52330065", "0.52292794", "0.5220932", "0.52076584", "0.52060986", "0.5175267", "0.5171815", "0.5171316", "0.5166551", "0.51629907", "0.5153857", "0.5137018", "0.51293933", "0.51216406", "0.51214916", "0.51199985", "0.5118414", "0.5110791", "0.5104807", "0.5104671", "0.509919", "0.5095779", "0.5081668", "0.50637907", "0.50587416", "0.50567234", "0.50522", "0.5047343", "0.5045015", "0.5039901", "0.5034571", "0.50329167", "0.5019298", "0.5019298", "0.5019298", "0.5019298", "0.5004783", "0.5002734", "0.49976522", "0.49896118", "0.49872583", "0.4987029", "0.4985658", "0.49809772", "0.4978531", "0.4974755", "0.4970406", "0.4965131", "0.49519908", "0.4945513", "0.49428746", "0.49416062", "0.49409518", "0.49404177", "0.49381277", "0.4935557" ]
0.8096658
0
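A minimal usage sketch for the record above, written against the standard-library turtle module that the documented wrapper delegates to (drawmode() simply forwards to isdown(); the wrapper class itself is not shown in this record, so the sketch uses turtle.Turtle directly as an assumption):

import turtle

t = turtle.Turtle()   # opens the standard turtle graphics window
t.penup()
print(t.isdown())     # False -- drawmode() above would report False
t.pendown()
print(t.isdown())     # True  -- drawmode() above would report True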
The x-coordinate of this turtle. To change the x coordinate, use one of the drawing methods. This attribute may not be (directly) altered.
def x(self):
    return self._turtle.xcor()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def setX(self, x):\n self.position.setX(x)", "def Getxcoord(self):\n return self.x_coord", "def set_x(self, x):\n self.scene.set_x_loc(x)\n self.redraw()", "def get_x(self):\n return self.posX", "def x(self):\n return _libsbml.Point_x(self)", "def set_x(self, x):\n self._x = x", "def setX(self, x):\r\n\t\tself._x=x", "def get_pos_x(self):\n return self.__pos_x", "def set_x(self, new_x):\r\n self.x = new_x", "def SetX(self, x):\r\n\r\n self._x = x", "def _get_x(self):\n return self.position.x", "def setX(self, x):\n self.x = x\n pass", "def set_x(self, x: float):\n self.x = x", "def get_x_position(self):\n return self.rect.x", "def getXCoordinate(self) -> float:\n return self.x_coord", "def x(self):\n if self._x is None:\n self.compute_coordinates()\n return self._x", "def x_coord(self):\n\n return self.x0 + np.arange(self.nx) * self.dx", "def setX(self, *args):\n return _libsbml.Point_setX(self, *args)", "def x(self):\r\n return self.position.x", "def get_x(self):\n return self.coords[0]", "def x(self):\n return self._coords[0]", "def x(self):\n return self.coords[0]", "def setXPos(self,newXPos):\n self.xPos=newXPos", "def get_x(self) -> int:\n return self.__x", "def set_x(self, state_value):\n val = state_value / self.space_subdivisions + self.unit\n epsilon = 1e-6\n if not self.unit <= val <= 1.0 - self.unit + epsilon:\n raise AttributeError(\"Value out of bounds\")\n self.pos_x = val", "def setX(self, value):\n self.position[0] = value", "def __get_x__(self):\n return self.Direction['x']", "def get_x_position(self):\n return self.actual_coordinates[0]", "def set_new_pos_in_x(self, new_pos):\n self.__pos_x = new_pos", "def pos_x(self, *args, **kwargs) -> Any:\n pass", "def reflect_x(self):\n\n return Point(self.x, - self.y)", "def reflect_x(self):\n r_x = self.x\n r_y = self.y *-1\n\n return (Point(r_x,r_y))", "def GetX(self):\r\n\r\n return self._x", "def getX(self):\n return self.__x", "def x(self, x=None):\n\n if x is None:\n return self._x\n else:\n if not isinstance(x, int) and not isinstance(x, float):\n raise TypeError(\"x must be numeric, not '%s'\" % x)\n self._x = x", "def x(self, x=None):\n\n if x is None:\n return self._x\n else:\n if not isinstance(x, int) and not isinstance(x, float):\n raise TypeError(\"x must be numeric, not '%s'\" % x)\n self._x = x", "def getX(self):\n return self.x", "def getX(self):\r\n\t\treturn self._x", "def x(self, value):\n self.data_validator(\"x\", value)\n self.__x = value", "def set_pos(self, x):\n self._pos = x", "def x ( self ) :\n return self.xvar", "def setXOffset(self, *args):\n return _libsbml.Point_setXOffset(self, *args)", "def get_x(self):\n\t\treturn self._collision_rect.x + 14", "def offset_x(self, x: int):\n self.tk_ref.geometry(f'{self.width}x{self.height}+{x}+{self.offset_y}')", "def get_axis_x(self):\r\n return self.__x_axis", "def x(self) -> int:\n return self._x", "def x_origin(self):\n return self._x_origin", "def set_axis_x(self, new_axis_point):\r\n self.__x_axis = new_axis_point", "def origin_x(self):\n return self._origin[0]", "def x(self, number):\n self.validate_int(\"x\", number)\n if number < 0:\n raise ValueError(\"x must be >= 0\")\n self.__x = number", "def x(self, value):\n self.validate_input(x=value)\n self.__x = value", "def x(self, value: int):\n if not (0 < value < SCREEN_WIDTH - self.width):\n self.dir_x = -self.dir_x\n self._x += abs(self._x - value) * self.dir_x", "def getX(self):\n return self.position.getX()", "def get_ship_x(self):\n return self.x", "def x(self):\n return self._x", "def x(self):\n 
return self._x", "def x(self):\n return self._x", "def x(self):\n return self._x", "def x(self):\n return self._x", "def x(self):\n return self._x", "def x(self):\n return self._x", "def x(self):\n return self._x", "def x(self):\n return self._x", "def x(self):\n return self._x", "def x(self):\n return self._x", "def x(self):\n return self._x", "def x(self):\n return self._x", "def x(self):\n return self._x", "def x(self):\n return self._x", "def x(self):\n return self._x", "def set_lx(self, val=None):\r\n self._lx = self.dx * self.nx - self.ox", "def x(self):\n return self._kml['x']", "def xaxis ( self ) :\n return self.__xaxis", "def xaxis ( self ) :\n return self.__xaxis", "def x(self):\n return self.__x", "def x(self):\n return self.__x", "def x(self):\n return self.__x", "def x(self):\n return self.__x", "def x(self):\n return self.__x", "def x(self):\n return self.__x", "def x(self):\n return self.__x", "def x(self):\n return self.__x", "def x(self):\n return self.__x", "def x(self):\n return self.__x", "def x(self):\n return self.__x", "def xaxis ( self ) :\n return self.__xaxis", "def x(self):\n return self.x", "def x(self, value):\n if not isinstance(value, int):\n raise TypeError(\"x must be an integer\")\n if value < 0:\n raise ValueError(\"x must be >= 0\")\n self.__x = value", "def x(self, x):\n if type(x) is not int:\n raise TypeError(\"x must be an integer\")\n if x < 0:\n raise ValueError(\"x must be >= 0\")\n self.__x = x", "def x(self, x):\n if type(x) is not int:\n raise TypeError(\"x must be an integer\")\n if x < 0:\n raise ValueError(\"x must be >= 0\")\n self.__x = x", "def getXOffset(self):\n return _libsbml.Point_getXOffset(self)", "def x(self, value):\n if isinstance(value, int) is False:\n raise TypeError(\"x must be an integer\")\n if value < 0:\n raise ValueError(\"x must be >= 0\")\n self.__x = value", "def offset_x(self) -> int:\n self.tk_ref.update()\n return self.tk_ref.winfo_x()", "def setX(self, *args):\n return _libsbml.BoundingBox_setX(self, *args)", "def x(self, x):\n if x is None:\n raise ValueError(\"Invalid value for `x`, must not be `None`\") # noqa: E501\n\n self._x = x", "def x(self, x):\n if x is None:\n raise ValueError(\"Invalid value for `x`, must not be `None`\") # noqa: E501\n\n self._x = x", "def x(self):\n return self[\"x\"]", "def x(self, x):\n if type(x) is not int:\n raise TypeError(\"x must be an integer\")\n elif x < 0:\n raise ValueError(\"x must be >= 0\")\n else:\n self.__x = x", "def get_alien_x(self):\n return self.x" ]
[ "0.76434016", "0.7627431", "0.76259303", "0.75785303", "0.75745726", "0.75719124", "0.7553244", "0.7533228", "0.7502078", "0.75011337", "0.74811924", "0.74758", "0.7464989", "0.7426505", "0.7388529", "0.7332591", "0.73232895", "0.72585595", "0.71786386", "0.71688503", "0.7164573", "0.7132896", "0.7077902", "0.7071956", "0.70676875", "0.7007853", "0.6959226", "0.6950486", "0.694609", "0.68973833", "0.68959206", "0.6889416", "0.6834155", "0.6809917", "0.6775918", "0.6775918", "0.6759473", "0.6721829", "0.6711484", "0.67092437", "0.6702299", "0.670134", "0.666158", "0.6654817", "0.66510403", "0.6633222", "0.6625623", "0.6604806", "0.65774584", "0.65672344", "0.6561662", "0.65606564", "0.6553863", "0.6550047", "0.6534027", "0.6534027", "0.6534027", "0.6534027", "0.6534027", "0.6534027", "0.6534027", "0.6534027", "0.6534027", "0.6534027", "0.6534027", "0.6534027", "0.6534027", "0.6534027", "0.6534027", "0.6534027", "0.6532995", "0.6518065", "0.65165794", "0.65165794", "0.6511992", "0.6511992", "0.6511992", "0.6511992", "0.6511992", "0.6511992", "0.6511992", "0.6511992", "0.6511992", "0.6511992", "0.6511992", "0.6498147", "0.6479484", "0.6444896", "0.6417612", "0.6417612", "0.6386528", "0.63785934", "0.63742954", "0.63698137", "0.6348598", "0.6348598", "0.6332223", "0.63216853", "0.63056767" ]
0.7628367
2
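A short sketch of the x property above against the standard-library turtle (xcor() is the call the property forwards to; the goto() positioning is illustrative only and is itself one of the "drawing methods" the docstring refers to):

import turtle

t = turtle.Turtle()
t.penup()             # lift the pen so goto() draws nothing
t.goto(30, 40)
print(t.xcor())       # 30.0 -- the value the x property above returns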
The y-coordinate of this turtle. To change the y coordinate, use one of the drawing methods. This attribute may not be (directly) altered.
def y(self):
    return self._turtle.ycor()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def y(self):\n return _libsbml.Point_y(self)", "def getYCoordinate(self) -> float:\n return self.y_coord", "def setY(self, y):\r\n\t\tself._y=y", "def setY(self, y):\n self.y = y\n pass", "def y_coord(self):\n\n return self.y0 + np.arange(self.ny) * self.dy", "def getY(self):\n return self.__y", "def set_y(self, y):\n self._y = y", "def getY(self):\n return self.y", "def getY(self):\n return self.y", "def set_y(self, y: float):\n self.y = y", "def getY(self):\r\n\t\treturn self._y", "def SetY(self, y):\r\n\r\n self._y = y", "def set_y(self, new_y):\r\n self.y = new_y", "def setY(self, *args):\n return _libsbml.Point_setY(self, *args)", "def GetY(self):\r\n\r\n return self._y", "def getYpos(self):\n return self.y", "def getY(self):\n return self.position.getY()", "def _get_y(self):\n return self.position.y", "def y(self):\r\n return self.position.y", "def __get_y__(self):\n return self.Direction['y']", "def get_y(self):\n return self.__y", "def get_pos_y(self):\n return self.__pos_y", "def get_y(self):\n return self.posY", "def y(self):\n return self._coords[1]", "def y(self):\n return self.coords[1]", "def setY(self, y):\n self.position.setY(y)", "def getY(self):\n return self.position[1]", "def set_ly(self):\r\n self._ly = self.dy * self.ny - self.oy", "def set_y(self, y):\n self.scene.set_y_loc(y)\n self.redraw()", "def set_y(self,Y):\n self.posY = Y", "def setY(self, value):\n self.position[1] = value", "def get_y(self):\n return self.coords[1]", "def get_y_position(self): \n return self.rect.y", "def y(self):\n if self._y is None:\n self.compute_coordinates()\n return self._y", "def get_y_position(self):\n return self.actual_coordinates[1]", "def y ( self ) :\n return self.yvar", "def set_y(self, state_value):\n val = state_value / self.space_subdivisions + self.unit\n epsilon = 1e-6\n if not self.unit <= val <= 1.0 - self.unit + epsilon:\n raise AttributeError(\"Value out of bounds\")\n self.pos_y = val", "def getY(self):\n return _libsbml.BoundingBox_getY(self)", "def getY(self):\n y = self.getAttribute('y')\n kind = self.getKind()\n self._y = y if kind == 'pie' else None\n return self._y", "def get_alien_y(self):\n return self.y", "def y(self):\n return self.__y", "def y(self):\n return self.__y", "def y(self):\n return self.__y", "def y(self):\n return self.__y", "def y(self):\n return self.__y", "def y(self):\n return self.__y", "def y(self):\n return self.__y", "def y(self):\n return self.__y", "def y(self):\n return self.__y", "def y(self):\n return self.__y", "def y(self, value):\n if not (0 < value < SCREEN_HEIGHT - self.height):\n self.dir_y = -self.dir_y\n self._y += abs(self._y - value) * self.dir_y", "def y(self):\n return self._y", "def y(self):\n return self._y", "def y(self):\n return self._y", "def y(self):\n return self._y", "def y(self):\n return self._y", "def y(self):\n return self._y", "def y(self):\n return self._y", "def y(self):\n return self._y", "def y(self):\n return self._y", "def y(self):\n return self._y", "def y(self):\n return self.y", "def setY(self, *args):\n return _libsbml.BoundingBox_setY(self, *args)", "def y(self):\n return (self.__y)", "def setYPos(self,newYPos):\n self.yPos=newYPos", "def pos_y(self, *args, **kwargs) -> Any:\n pass", "def origin_y(self):\n return self._origin[1]", "def set_new_pos_in_y(self, new_pos):\n self.__pos_y = new_pos", "def setY(self, value):\n self.components[1] = value", "def setY(self, value):\n self.components[1] = value", "def Y(self, value):\n self._Y = value", "def y(self,) -> int:\n return self._y", "def 
setYOffset(self, *args):\n return _libsbml.Point_setYOffset(self, *args)", "def findY(self):\n return self.y", "def get_y(self, x):\n p, y = self.get_p_y(x)\n return y", "def getYOffset(self):\n return _libsbml.Point_getYOffset(self)", "def y_origin(self):\n return self._y_origin", "def y(self) -> int:\n return self.data.y_centre >> 4", "def Y(self):\n return self.y\n pass", "def y(self, y=None):\n\n if y is None:\n return self._y\n else:\n if not isinstance(y, int) and not isinstance(y, float):\n raise TypeError(\"y must be numeric, not '%s'\" % y)\n self._y = y", "def y(self, y=None):\n\n if y is None:\n return self._y\n else:\n if not isinstance(y, int) and not isinstance(y, float):\n raise TypeError(\"y must be numeric, not '%s'\" % y)\n self._y = y", "def _get_y(self):\n enabled = self.num_enabled\n\n if self.heart_enabled:\n self._heart_y = 45*(self.num_enabled - enabled) + 75\n enabled -= 1\n if self.speed_enabled:\n self._speed_y = 45*(self.num_enabled - enabled) + 75\n enabled -= 1\n if self.cadence_enabled:\n self._cadence_y = 45*(self.num_enabled - enabled) + 75\n enabled -= 1\n if self.ams_enabled:\n self._ams_y = 45*(self.num_enabled - enabled) + 75\n enabled -= 1", "def y(self, value):\n self.validate_input(y=value)\n self.__y = value", "def y(self):\n return self[\"y\"]", "def get_y(self):\n\t\treturn self._collision_rect.y + 25", "def y(self):\n return self._translation[1, 0]", "def getY(self):\n return self.proj.getY()", "def yax(self):\n return self.__yax", "def Y(self):\n return self._Y", "def y(self):\n return self.top", "def y(self):\n return self._kml['y']", "def getY(self):\n return self.components[1]", "def getY(self):\n return self.components[1]", "def get_ly(self):\r\n return self.dy * self.ny - self.oy", "def y(self, value):\n self.data_validator(\"y\", value)\n self.__y = value", "def y(self, number):\n self.validate_int(\"y\", number)\n if number < 0:\n raise ValueError(\"y must be >= 0\")\n self.__y = number", "def y(self, value):\n if not isinstance(value, int):\n raise TypeError(\"y must be an integer\")\n if value < 0:\n raise ValueError(\"y must be >= 0\")\n self.__y = value", "def y(self, y):\n if type(y) is not int:\n raise TypeError(\"y must be an integer\")\n if y < 0:\n raise ValueError(\"y must be >= 0\")\n self.__y = y", "def y(self, y):\n if type(y) is not int:\n raise TypeError(\"y must be an integer\")\n if y < 0:\n raise ValueError(\"y must be >= 0\")\n self.__y = y" ]
[ "0.77648014", "0.7557941", "0.75414705", "0.7519196", "0.7517687", "0.75156283", "0.74248564", "0.7424172", "0.7424172", "0.7421405", "0.74198735", "0.74136454", "0.73714113", "0.7355722", "0.73076034", "0.7266436", "0.7265773", "0.72609174", "0.7236153", "0.7231107", "0.7228147", "0.72175074", "0.72151315", "0.720052", "0.7169785", "0.7143287", "0.7118839", "0.7095505", "0.70832574", "0.70824456", "0.7077063", "0.7073348", "0.7061402", "0.70228004", "0.6987173", "0.69660527", "0.692774", "0.6915229", "0.69102633", "0.6887717", "0.6885156", "0.6885156", "0.6885156", "0.6885156", "0.6885156", "0.6885156", "0.6885156", "0.6885156", "0.6885156", "0.6885156", "0.68826723", "0.6856842", "0.6856842", "0.6856842", "0.6856842", "0.6856842", "0.6856842", "0.6856842", "0.6856842", "0.6856842", "0.6856842", "0.68478435", "0.67987144", "0.6781755", "0.6747699", "0.6719368", "0.6702987", "0.6695838", "0.6691934", "0.6691934", "0.66918576", "0.6687948", "0.6678643", "0.6677346", "0.66520244", "0.6644196", "0.6633252", "0.66287833", "0.6620804", "0.6582495", "0.6582495", "0.6573936", "0.6571456", "0.6566977", "0.6551497", "0.6548509", "0.6541063", "0.65077496", "0.6480519", "0.6468745", "0.64662653", "0.6462677", "0.6462677", "0.6459272", "0.6448334", "0.6439848", "0.6423513", "0.64004236", "0.64004236" ]
0.7376552
13
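The same sketch for the y property above, again using the standard-library call (ycor()) that the property forwards to:

import turtle

t = turtle.Turtle()
t.penup()
t.goto(30, 40)
print(t.ycor())       # 40.0 -- the value the y property above returns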
Deletes this turtle object.
def __del__(self):
    self.clear()
    self._screen._removeTurtle(self)
    del self._turtle
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def __del__(self):\n self._screen._removePen(self)\n del self._turtle", "def delete(self):\n self.graph._del(handle=self.handle)", "def remove(self):\n self.node.destroy()", "def delete(self):\n\t\tself.canvas.delete('node_'+self.identifier)\n\t\tself.canvas.tag_unbind('node_'+self.identifier,\"<Any>\")", "def remove(self):\n self._delete()", "def remove(self):\n self._delete()", "def remove(self):\n self._delete()", "def remove(self):\n self._delete()", "def remove(self):\n self._delete()", "def remove(self):\n self._delete()", "def remove(self):\n self._delete()", "def remove(self):\n self._delete()", "def remove(self):\n self._delete()", "def remove(self):\n self._delete()", "def remove(self):\n self._delete()", "def remove(self):\n self._delete()", "def remove(self):\n self._delete()", "def remove(self):\n self._delete()", "def delete(self):\n self._vertex_list.delete()\n self._vertex_list = None", "def delete(self):\n del self.shx.atoms[self.index]", "def delete(self):\n # exit contains our clean up code\n self.exit()\n GenericAnimatedProp.GenericAnimatedProp.delete(self)", "def remove(self):\r\n\t\tself._delete()", "def delete(self) -> None:\n shutil.rmtree(self.path)", "def __del__(self):\n try:\n self._frame._destroy()\n except:\n pass\n self._turtles = []\n self._pencils = []\n del self._frame", "def destroy(self):\n for node in self.find_references():\n node.destroy()\n self._bld.RemoveObject(self.get_sobj())", "def clear(self):\n self._turtle.clear()", "def clear(self):\n self._turtle.clear()", "def __del__(self) -> None:\n self.delete()", "def delete(self):\n if self.shape is not None:\n self.shape.delete()\n if self in shared.obstacles:\n shared.obstacles.remove(self)", "def destroy(self):\r\n self._obj.destroy()\r\n self._obj = None", "def delete(self):\n self.parent.delete_node(self)", "def destroy(self):\n gameengine.GameEngine().game_objects.remove(self)", "def delete(self) -> None:\n self.pop()", "def __del__(self):\n\n # Delete sprite (if it has been defined)\n try:\n self.canvas.delete(self.sprite)\n except AttributeError:\n pass\n except tk.TclError:\n pass", "def _removeTurtle(self,turt):\n if turt in self._turtles:\n self._turtles.remove(turt)", "def delete(self):\n with self.locked():\n self.path.delete()", "def delete(self):\n with self.locked():\n self.path.delete()", "def delete(self):\n os.system(\"rm \"+self._name)", "def hdel(self):\n return self.delete()", "def delete(self):\n\n raise NotImplementedError('Must be implemented by subclasses')", "def delete_current_shape(self):\n print(\"deleting shape!\")\n self.shapes.remove(self.current_shape)\n self.current_shape = None\n self.changed()", "def delete(self):\n\n # TODO find a way to remove this when sub-classing in HCRoot\n self.parent.del_child(self)", "def delete(self):\n\n try:\n remove(self.file)\n except OSError:\n pass", "def delete(self):\n return self._finalize()", "def delete(self):\n return self._finalize()", "def delete(self):\n return self._finalize()", "def delete(self):\n return self._finalize()", "def delete(self):\n return self._finalize()", "def delete(self):\n return self._finalize()", "def delete(self):\n return self._finalize()", "def delete(self):\n return self._finalize()", "def delete(self):\n return self._finalize()", "def remove(self):\n traci.vehicle.remove(self.id)", "def remove(self):\n traci.vehicle.remove(self.id)", "def remove(self):\n self._switch.odlclient._request(self._path, method=\"delete\")", "def destroy(self):\n\n pass", "def delete_ball(self):\r\n self.movement = 
\"\"\r\n self.canvas.delete(self.ball)", "def delete_object(self, object):\n object.delete()", "def deleteBall(self):\n self._ball = None", "def bye(self):\n self._frame._destroy()\n self._turtles = []\n self._gpens = []\n del self._frame", "def destroy(self):\r\n self.__destroy()", "def delete(self):\n self.id = uuid4()\n DataStore.remove_instance(self)", "def delete(self):\n Texture2D.delete_glo(self._ctx, self._glo)\n self._glo.value = 0", "def delete(self):\r\n s = self.get_session()\r\n s.delete(self)\r\n s.commit()", "def destroy(self):\n pass", "def destroy(self):\n pass", "def destroy(self):\n pass", "def destroy(self):\n pass", "def _destroy(self):\n root = self._root\n turtle.Turtle._pen = None\n turtle.Turtle._screen = None\n self._root = None\n self._canvas = None\n turtle.TurtleScreen._RUNNING = True\n root.destroy()", "def delete(self):\n pass", "def delete(self):\n pass", "def delete(self):\n pass", "def delete(self):\n pass", "def delete(self):\n if os.path.exists(self.file_path):\n os.remove(self.file_path)", "def delete( self ):\n if os.path.exists(self.filename):\n os.remove(self.filename)", "def delete(self):\n self._client.delete(self)", "def remove(self):\n with managed_session() as session:\n session.delete(self)", "def delete(self):\n\t\t#self.log.info(\"Deleting file {}\".format(self._filepath))\n\t\tos.remove(self._filepath)", "def delete(self):\n self._instance.delete()\n self._instance = None\n self._data_defs = []", "def deleteStep( self ):\n assert isinstance( self._env, Env )\n assert isinstance( self._steps, list )\n\n # Save the stuff we need\n oldSteps = self._steps\n oldSteps.pop( )\n\n # Reinitialize this instance\n self._initialize( oldSteps )", "def destroy(self):\n pass # Nothing for now", "def delete(self):\n self.vera.delete_scene(self)", "def remove(self):\n for ref_node in self.node.find_references():\n ref_node.destroy()\n File.remove(self)", "def remove(self):\n if isinstance(self, Vertex):\n self.graph.remove_vertex(self)\n else:\n self.graph.remove_edge(self)", "def __del__(self):\n #print 'del in'\n if hasattr(self,'root'):\n #print 'del root'\n if self.root:\n #print 'del circ'\n self.root.delete_circulars()\n del self.root", "def __del__(self):\n \n _cantera.wall_del(self.__wall_id)", "def delete(self):\r\n if self.__abstract__:\r\n raise ThunderdomeException('cant delete abstract elements')\r\n if self.eid is None:\r\n return self\r\n query = \"\"\"\r\n g.removeVertex(g.v(eid))\r\n g.stopTransaction(SUCCESS)\r\n \"\"\"\r\n results = execute_query(query, {'eid': self.eid})", "def delete(self):\n db.session.delete(self)\n db.session.commit()", "def delete(self):\n db.session.delete(self)\n db.session.commit()", "def delete(self):\n db.session.delete(self)\n db.session.commit()", "def delete(self):\n db.session.delete(self)\n db.session.commit()", "def delete(self):\n db.session.delete(self)\n db.session.commit()", "def delete(self):\n db.session.delete(self)\n db.session.commit()", "def delete(self):\n db.session.delete(self)\n db.session.commit()", "def delete(self):\n db.session.delete(self)\n db.session.commit()", "def delete(self):\n db.session.delete(self)\n db.session.commit()", "def delete_refobj(self, refobj):\n with common.locknode(refobj, lock=False):\n cmds.delete(refobj)", "def destroy(self):\n del self.nodes\n self.nodes = {}", "def delete(self):\r\n db.session.delete(self)\r\n db.session.commit()", "def delete(self):\n\n raise NotImplementedError()" ]
[ "0.72901684", "0.69489294", "0.6906722", "0.68213683", "0.68130356", "0.68130356", "0.68130356", "0.68130356", "0.68130356", "0.68130356", "0.68130356", "0.68130356", "0.68130356", "0.68130356", "0.68130356", "0.68130356", "0.68130356", "0.68130356", "0.6793991", "0.6787353", "0.6783469", "0.67716306", "0.67514575", "0.6730611", "0.6634641", "0.6629618", "0.6629618", "0.65821636", "0.65815085", "0.65547854", "0.65537894", "0.65066445", "0.6491144", "0.6472861", "0.6467688", "0.6444615", "0.6444615", "0.6443767", "0.63996273", "0.63891065", "0.63659865", "0.6344538", "0.63153565", "0.6280118", "0.6280118", "0.6280118", "0.6280118", "0.6280118", "0.6280118", "0.6280118", "0.6280118", "0.6280118", "0.62699836", "0.62699836", "0.62516886", "0.62111676", "0.6206013", "0.6205209", "0.6198584", "0.619709", "0.6187177", "0.6152614", "0.6141955", "0.6135889", "0.61194736", "0.61194736", "0.61194736", "0.61194736", "0.610609", "0.6105911", "0.6105911", "0.6105911", "0.6105911", "0.60807973", "0.60801107", "0.6079685", "0.60737205", "0.6070666", "0.6066526", "0.6051367", "0.60474336", "0.60381484", "0.60340226", "0.60314053", "0.60309833", "0.6020719", "0.6016267", "0.60154957", "0.60154957", "0.60154957", "0.60154957", "0.60154957", "0.60154957", "0.60154957", "0.60154957", "0.60154957", "0.6012934", "0.60012156", "0.59938234", "0.5979477" ]
0.79840356
0
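A hedged approximation of what the __del__ above performs, using only standard-library calls (the wrapper's _removeTurtle() is its own screen bookkeeping with no stdlib equivalent, so hideturtle() stands in here as an assumption):

import turtle

t = turtle.Turtle()
t.forward(50)
t.clear()        # step 1 of __del__ above: erase this turtle's drawings
t.hideturtle()   # rough stand-in for the wrapper's _removeTurtle() bookkeeping
del t            # drop the reference, as `del self._turtle` does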
Moves the turtle forward by the given amount.
def forward(self,distance):
    assert (type(distance) in [int, float]), "parameter distance:%s is not a valid number" % `distance`
    self._turtle.forward(distance)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def forward(self, amount):\n newX = self._x + round(amount * math.sin(math.radians(self._rotation)), 2)\n newY = self._y - round(amount * math.cos(math.radians(self._rotation)), 2)\n self.goto(newX, newY)", "def advance(self, amount=1):\n self._current += amount\n self.redraw()", "def advance_by(self, amount: float):\n if amount < 0:\n raise ValueError(\"cannot retreat time reference: amount {} < 0\"\n .format(amount))\n self.__delta += amount", "def move_turtle(self):\n self.forward(self.move_speed)", "def move_forward(self, distance):\r\n return self.move('forward', distance)", "def advance(self, amount):\n raise NotImplementedError()", "def forward(self):\n self.position += 1", "def advance(self, amount=1):\n raise NotImplementedError()", "def move_forward(self, distance):\n quad_offset = self.quad_offset_mapping['forward']\n client.moveByVelocityAsync(self.velocity * quad_offset[0], self.velocity * quad_offset[1],\n 0.15, distance/self.velocity).join()\n # if self.logging:\n # self.log_arr.append(\"forward\")", "def advance(self, amount=1):\n self._current += amount\n if self._current - self._updateRate >= self._lastUpdated:\n self.redraw()\n # go to nearest multiple of updateRate less than current\n self._lastUpdated = (self._current // self._updateRate)*self._updateRate", "def move(self,amount):\n self.positionx=self.positionx+self.amount\n return self.positionx", "def advance(self):\n self.amount = self._nextAmount", "def move_forward():\n pass", "def backward(self, amount):\n newX = self._x - round(amount * math.sin(math.radians(self._rotation)), 2)\n newY = self._y + round(amount * math.cos(math.radians(self._rotation)), 2)\n self.goto(newX, newY)", "def step_to(self, direction):\n s = self\n s.steps += s.speed\n s.physics.move_bomberman(self, direction)\n s.update_pos()", "def forward(self, speed):\n self.controller.forward(speed)", "def move_forward(self,length,draw=True):\r\n new_x = self.x + length * math.cos(math.radians(self.angle))\r\n new_y = self.y + length * math.sin(math.radians(self.angle))\r\n self.draw_tool.line(((self.x,self.y),(new_x,new_y)), fill=(0,0,0),width=2)\r\n self.x = new_x\r\n self.y = new_y", "def move(self,amount):\n angle=self.dirction/180*math.pi\n self.postionx += amount*math.cos(angle)\n self.postiony += amount*math.sin(angle)", "def forward(self, step):\r\n x = self.pos_x + math.cos(math.radians(self.rotation)) * step\r\n y = self.pos_y + math.sin(math.radians(self.rotation)) * step\r\n prev_brush_state = self.brush_on\r\n self.brush_on = True\r\n self.move(x, y)\r\n self.brush_on = prev_brush_state", "def move_forward(self, speed):\n\n # Clamp the speed\n speed = clamp(delta_unit(speed), 0, delta_unit(Car.max_speed))\n\n # Appends the speed according to the direction\n rad = np.radians(self.direction)\n self.fx += speed * np.cos(rad)\n self.fy += speed * np.sin(rad)\n\n # Set marker to move\n self.moved = True", "def move_forward(self, dist):\r\n self.send_command_without_response(f'forward {dist}')", "def move_forward(self, val):\n val = val * 180 / math.pi\n print(\"gyro diff\", self.gyro - val)\n print(\"gyrof\", self.gyro)\n if math.fabs(self.gyro - val) > 0.6:\n if self.gyro - val > 0:\n self.om_right = self.om_right - 0.7\n self.om_left = self.om_left + 0.5\n self.set_speed(self.om_left, self.om_right)\n print(\"om_l\", self.om_left)\n print(\"om_r\", self.om_right)\n else:\n self.om_right = self.om_right + 0.3\n self.om_left = self.om_left - 0.5\n self.set_speed(self.om_left, self.om_right)\n print(\"om_l\", self.om_left)\n 
print(\"om_r\", self.om_right)\n else:\n self.om_right = 10\n self.om_left = 10", "def forward(self, dist):\n start = (self.pos_x, self.pos_y)\n self.pos_x += dist * math.cos(math.radians(self.angle))\n self.pos_y += dist * math.sin(math.radians(self.angle))\n self._update_limits()\n end = (self.pos_x, self.pos_y)\n if self.pen_down:\n self.draw.line([start, end], fill=self.colour, width=self.width)", "def move_by(self, increment):\n return self.move_to(self.position + increment)", "def move_to(self, new_pos, pass_go=True):\r\n new_pos = new_pos % 40\r\n if self.pos > new_pos and pass_go:\r\n self.money += 200\r\n self.pos = new_pos", "def increment(self, amount):\n pass", "def move_by(cls, value):\n cls.set_position(cls._position + value)", "def move_forward(self, steps):\n\t\tif self.movement <= steps:\n\t\t\tif self.heading == 0:\n\t\t\t\tself.grid_y -= steps\n\t\t\telif self.heading == 90:\n\t\t\t\tself.grid_x += steps\n\t\t\telif self.heading == 180:\n\t\t\t\tself.grid_y += steps\n\t\t\telif self.heading == 270:\n\t\t\t\tself.grid_x -= steps", "def move(self):\r\n segments = len(self.all_turtles) - 1\r\n for i in range(len(self.all_turtles)):\r\n if segments == 0:\r\n self.all_turtles[segments].forward(MOVE_DISTANCE)\r\n else:\r\n new_x = self.all_turtles[segments - 1].xcor()\r\n new_y = self.all_turtles[segments - 1].ycor()\r\n self.all_turtles[segments].goto(new_x, new_y)\r\n segments -= 1", "def go_forward(self, distance, speed=0.1):\n while (self._last_odom_msg == None):\n\t rospy.sleep(1.0)\n start = copy.deepcopy(self._last_odom_msg.pose.pose.position)\n rate = rospy.Rate(10)\n while self.distance_fn(self._last_odom_msg.pose.pose.position, start) < math.fabs(distance):\n direction = -1 if distance < 0 else 1\n self.move(direction * speed, 0)\n rate.sleep()", "def advance(self, step=1):\n self.set_progress(self._step + step)", "def forward(self):\n self.cursor.forward()", "def withdraw(self, amount):\n self.balance -= amount\n if self.balance < 10:\n self.balance -= 5\n self.fees += 5", "def move_balls_head_by_distance(self, amount, dx, backward=False):\n for i in range(amount):\n self.move_ball_by_distance(self.balls[i], dx, backward)", "def step_forward(self):", "def forward(self, distance):\n self.logger.debug(\"forward \" + str(distance))", "def backward(self,distance):\n assert (type(distance) in [int, float]), \"parameter distance:%s is not a valid number\" % `distance`\n self._turtle.backward(distance)", "def move(self) -> None:\r\n self._x += self._speed", "def move_forward():\n twister = Twist(linear=Vector3(x=0.5,y=0,z=0),angular=Vector3(x=0,y=0,z=0))\n pub.publish(twister)", "def move_forward(self):\n self.pos += 1\n if self.pos > len(self.text) - 1:\n self.current_char = None\n else:\n self.current_char = self.text[self.pos]", "def advance(self, distance):\n self.cursor += distance", "def increment_speed(self):\n self.speed += 0.0004", "def random_move(turtle, distance):\n angle = uniform(-90,90)\n d = uniform(0,distance)\n turtle.left(angle)\n turtle.forward(d)", "def left(self, amount):\n self.setheading(self._rotation - amount)", "def _move_forward(enemy):\n\t\tBoard.board[enemy.x][enemy.y]=' '\n\t\tenemy.y += Enemy.vel \n\t\tif((enemy.x,(enemy.y)-Enemy.vel) in Board.triangle):\n\t\t\tBoard.board[enemy.x][(enemy.y)-Enemy.vel]='.'\n\n\t\tif(Board.board[enemy.x][enemy.y]=='M'):\n\t\t\tMario.lives -= 1\n\t\t\tif Mario.lives<=0:\n\t\t\t\treturn \"exit\"\n\t\t\tos.system('clear')\n\t\t\tprint(\"\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\t\\t\\t\\t\\t\\tNumber of 
Mario left:\",Mario.lives)\n\t\t\tMario.respawn(enemy.x,enemy.y)\n\t\t\ttime.sleep(2)\n\n\t\tBoard.board[enemy.x][enemy.y]='@'", "def move(self, distance):\n self._go(distance)", "def move_forward(self, speed):\n\t\t# You should modify the bias of 4 wheels depending on your hardware.\n\t\tself._front_left_wheel.anticlockwise_rotate(speed + LEFT_FR_BIAS + LEFT_RIGHT_BIAS)\n\t\tself._front_right_wheel.clockwise_rotate(speed + RIGHT_FR_BIAS)\n\t\tself._rear_left_wheel.anticlockwise_rotate(speed + LEFT_RIGHT_BIAS)\n\t\tself._rear_right_wheel.clockwise_rotate(speed)", "def forward(self, speed):\n self.pwm_backward.ChangeDutyCycle(0)\n self.pwm_forward.ChangeDutyCycle(speed)", "def forward(self, speed):\n self.pwm_backward.ChangeDutyCycle(0)\n self.pwm_forward.ChangeDutyCycle(speed)", "def step(self):\r\n\r\n self.velocity = 1\r\n new_pos = self.pos\r\n self.model.space.move_agent(self, new_pos)", "def move_head_looking_forward():\n return _move_head(degrees(0))", "def move(self, head, steps):\n self.turn(head)\n if self.direction == 0:\n self.x += int(steps)\n if self.direction == 1:\n self.y += int(steps)\n if self.direction == 2:\n self.x -= int(steps)\n if self.direction == 3:\n self.y -= int(steps)", "def step(self, move):", "def moved_forward(self, distance: \"moves in facing direction\") -> Position:\n newx = self.x + distance * math.cos(self.facing)\n newy = self.y + distance * math.sin(self.facing)\n return Position(newx, newy, self.facing)", "def walkTo(self, x, y, angle):\n self.motionProxy.moveTo(x, y, math.pi*float(angle)/180.0)", "def move(self):\n self.position += self.speed", "def move_forward(self):\n self.x, self.y = self.compute_positions()", "def advance():\n global angle_movement, bullet_distance, fire, time\n time += 1\n angle_movement += angle_step\n if angle_movement >= 360:\n angle_movement -= 360 # So angle doesn't get too large.\n elif angle_movement < 0:\n angle_movement += 360 # So angle doesn't get too small.", "def advance(self, amount):\n right_now = self.rightNow + amount\n self._sortCalls()\n while self.calls and self.calls[0].getTime() <= right_now:\n self.rightNow = self.calls[0].getTime()\n call = self.calls.pop(0)\n call.called = 1\n call.func(*call.args, **call.kw)\n self._sortCalls()\n self.rightNow = right_now", "def _go(self, distance):\n ende = self._position + self._orient * distance\n self._goto(ende)", "def moveStep(self):\n\t\tif self.pos[0] <= self.boundsX[0] or \\\n\t\t(self.pos[0]+ 2*(self.radius)) >= self.boundsX[1]:\n\t\t\tself.dir[0] *= -1\n\t\t\t\n\t\tself.pos[0] += self.dir[0]*self.speed\n\t\tself.pos[1] += self.dir[1]*self.speed", "def move_forward(power):\n message = \"FORWARD:\" + str(power) + '\\n'\n sock.sendall(message)\n return", "def walk(self, dir):\n x, y, theta = dir\n self.motionProxy.moveToward(x, y, theta, [[\"Frequency\", 1]])\n self.isWalking = True", "def step(self):\n all_p = self.amount\n neighbors = self.model.grid.get_neighbors(self.pos, True)\n for n in neighbors:\n all_p += n.amount\n ave_p = all_p / (len(neighbors) + 1)\n\n self._nextAmount = (1 - self.model.evaporate) * \\\n (self.amount + (self.model.diffusion * \\\n (ave_p - self.amount)))\n\n if self._nextAmount < self.model.lowerbound:\n self._nextAmount = 0", "def Advance(self, *, forward: bool = True, amount: int = 1, extend: bool = False):\n i = self.Index\n if forward: i += amount\n else: i -= amount\n\n if i > self.Count:\n if extend:\n for _ in range(amount): self.Append('')\n else: i = self.Count\n elif i < 0: i = 0\n\n self.Index = i", "def forward(self, 
forward):\n\n self._forward = forward", "def move(self,x,y):\n assert (type(x) in [int, float]), \"parameter x:%s is not a valid number\" % `x`\n assert (type(y) in [int, float]), \"parameter y:%s is not a valid number\" % `y`\n d = self._turtle.isdown()\n if d:\n self._turtle.penup()\n self._turtle.setposition(x,y)\n if d:\n self._turtle.pendown()", "def up(self):\n self.forward(MOVE_DISTANCE)", "def move_fine(self, direction, count=1):\n if self._direction != direction and self.simulate_backlash:\n self._direction = direction\n self._move(direction, count, 1)\n self.backlash_count += 1\n else:\n self._direction = direction\n self._move(direction, count, 1)", "def rotate(self,amount):\n self.angle += amount\n if self.drawn == True:\n self.draw()", "def moveForward(self):\n if self.onGround:\n self.vx = 4", "def move(self, *step):\n self.x += step[0]\n self.y += step[1]", "def move(self, friction = 0.0):\n try:\n newX = self.xcor() + self.dx\n newY = self.ycor() + self.dy\n self.goto(newX, newY)\n # apply friction\n self.dx = self.dx * (1 - friction)\n self.dy = self.dy * (1 - friction)\n except:\n print(\"Error, probably because dx and dy are not properties of the turtle\")", "def _advance(self):\n self._current += 1", "def increase_car_speed(self):\r\n self.car_speed += 5", "def withdraw(self, amount):\n self.balance -= amount", "def give_space(self):\r\n pen.forward(20)", "def move(self, amount):\n self.__validate_index(amount)\n for i in range(amount):\n self.__list = self.__list[1:] + [self.__list[0]]\n return self.__list", "def _advance(self):\n self._prev, self._current = self._current, abs(self._prev - self._current)", "def steer(self, direction):\n\n if -1 <= direction <= 1:\n target_position = self.steering_limit * direction\n self.brick_pi.set_motor_position(\n self.motor_steer, -target_position)", "def win(self, amount: int):\n self.current_balance += amount\n self.current_bet = 0\n return self ## fluent", "def advanceTan():\n global tanBallX, speed\n tanBallX += speed\n if tanBallX <= -4:\n # Reached the bottom - switch directions\n tanBallX = -4\n speed = -speed\n elif tanBallX >= 2.8:\n # Reached the top - switch directions\n tanBallX = 2.8\n speed = -speed", "def jump(distance):\r\n t.penup()\r\n t.forward(200)\r\n t.pendown()\r\n return None", "def _advance(self):\n self._prev, self._current = self._current, self._prev + self._current", "def withdraw(self, amount):\n self.withdrw = amount\n \n if (self.balance-self.withdrw) < 0:\n self.balance = self.balance - 5 - self.withdrw\n self.fee += 5\n else:\n self.balance -= self.withdrw", "def move(self, is_forward):\n wh, lh = self.get_heading\n self.w += wh\n self.l += lh\n if self.get_pos() == blocks['wall']:\n self.w -= wh\n self.l -= lh", "def steer(direction):\n if direction == 1:\n steerMotor.run(Adafruit_MotorHAT.FORWARD)\n steerMotor.setSpeed(255)\n if direction == -1:\n steerMotor.run(Adafruit_MotorHAT.BACKWARD)\n steerMotor.setSpeed(255)\n if direction == 0:\n steerMotor.setSpeed(0)\n steerMotor.run(Adafruit_MotorHAT.RELEASE)", "def accelerateForwards(self,movementSpeed=0.1):\n self.xMomentum+=math.sin(self.faceHeading*(math.pi/180))*movementSpeed\n self.yMomentum+=math.cos(self.faceHeading*(math.pi/180))*movementSpeed", "def walk(self):\n self.speed = self.speed + (0.2 * self.legs)", "def right(self, amount):\n self.setheading(self._rotation + amount)", "def forward_character():\r\n set_point(point()+1)", "def increment_steps(self):\n self.num_steps += 1", "def move_step(self, direction):\n x = self.objects[0].x\n y 
= self.objects[0].y\n if direction == 0 and y >= 1:\n self.objects[0].y -= 1\n elif direction == 1 and y <= self.size_y - 2:\n self.objects[0].y += 1\n elif direction == 2 and x >= 1:\n self.objects[0].x -= 1\n elif direction == 3 and x <= self.size_x - 2:\n self.objects[0].x += 1", "def goto(x, y):\n turtleTmp.setposition(x, y)", "def go_forward(self):\n command = _build_robovac_command(RobovacModes.GO_FORWARD, RobovacCommands.MOVE)\n message = self._build_command_user_data_message(command)\n\n self._send_packet(message, False)", "def win(self, amount):\n self.balance += amount", "def MoveLeftStep(self):\n if self.facing == 0:\n self.facing = 3\n self.x -= self.stepLeft\n elif self.facing == 1:\n self.facing = 0\n self.y -= self.stepUp\n elif self.facing == 2:\n self.facing = 1\n self.x += self.stepRight\n elif self.facing == 3:\n self.facing = 2\n self.y += self.stepDown", "def moveTurt(t, pole, count):\n x = t.xcor()\n y = t.ycor()\n t.goto(x, y + 10)\n t.goto(pole, y + 10)\n t.goto(pole, y)\n count += 1", "def withdraw(self, amount):\n self.deposit(-amount)", "def incTurn(self):\n self.turnOn = (self.turnOn+1)%self.turns" ]
[ "0.805098", "0.7168281", "0.71475154", "0.7083541", "0.6969918", "0.6827667", "0.682601", "0.6692048", "0.66343707", "0.66327345", "0.6570666", "0.64380103", "0.6395247", "0.6392661", "0.6286539", "0.6284747", "0.6278547", "0.6266692", "0.6242889", "0.6207868", "0.6171793", "0.6100668", "0.60597295", "0.6040921", "0.6037567", "0.6027313", "0.6005516", "0.59614027", "0.5951403", "0.59485054", "0.59431857", "0.593698", "0.59252053", "0.59036005", "0.5871531", "0.5855685", "0.5854489", "0.58294725", "0.5782246", "0.5777254", "0.57689273", "0.5767767", "0.57673764", "0.5761376", "0.57610846", "0.57587206", "0.57389367", "0.57249576", "0.57249576", "0.5722506", "0.57123333", "0.57119703", "0.56977636", "0.5681136", "0.5668791", "0.56520903", "0.5633445", "0.5633195", "0.56317145", "0.56251496", "0.56185687", "0.56105703", "0.5607025", "0.55995864", "0.559843", "0.5598096", "0.5585916", "0.55846363", "0.5571324", "0.55702025", "0.5566344", "0.55603623", "0.5560195", "0.55586845", "0.553071", "0.5520417", "0.5517377", "0.55096936", "0.55081594", "0.55063057", "0.5505561", "0.5499546", "0.5496849", "0.5494943", "0.548978", "0.5476332", "0.54667556", "0.5461389", "0.54557645", "0.5453654", "0.5447849", "0.5443898", "0.5443079", "0.5433661", "0.54312104", "0.5429731", "0.54285747", "0.5424971", "0.5415025", "0.54018617" ]
0.7646355
1
Moves the turtle backward by the given amount.
def backward(self,distance):
    assert (type(distance) in [int, float]), "parameter distance:%s is not a valid number" % `distance`
    self._turtle.backward(distance)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def backward(self, amount):\n newX = self._x - round(amount * math.sin(math.radians(self._rotation)), 2)\n newY = self._y + round(amount * math.cos(math.radians(self._rotation)), 2)\n self.goto(newX, newY)", "def move_backward(self, distance):\r\n return self.move('back', distance)", "def move_backward(self, dist):\r\n self.send_command_without_response(f'back {dist}')", "def back(self, distance):\n self._go(-distance)", "def move_backward(self, distance):\n quad_offset = self.quad_offset_mapping['backward']\n client.moveByVelocityAsync(self.velocity * quad_offset[0], self.velocity * quad_offset[1],\n 0.15, distance/self.velocity).join()\n # if self.logging:\n # self.log_arr.append(\"backward\")", "def back(self, step):\r\n self.forward(-step)", "def backward(self, speed):\n self.controller.reverse(speed)", "def down(self, angle):\n self.up(-angle)", "def backward(self, duration):\n self.set_motor(self.left_motor, 'right', 0.5)\n self.set_motor(self.right_motor, 'left', 0.5)\n time.sleep(duration)", "def move_down(self):\n self.y -= 1", "def down(self):\n self.move(0,-1)", "def move_down(self, distance):\r\n return self.move('down', distance)", "def moveDown():\n tt.right(90)\n tt.forward(60)\n tt.right(90)\n tt.forward(250)\n tt.right(180)", "def move_down(self):\n self.move_step(1)", "def lose(self, amount: int):\n self.win(-amount)\n return self ## fluent", "def back(self):\n self.position -= 1", "def move_backward():\n pass", "def move_down(self):\n self.pitch_motor.step_forward()", "def forward(self, amount):\n newX = self._x + round(amount * math.sin(math.radians(self._rotation)), 2)\n newY = self._y - round(amount * math.cos(math.radians(self._rotation)), 2)\n self.goto(newX, newY)", "def go_backward(self):\n command = _build_robovac_command(RobovacModes.GO_BACKWARD, RobovacCommands.MOVE)\n message = self._build_command_user_data_message(command)\n\n self._send_packet(message, False)", "def withdraw(self, amount):\n self.balance -= amount", "def move_lift_down():\n return _move_lift(0.2)", "def move_back(t,n):\n lt(t)\n bk(t, n)\n rt(t)", "def backward(self, speed):\n\n self.pwm_forward.ChangeDutyCycle(0)\n self.pwm_backward.ChangeDutyCycle(speed)", "def backward(self, speed):\n\n self.pwm_forward.ChangeDutyCycle(0)\n self.pwm_backward.ChangeDutyCycle(speed)", "def down(self):\n if self.bottom == self.current:\n return\n else:\n self.current -= 1", "def move_down(self):\n self.move_measurement(1)", "def right_backward(self):\n self.right_motor.run_forever(speed_sp=-self.MAX_SPEED)", "def backward(self, param):\n\t\tif param:\n\t\t\tself.linear_move(-1 * param * .3048)\n\t\telse:\n\t\t\tself.linear_move(-1 * riu.default_dist * .3048)", "def move_down(self):\n client.moveByVelocityAsync(0, 0, -1, 0.3).join()\n # if self.logging:\n # self.log_arr.append(\"down\")", "def backward(self, speed):\n vrep.simxSetJointTargetVelocity(self.client_id, self.handles['rollingJoint_rr' + self.postfix], speed,\n ONE_SHOT_MODE)\n vrep.simxSetJointTargetVelocity(self.client_id, self.handles['rollingJoint_rl' + self.postfix], speed,\n ONE_SHOT_MODE)\n vrep.simxSetJointTargetVelocity(self.client_id, self.handles['rollingJoint_fr' + self.postfix], speed,\n ONE_SHOT_MODE)\n vrep.simxSetJointTargetVelocity(self.client_id, self.handles['rollingJoint_fl' + self.postfix], speed,\n ONE_SHOT_MODE)", "def move_up(self):\n self.move_step(-1)", "def move_down(self):\n\n if self.ycor() < -280:\n self.sety(-300)\n else:\n new_y = self.ycor() - 40\n self.sety(new_y)", "def backward(self, y):\n pass", "def move_down 
( self ):\n list, index = self.get_info()\n self.value = (list[:index] + [ list[index+1], list[index] ] + \n list[index+2:])", "def moveBackward(self):\n if self.onGround:\n self.vx = -4", "def right(self, amount):\n self.setheading(self._rotation + amount)", "def _decrease_money(self, amount):\n if 1 in self.money:\n self.money[1] -= amount", "def down(self):\n if self.head.heading() != UP and self.last_direction != UP:\n self.head.setheading(DOWN)", "def move_up(self):\n self.pitch_motor.step_backward()", "def move_backward(power):\n message = \"BACKWARD:\" + str(power) + '\\n'\n sock.sendall(message)\n return", "def _move_backward(enemy):\n\t\tBoard.board[enemy.x][enemy.y]=' '\n\t\tenemy.y -= Enemy.vel\n\t\tif((enemy.x,(enemy.y)+Enemy.vel) in Board.triangle):\n\t\t\tBoard.board[enemy.x][(enemy.y)+Enemy.vel]='.'\n\n\t\tif(Board.board[enemy.x][enemy.y]=='M'):\n\t\t\tMario.lives -= 1\n\t\t\tif Mario.lives<=0:\n\t\t\t\treturn \"exit\"\n\t\t\tos.system('clear')\n\t\t\tprint(\"\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\t\\t\\t\\t\\tNumber of Mario left:\",Mario.lives)\n\t\t\tMario.respawn(enemy.x,enemy.y)\t\t\n\t\t\ttime.sleep(2)\n\n\t\tBoard.board[enemy.x][enemy.y]='@'", "def backward(self, top, propagate_down, bottom):\r\n pass", "def lift_down(self):\n\n # Can't reuse set_lift_pos due to bug above\n bottom_limit = self.get_lift_limit()\n self.send(self.cmd.SET_LIFT_SET, bottom_limit)", "def down():\n turtleTmp.pendown()", "def backward(self, top, propagate_down, bottom):\n pass", "def backward(self, top, propagate_down, bottom):\n pass", "def backward(self, top, propagate_down, bottom):\n pass", "def backward(self, top, propagate_down, bottom):\n pass", "def backward(self, top, propagate_down, bottom):\n pass", "def backward(self, top, propagate_down, bottom):\n pass", "def backward(self, top, propagate_down, bottom):\n pass", "def backward(self, top, propagate_down, bottom):\n pass", "def backward(self, top, propagate_down, bottom):\n pass", "def backward(self, top, propagate_down, bottom):\n pass", "def backward(self, top, propagate_down, bottom):\n pass", "def backward(self, top, propagate_down, bottom):\n pass", "def backward(self, top, propagate_down, bottom):\n pass", "def move_down(self):\n\t\treturn self._move(up=False)", "def withdraw(self, amount):\n self.balance -= amount\n if self.balance < 10:\n self.balance -= 5\n self.fees += 5", "def move_down(self, num=1):\n self.position += num\n if self.moved:\n return self.refresh()\n return u''", "def backward_step():\n #print 'a step backward'\n maze.turn_left()\n maze.turn_left()\n if maze.found():\n return maze.found()\n maze.go()\n maze.turn_left()\n maze.turn_left()", "def advance_by(self, amount: float):\n if amount < 0:\n raise ValueError(\"cannot retreat time reference: amount {} < 0\"\n .format(amount))\n self.__delta += amount", "def move_turtle(self):\n self.forward(self.move_speed)", "def move_back(self):\n\n # slowly drive backwards\n self.velocity = -1 * const.Driving.CAUTIOUS_VELOCITY\n self.angle = const.Driving.NEUTRAL_STEERING_ANGLE\n\n # drive as long there is enough space to the next vehicle or obstacle\n gap = self.formation.calc_gap()\n self.start_driving()\n while self.sensor_manager.rear > gap: continue\n\n self.stop_driving()", "def right_backward(self, state, speed):\n if state:\n self.right_motor.run_forever(speed_sp=-speed)\n ev3.Leds.set_color(ev3.Leds.RIGHT, ev3.Leds.RED)\n else:\n self.right_motor.stop()\n ev3.Leds.set_color(ev3.Leds.RIGHT, ev3.Leds.BLACK)", "def move_down(self):\n return 
self._move(up=False)", "def backward(self):\n self.units = self._units_history.pop()\n self._backward()\n # We must set the utop to previous state immediately, because the utop could be other gate's input unit\n # And other gate's backward could be called before this gate's backward\n self._utop_history.pop()\n if self._utop_history:\n self.utop = self._utop_history[-1]", "def backward_character():\r\n set_point(point().offset(-1))", "def backward(self, top, propagate_down, bottom):\n pass", "def backward(self, top, propagate_down, bottom):\n pass", "def backward(self, top, propagate_down, bottom):\n pass", "def backward(self, top, propagate_down, bottom):\n\t\tpass", "def bring_down(self):\n\n self.move(self.__min_step__)", "def withdraw(self, amount):\n self.deposit(-amount)", "def ramp_down(self):\n value = self.current_event[\"ramp_down\"][\"value\"]\n self.current_value.append(self.current_value[-1] - value)", "def backtrack(self, amount):\n raise NotImplementedError()", "def backward(self, j=1):\n if j < 0:\n return self.forward(-j)\n assert self.__i - j >= 0, 'Cannot move more than %d back' % self.__i\n self.__i -= j\n return self[self.__i:self.__i + j]", "def reverse(self):\n print(\"Reversing\")\n self.miles -= 5\n return self", "def backward(self):\n #print('backward\\r')\n self.linearVector = Vector3(x=-1.0, y=0.0, z=0.0)\n self.angularVector = Vector3(x=0.0, y=0.0, z=0.0)", "def do_down(self, arg):\r\n moveDirection('down')", "def move_bolt_down(self):\n self.y -= self._velocity", "def up(self):\n self.forward(MOVE_DISTANCE)", "def backward(self, j=1):\n if j < 0:\n return self.forward(-j)\n assert self.__i - j >= 0, 'Cannot move more than %d backward' % self.__i\n self.__i -= j\n return self[self.__i:self.__i+j]", "def backward(self):\n raise NotImplementedError", "def goRight(self, seconds):\n self.change_x = 5", "def forward(self,distance):\n assert (type(distance) in [int, float]), \"parameter distance:%s is not a valid number\" % `distance`\n self._turtle.forward(distance)", "def backward(self, grad, index):\n pass", "def skip_backward(self) -> None:\n self.keyboard.press(Key.left)\n self.keyboard.release(Key.left)", "def move_down():\n return __maze.move_down()", "def advance(self, amount=1):\n self._current += amount\n self.redraw()", "def move_down(self):\n\n next_sibling = self.get_next_sibling()\n if next_sibling!=None: \n self.move_to(next_sibling,'right')\n self.save()", "def backward(self, left_speed, right_speed):\n self.left_motor.run_forever(speed_sp=-left_speed)\n self.right_motor.run_forever(speed_sp=-right_speed)", "def drive_backward(self):\n\n print(f\"{self.make.title()} driving backward.\")", "def decrement_depth(self):\r\n self.depth = self.depth - 1", "def moveturtle(x,y,t):\n t.penup()\n t.goto(x,y)\n t.pendown()", "def withdraw(self, amount):\r\n self.balance = self.balance - amount\r\n self.transactions.append(-amount)\r\n return amount", "def remove_token(self, amount):\n self.M -= amount", "def move_up(self):\n self.move_measurement(-1)", "def deposit(self, amount):\n self.balance += amount" ]
[ "0.82581335", "0.73676413", "0.6859591", "0.6773016", "0.67576206", "0.6722952", "0.67022717", "0.6687509", "0.66345984", "0.6610055", "0.65762097", "0.6467502", "0.6444055", "0.6443134", "0.6412966", "0.6340994", "0.63096267", "0.6303683", "0.62736183", "0.62425214", "0.6214122", "0.61998975", "0.6198537", "0.61974204", "0.61974204", "0.61464936", "0.6139964", "0.61381555", "0.6136587", "0.611242", "0.61084193", "0.60363764", "0.6026732", "0.6015082", "0.60125136", "0.599627", "0.5993865", "0.59813696", "0.5975397", "0.5975075", "0.59728724", "0.59465164", "0.5919298", "0.59177643", "0.59102607", "0.5901992", "0.5901992", "0.5901992", "0.5901992", "0.5901992", "0.5901992", "0.5901992", "0.5901992", "0.5901992", "0.5901992", "0.5901992", "0.5901992", "0.5901992", "0.58978283", "0.5889762", "0.58849615", "0.58738744", "0.5856886", "0.5855043", "0.5847939", "0.58404386", "0.58264685", "0.5824243", "0.5804692", "0.5799413", "0.5799413", "0.5799413", "0.5792132", "0.5782134", "0.57458454", "0.5738802", "0.5736754", "0.5731602", "0.57121515", "0.56909895", "0.568623", "0.5684728", "0.56706357", "0.56627977", "0.5656352", "0.56561536", "0.5635422", "0.56301886", "0.5624485", "0.56179297", "0.56118166", "0.5600184", "0.55920655", "0.5579224", "0.5567051", "0.55593336", "0.555589", "0.5553503", "0.5528116", "0.5506988" ]
0.78231084
1
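
For illustration, a minimal runnable sketch of the backward wrapper documented above, ported to Python 3 (the document itself is Python 2: its backquotes are the old repr syntax, replaced here by repr(); the Pen class name and the turtle.Turtle attribute are assumptions read off the snippet):

import turtle

class Pen(object):
    def __init__(self):
        self._turtle = turtle.Turtle()

    def backward(self, distance):
        # same type check as the document, with repr() in place of Python 2 backquotes
        assert type(distance) in [int, float], "parameter distance:%s is not a valid number" % repr(distance)
        self._turtle.backward(distance)

pen = Pen()
pen.backward(50)  # retreats 50 pixels along the opposite of the current heading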
Turns the turtle to the right by the given amount.
def right(self,degrees):
    assert (type(degrees) in [int, float]), "parameter degrees:%s is not a valid number" % `degrees`
    self._turtle.right(degrees)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def right(self, amount):\n self.setheading(self._rotation + amount)", "def right(self, angle):\r\n self.dir += math.radians(angle)", "def go_right(self):\n self.change_x = 6\n self.direction = \"R\"", "def turn_right(self):\n temp = self.direction[0]\n self.direction[0] = -self.direction[1]\n self.direction[1] = temp", "def turn_right(self):\n turn = self.__heading - Ship.TURN\n if turn < Ship.MIN_HEADING:\n turn += Ship.MAX_HEADING\n self.__heading = turn", "def right(length, depth):\n turtle.setheading(300)\n turtle.forward(length)\n binary_tree(length / 2, depth - 1)", "def move_right(self, num):\n self.right_position = num", "def move_right(self):\n\n if self.xcor() > 230:\n self.setx(250)\n else:\n new_x = self.xcor() + 40\n self.setx(new_x)", "def go_right(self):\n self.change_x = 6", "def go_right(self):\n self.change_x = 6", "def right(self, angle):\r\n self.rotation += angle", "def move_right(self, distance):\r\n return self.move('right', distance)", "def right(self):\n self.move(1,0)", "def turn_right(self):\n pass", "def goRight(self, seconds):\n self.change_x = 5", "def MoveRightStep(self):\n if self.facing == 0:\n self.facing = 1\n self.x += self.stepLeft\n elif self.facing == 1:\n self.facing = 2\n self.y += self.stepUp\n elif self.facing == 2:\n self.facing = 3\n self.x -= self.stepRight\n elif self.facing == 3:\n self.facing = 0\n self.y -= self.stepDown", "def turn_ship_right(self):\n self.degrees -= movement", "def move_right(self):\n self.rect.x += 5 # Moves to the right by 5\n\n # If the player reaches the edge of the screen, they can't go further\n if self.rect.x >= 580:\n self.rect.x = 580", "def move_right(self,distance):\n self.turn_right()\n self.move_forward(distance)\n # self.log_arr.append(\"right\")", "def turn_right(self):\n self.facing_direction += self.config\n if self.facing_direction > 7:\n self.facing_direction -= 8\n self.x, self.y = self.compute_positions()", "def rotate_turtle(angle, mv_direction):\n \n if mv_direction == 1:\n turtle.right(angle)\n else:\n turtle.left(angle)", "def right(self, angle: Degrees):\n prev = self.angle\n self.angle = (self.angle + angle) % 360.0", "def go_right(self):\n self.rect.centerx += self.__dx", "def right(self, angle):\n self._rotate(-angle)", "def move_right(self, dist):\r\n self.send_command_without_response(f'right {dist}')", "def turn_right(self, speed):\n\t\t# You should modify the bias of 4 wheels depending on your hardware.\n\t\tself._front_left_wheel.anticlockwise_rotate(speed + LEFT_FR_BIAS + LEFT_RIGHT_BIAS)\n\t\tself._front_right_wheel.clockwise_rotate(1 + RIGHT_FR_BIAS)\n\t\tself._rear_left_wheel.anticlockwise_rotate(speed + LEFT_RIGHT_BIAS)\n\t\tself._rear_right_wheel.clockwise_rotate(1)", "def turn_right(self, angle_degrees, rate=RATE):\n action = TurnRight(angle_degrees, rate=rate)\n goal = PositionControlGoal(pickle.dumps(action))\n self._add_mc_goal(goal)", "def move_right(self):\n\t\tself.set_x_vector(constants.DONKEY_SPEED)", "def move_right(self):\r\n self.left += self.__speed", "def move_right(self):\n self.yaw_motor.step_forward()", "def move_turtle(self):\n self.forward(self.move_speed)", "def right():\n Robot.rotate(\"RIGHT\")", "def random_move(turtle, distance):\n angle = uniform(-90,90)\n d = uniform(0,distance)\n turtle.left(angle)\n turtle.forward(d)", "def turn_right(self, duration):\n self.set_motor(self.left_motor, 'left', 0.5)\n self.set_motor(self.right_motor, 'left', 0.5)\n time.sleep(duration)", "def move_right(self):\n if self.change_valid(dx=1):\n self.x += 1", "def 
go_right(self):\n self.rect.centerx += 9", "def spin_right(self, speed):\n self.controller.spin_right(speed)", "def rotate_right(self, angle, maze, game_display):\n for _ in range(angle):\n self.rotate(maze=maze, direction=1, game_display=game_display)", "def turn_right(self):\n self.direction_mod_offset += 1\n self.calculate_offset_mapping()\n direction_num = self.direction_mod_offset % len(self.direction_arr)\n client.rotateToYawAsync(direction_num * 90).join()", "def set_right(self, spd):\n self.r_motor.set(spd)", "def turnRight(ev3):\n ev3.set_angle(\"A\", \"30\", \"90\")\n ev3.set_angle(\"B\", \"-30\", \"-90\")\n ev3.set_angle(\"C\", \"30\", \"90\")", "def rotate_right(self, speed):\n\t\t# You should modify the bias of 4 wheels depending on your hardware.\n\t\tself._front_left_wheel.anticlockwise_rotate(speed + LEFT_FR_BIAS + LEFT_RIGHT_BIAS)\n\t\tself._front_right_wheel.anticlockwise_rotate(speed + RIGHT_FR_BIAS)\n\t\tself._rear_left_wheel.anticlockwise_rotate(speed + 1 + LEFT_RIGHT_BIAS)\n\t\tself._rear_right_wheel.anticlockwise_rotate(speed)", "def turn(self, dir):\n if dir.upper() == 'R':\n if self.direction == 3:\n self.direction = 0\n else:\n self.direction += 1\n if dir.upper() == 'L':\n if self.direction == 0:\n self.direction = 3\n else:\n self.direction -= 1", "def draw_flower():\n turtle.right(45)\n draw_petal()\n turtle.right(90)\n draw_petal()\n turtle.right(90)\n draw_petal()\n turtle.right(90)\n draw_petal()\n turtle.right(135)\n turtle.forward(150)", "def _move_right(self):\n self.x += self.settings.mario_speed\n if self.settings.direction == -1:\n self.image = pygame.transform.flip(self.image, True, False)\n self.settings.direction = 1", "def left(self, amount):\n self.setheading(self._rotation - amount)", "def rotateRight(self):\n self.faceHeading+=-1*shipRotationSpeed\n self.reDraw()", "def repositionTurtle(t, x, y):\n t.up()\n t.goto(x, y)\n t.down()", "def move_right(self):\r\n if self.rect.right < BG_WIDTH:\r\n self.rect.right += self.speed", "def right(self, angle):\n self.matrix = matrixMultiply(yawMatrix(angle), self.matrix)\n self.directionOut()\n self.delay()", "def turn(self, is_right):\n if is_right:\n self.heading.rotate(1)\n else:\n self.heading.rotate(-1)", "def backward(self, amount):\n newX = self._x - round(amount * math.sin(math.radians(self._rotation)), 2)\n newY = self._y + round(amount * math.cos(math.radians(self._rotation)), 2)\n self.goto(newX, newY)", "def right(self):\n if self.head.heading() != LEFT and self.last_direction != LEFT:\n self.head.setheading(RIGHT)", "def rotate(self,amount):\n self.angle += amount\n if self.drawn == True:\n self.draw()", "def settle(self):\n if (self.angle >= self.max_angle) or (\n self.angle <= -self.max_angle\n ): # time to reverse\n print(\"reverse\", self.angle, self.max_angle)\n self.speed *= -0.9 # damped\n self.max_angle *= 0.9\n if self.speed > 0:\n self.angle = self.max_angle\n else:\n self.angle = -self.max_angle\n\n self.angle += radians(self.speed)\n print(self.angle, self.max_angle, self.speed)\n self.x = self.cx + self.length * sin(self.angle)\n self.y = self.cy + self.length * cos(self.angle)", "def right(self, speed):\n self.controller.front_left_forward(speed)\n self.controller.front_right_backward(speed)\n self.controller.rear_left_backward(speed)\n self.controller.rear_right_forward(speed)", "def rotate_right(self):\n current = compass.index(self.heading)\n return replace(self, heading=compass[(current + 1) % 4])", "def MoveBasicRight(self):\n if self.facing == 0:\n self.facing = 1\n self.x 
+= 1\n elif self.facing == 1:\n self.facing = 2\n self.y += 1\n elif self.facing == 2:\n self.facing = 3\n self.x -= 1\n elif self.facing == 3:\n self.facing = 0\n self.y -= 1", "def moveturtle(x,y,t):\n t.penup()\n t.goto(x,y)\n t.pendown()", "def move_right(self, step: int = 1) -> None:\n if self.cursor_pos.y < self.width - 1:\n self.cursor_pos = Point(self.cursor_pos.x, self.cursor_pos.y+step)\n else:\n self.cursor_pos = Point(self.cursor_pos.x, 0)", "def moveDown():\n tt.right(90)\n tt.forward(60)\n tt.right(90)\n tt.forward(250)\n tt.right(180)", "def move_right():\n return __maze.move_right()", "def spin_right(self, speed, degrees):\n print('turn right')\n self.robot.drive_system.right_motor.turn_on(speed)\n self.robot.drive_system.left_motor.turn_on(-speed)\n while True:\n print('in while loop')\n if self.robot.drive_system.right_motor.get_position() / 5.5 <= \\\n -degrees:\n self.robot.drive_system.right_motor.turn_off()\n self.robot.drive_system.left_motor.turn_off()\n self.robot.drive_system.right_motor.reset_position()\n break", "def forward(self, amount):\n newX = self._x + round(amount * math.sin(math.radians(self._rotation)), 2)\n newY = self._y - round(amount * math.cos(math.radians(self._rotation)), 2)\n self.goto(newX, newY)", "def advance(self, amount=1):\n self._current += amount\n self.redraw()", "def right(self, speed):\n vrep.simxSetJointTargetVelocity(self.client_id, self.handles['rollingJoint_rr' + self.postfix], speed,\n ONE_SHOT_MODE)\n vrep.simxSetJointTargetVelocity(self.client_id, self.handles['rollingJoint_rl' + self.postfix], -speed,\n ONE_SHOT_MODE)\n vrep.simxSetJointTargetVelocity(self.client_id, self.handles['rollingJoint_fr' + self.postfix], speed,\n ONE_SHOT_MODE)\n vrep.simxSetJointTargetVelocity(self.client_id, self.handles['rollingJoint_fl' + self.postfix], -speed,\n ONE_SHOT_MODE)", "def right(self, speed=1):\n self.left_motor.forward(speed)\n self.right_motor.backward(speed)", "def rotate_right(self, times: int):\n for i in range(0, times):\n new_rows = [''] * self.side\n self.tile_rows.reverse()\n\n for row in self.tile_rows:\n for i, ch in enumerate(row):\n new_rows[i] += ch\n\n self.tile_rows = new_rows", "def do_right_turn(robot_name):\n global current_direction_index\n\n current_direction_index += 1\n if current_direction_index > 3:\n current_direction_index = 0\n\n return True, ' > '+robot_name+' turned right.'", "def changeLaneRight(self, speed, accel):\n self.changeLane(speed, accel, 44.5)", "def right(self, speed):\n self.pwm_left.ChangeDutyCycle(0)\n self.pwm_right.ChangeDutyCycle(speed)", "def right():\n global x, canvas # x é modificado\n canvas.create_line(x, y, x + 10, y)\n x += 10", "def lose(self, amount: int):\n self.win(-amount)\n return self ## fluent", "def forward_right(self, speed):\n self.pwm_backward.ChangeDutyCycle(0)\n self.pwm_forward.ChangeDutyCycle(speed)\n self.pwm_left.ChangeDutyCycle(0)\n self.pwm_right.ChangeDutyCycle(100)", "def draw_petal():\n turtle.forward(30)\n turtle.left(45)\n turtle.forward(30)\n turtle.left(135)\n turtle.forward(30)\n turtle.left(45)\n turtle.forward(30)\n turtle.left(135)", "def draw_petal():\n turtle.forward(30)\n turtle.left(45)\n turtle.forward(30)\n turtle.left(135)\n turtle.forward(30)\n turtle.left(45)\n turtle.forward(30)\n turtle.left(135)", "def turn_right():\n for i in range(3):\n turn_left()", "def move_rectangle(r,dx,dy):\n\n r.corner.x=r.corner.x+dx\n r.corner.y=r.corner.y+dy\n turtle.setx(r.corner.x)\n turtle.sety(r.corner.y)\n for i in range(2):\n turtle.fd(r.width)\n 
turtle.lt(90)\n turtle.fd(r.height)\n turtle.lt(90)\n return r", "def right(self):\n self.counterUp(teamNumber = 2)", "def walk_right(self, sound): \n \n # Checks if tile to the right of the Player is free, and if they are not in an animation cycle\n if (self.__maze_arrangement[self.__user_x + 1][self.__user_y] != 1) and not self.__animating:\n \n # Sets Player direction to right, animating state to true, moves the Player to the\n # right by one tile, and plays the walking sound effect \n self.__direction = \"RIGHT\"\n self.__animating = True\n self.__user_x += 1\n sound.play()", "def withdraw(self, amount):\n self.balance -= amount\n if self.balance < 10:\n self.balance -= 5\n self.fees += 5", "def right(event):\n if event.action == sense_hat.ACTION_RELEASED:\n snake.changeDirection(RIGHT)", "def rotate_right(self):\n\t\ttemp = self.left\n\t\tself.left = temp.right\n\t\ttemp.right = self\n\t\tself = temp", "def right_forward(self, state, speed):\n if state:\n self.right_motor.run_forever(speed_sp=speed)\n ev3.Leds.set_color(ev3.Leds.RIGHT, ev3.Leds.GREEN)\n else:\n self.right_motor.stop()\n ev3.Leds.set_color(ev3.Leds.RIGHT, ev3.Leds.BLACK)", "def advance_by(self, amount: float):\n if amount < 0:\n raise ValueError(\"cannot retreat time reference: amount {} < 0\"\n .format(amount))\n self.__delta += amount", "def move(self,amount):\n self.positionx=self.positionx+self.amount\n return self.positionx", "def right_twist(self):\n self.turn_by_deg(180)\n #time.sleep(.1)\n self.stop()\n self.turn_by_deg(180)\n #time.sleep(.1)\n self.stop()", "def moveRight(self):\n if self._position.x != 14:\n self._position.x +=1\n return True\n return False", "def rotate_subtree_right(subtree):\n left = subtree.left\n subtree.left = left.right\n left.right = subtree\n left.colour = subtree.colour\n subtree.colour = True # set red\n left.size = subtree.size\n subtree.size = size_node(subtree.left) + size_node(subtree.right) + 1\n return left", "def TransformRightMovement(field):\n i = 0\n side = int(math.sqrt(len(field)))\n while i < len(field):\n j = (i + side) - 1\n line = []\n for x in range(j, i - 1, -1):\n line.append(field[x])\n line = move(line)\n k = 0\n for x in range(j, i - 1, -1):\n field[x] = line[k]\n k = k + 1\n i = i + side\n return field", "def draw_square():\r\n turtle.forward(100)\r\n turtle.left(90)\r\n turtle.forward(100)\r\n turtle.left(90)\r\n turtle.forward(100)\r\n turtle.left(90)\r\n turtle.forward(100)\r\n turtle.left(90)", "def desenha_quadrado(lado):\n for i in range(4):\n turtle.forward(lado)\n turtle.left(90)", "def increase_right_boundary(self):\n self.R = self.R + 1.0\n self.Ne = self.Ne + 1", "def right_turn(self, left_speed, right_speed):\n self.left_motor.run_forever(speed_sp=left_speed)\n self.right_motor.run_forever(speed_sp=-right_speed)", "def move(self,amount):\n angle=self.dirction/180*math.pi\n self.postionx += amount*math.cos(angle)\n self.postiony += amount*math.sin(angle)", "def wandering_turtle():\n u = turtle.Turtle()\n u.shape(\"turtle\")\n u.color(\"green\")\n t.color(\"red\")\n for i in [t, u]:\n i.penup()\n i.setpos(random.randrange(-300,300), random.randrange(-300,300))\n i.pendown()\n\n while True:\n for t1, t2 in [(t, u), (u, t)]:\n coin = random.randrange(2)\n angle = random.randrange(360)\n if coin:\n t1.left(angle)\n else:\n t1.right(angle)\n t1.forward(50)\n if t1.distance(0,0) > 390 or t1.distance(t2) < 25:\n t1.setpos(0,0)", "def right(self):\r\n z = len(direction_tuple)\r\n if self.d in direction_tuple:\r\n index = direction_tuple.index(self.d)\r\n 
if index == (z-1):\r\n self.d = direction_tuple[0]\r\n else:\r\n self.d = direction_tuple[index + 1]\r\n else:\r\n print(\"NO VALID ROBOT POSITION\")", "def player_right(self) -> None:\n self._routes[self._current_route_key][\"RIGHT\"] += 1\n new_pos = self._player.x + self.MOVE_INC\n if new_pos + self.PLAYER_DIM <= self._height and new_pos - self.PLAYER_DIM >= 0:\n self._player.x = new_pos", "def addRightSpeed(self, newSpeed):\n ns = self.rightWeelSpeed + newSpeed\n ns = min(ns, self.maxSpeed)\n ns = max(ns, self.minSpeed)\n e = vrep.simxSetJointTargetVelocity(self.clientID, self.rightMotor, ns, vrep.simx_opmode_oneshot_wait)\n self.erCheck(e, 'rightMotor')", "def forward(self,distance):\n assert (type(distance) in [int, float]), \"parameter distance:%s is not a valid number\" % `distance`\n self._turtle.forward(distance)" ]
[ "0.75367314", "0.690994", "0.6860483", "0.6700921", "0.6642686", "0.6610772", "0.6585941", "0.65618926", "0.63780653", "0.63780653", "0.63710123", "0.6350068", "0.63473946", "0.6344162", "0.62829196", "0.6280385", "0.6238296", "0.6236012", "0.62327635", "0.6231904", "0.6229835", "0.6215596", "0.6164251", "0.61625475", "0.6147705", "0.6135492", "0.6117227", "0.61111283", "0.60968626", "0.6096521", "0.60622424", "0.6021712", "0.6021679", "0.6021483", "0.602103", "0.602049", "0.600516", "0.59648585", "0.59219116", "0.58735764", "0.5834272", "0.58232105", "0.57985705", "0.5774613", "0.5763594", "0.5731606", "0.5724345", "0.5710923", "0.56900656", "0.56834596", "0.5677833", "0.56741875", "0.5673021", "0.56421506", "0.5640812", "0.5622429", "0.5618946", "0.56154203", "0.5599685", "0.5577186", "0.5565558", "0.5557873", "0.55520743", "0.5543178", "0.55368507", "0.553334", "0.5523504", "0.55190337", "0.5517918", "0.55086267", "0.5499735", "0.5489308", "0.5487189", "0.5477591", "0.54726404", "0.54726404", "0.5469473", "0.54652715", "0.5450494", "0.5432583", "0.5430004", "0.54038614", "0.5387192", "0.53696203", "0.53617144", "0.53537726", "0.5353534", "0.5353415", "0.5349199", "0.53399926", "0.53344756", "0.53307533", "0.53202087", "0.53187764", "0.5317227", "0.5308187", "0.53016585", "0.5286566", "0.5282259", "0.52815175" ]
0.66845566
4
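
A short heading check for the right wrapper above, assuming only the standard-library turtle module these snippets delegate to (in turtle's default mode, right() turns clockwise, i.e. decreases the heading):

import turtle

t = turtle.Turtle()
start = t.heading()              # 0.0 by default (pointing east)
t.right(90)
assert t.heading() == (start - 90) % 360  # clockwise turn decreases the heading
t.forward(100)                   # draw a line along the new heading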
Turns the turtle to the left by the given amount.
def left(self,degrees):
    assert (type(degrees) in [int, float]), "parameter degrees:%s is not a valid number" % `degrees`
    self._turtle.left(degrees)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def left(self, amount):\n self.setheading(self._rotation - amount)", "def move_left(self):\n\n if self.xcor() < -230:\n self.setx(-255)\n else:\n new_x = self.xcor() - 40\n self.setx(new_x)", "def go_left(self):\n self.change_x = -6\n self.direction = \"L\"", "def turn_left(self):\n\t\tself.direction = (self.direction - 1)%4", "def left(self, angle):\r\n self.dir -= math.radians(angle)", "def right(self, amount):\n self.setheading(self._rotation + amount)", "def move_left(self, num):\n self.left_postion = num", "def left(self, angle):\n self.right(-angle)", "def turn_left(self):\n temp = self.direction[0]\n self.direction[0] = self.direction[1]\n self.direction[1] = -temp", "def go_left(self):\n self.change_x = -6", "def go_left(self):\n self.change_x = -6", "def left(length, depth):\n turtle.setheading(240)\n turtle.forward(length)\n binary_tree(length / 2, depth - 1)", "def go_left(self):\n self.rect.centerx -= self.__dx", "def move_left(self):\n self.rect.x -= 5 # Moves to the left by 5\n\n # If the player reaches the edge of the screen, they can't go further\n if self.rect.x <= -50:\n self.rect.x = -50", "def turn_left(self):\n turn = self.__heading + Ship.TURN\n if turn >= Ship.MAX_HEADING:\n turn -= Ship.MAX_HEADING\n self.__heading = turn", "def go_left(self):\n self.rect.centerx -= 9", "def MoveLeftStep(self):\n if self.facing == 0:\n self.facing = 3\n self.x -= self.stepLeft\n elif self.facing == 1:\n self.facing = 0\n self.y -= self.stepUp\n elif self.facing == 2:\n self.facing = 1\n self.x += self.stepRight\n elif self.facing == 3:\n self.facing = 2\n self.y += self.stepDown", "def left(self):\n self.move(-1, 0)", "def move_left(self, distance):\r\n return self.move('left', distance)", "def left(self):\n if self.head.heading() != RIGHT and self.last_direction != RIGHT:\n self.head.setheading(LEFT)", "def move_left(self):\n\t\tself.set_x_vector(-1 * constants.DONKEY_SPEED)", "def left(self):\n if self.pos > 0:\n self.pos -= 1", "def goLeft(self, seconds):\n self.change_x = -5", "def move(self,amount):\n self.positionx=self.positionx+self.amount\n return self.positionx", "def move_left(self):\n if self.change_valid(dx=-1):\n self.x -= 1", "def move_left(self):\r\n if self.rect.left > 0:\r\n self.rect.left -= self.speed", "def turn_ship_left(self):\n self.degrees += movement", "def left(self, angle):\r\n self.rotation -= angle", "def left(self, angle: Degrees):\n prev = self.angle\n self.angle = self.angle - angle\n if self.angle < 0:\n self.angle += 360.0", "def move_left(self,distance):\n self.turn_left()\n self.move_forward(distance)\n # self.log_arr.append(\"left\")", "def left(self, angle):\n self._rotate(angle)", "def move_right(self):\n\n if self.xcor() > 230:\n self.setx(250)\n else:\n new_x = self.xcor() + 40\n self.setx(new_x)", "def steerleft(self):\n self.direction = self.direction+self.steering\n if self.direction > 360:\n self.direction = 0+90\n self.image, self.rect = rot_center(self.image_orig,self.rect,self.direction)", "def forward(self, amount):\n newX = self._x + round(amount * math.sin(math.radians(self._rotation)), 2)\n newY = self._y - round(amount * math.cos(math.radians(self._rotation)), 2)\n self.goto(newX, newY)", "def turn_left(self):\n self.facing_direction -= self.config\n if self.facing_direction < 0:\n self.facing_direction += 8\n self.x, self.y = self.compute_positions()", "def turn_left(self, duration):\n self.set_motor(self.left_motor, 'right', 0.5)\n self.set_motor(self.right_motor, 'right', 0.5)\n time.sleep(duration)", "def go_right(self):\n 
self.change_x = 6", "def go_right(self):\n self.change_x = 6", "def set_left(self, spd):\n self.l_motor.set(-spd)", "def move_left(self, step: int = 1) -> None:\n if self.cursor_pos.y == 0:\n self.cursor_pos = Point(self.cursor_pos.x, self.width-step)\n else:\n self.cursor_pos = Point(self.cursor_pos.x, self.cursor_pos.y-step)", "def move_left(self):\n self.yaw_motor.step_backward()", "def left():\n Robot.rotate(\"LEFT\")", "def random_move(turtle, distance):\n angle = uniform(-90,90)\n d = uniform(0,distance)\n turtle.left(angle)\n turtle.forward(d)", "def go_right(self):\n self.change_x = 6\n self.direction = \"R\"", "def move_left(self, dist):\r\n self.send_command_without_response(f'left {dist}')", "def turn_left(self): #py:UR.turn_left\n RUR._UR.turn_left_(self.body)", "def rotate_left(self, angle, maze, game_display):\n for _ in range(angle):\n self.rotate(maze=maze, direction=-1, game_display=game_display)", "def left():\n global x, canvas # x é modificado\n canvas.create_line(x, y, x - 10, y)\n x -= 10", "def move_turtle(self):\n self.forward(self.move_speed)", "def turn_left(self, speed):\n\t\t# You should modify the bias of 4 wheels depending on your hardware.\n\t\tself._front_left_wheel.anticlockwise_rotate(1 + LEFT_FR_BIAS + LEFT_RIGHT_BIAS)\n\t\tself._front_right_wheel.clockwise_rotate(speed + RIGHT_FR_BIAS)\n\t\tself._rear_left_wheel.anticlockwise_rotate(1 + LEFT_RIGHT_BIAS)\n\t\tself._rear_right_wheel.clockwise_rotate(speed)", "def spin_left(self, speed):\n self.controller.spin_left(speed)", "def right(self):\n self.move(1,0)", "def go_right(self):\n self.rect.centerx += self.__dx", "def rotate_left(self):\n if self.change_valid(dr=-1):\n self.rotate = (self.rotate-1)%4", "def turn_left(self, angle_degrees, rate=RATE):\n action = TurnLeft(angle_degrees, rate=rate)\n goal = PositionControlGoal(pickle.dumps(action))\n self._add_mc_goal(goal)", "def right(self, angle):\r\n self.dir += math.radians(angle)", "def go_right(self):\n self.rect.centerx += 9", "def _move_left(self):\n self.x -= self.settings.mario_speed\n if self.settings.direction == 1:\n self.image = pygame.transform.flip(self.image, True, False)\n self.settings.direction = -1", "def move_left():\n return __maze.move_left()", "def turnLeft(ev3):\n ev3.set_angle(\"A\", \"-30\", \"-90\")\n ev3.set_angle(\"B\", \"30\", \"90\")\n ev3.set_angle(\"C\", \"-30\", \"-90\")", "def spin_left(self, speed, degrees):\n print('turn left')\n self.robot.drive_system.right_motor.turn_on(-speed)\n self.robot.drive_system.left_motor.turn_on((speed))\n while True:\n if self.robot.drive_system.right_motor.get_position() / 5.5 >= \\\n degrees:\n self.robot.drive_system.right_motor.turn_off()\n self.robot.drive_system.left_motor.turn_off()\n self.robot.drive_system.right_motor.reset_position()\n break", "def turn_left(self):\n self.direction_mod_offset -= 1\n self.calculate_offset_mapping()\n direction_num = self.direction_mod_offset % len(self.direction_arr)\n client.rotateToYawAsync(direction_num * 90).join()", "def turn_left(self):\n pass", "def move_right(self, num):\n self.right_position = num", "def rotate_turtle(angle, mv_direction):\n \n if mv_direction == 1:\n turtle.right(angle)\n else:\n turtle.left(angle)", "def rotate_left(self, speed):\n\t\t# You should modify the bias of 4 wheels depending on your hardware.\n\t\tself._front_left_wheel.clockwise_rotate(speed + LEFT_FR_BIAS + LEFT_RIGHT_BIAS)\n\t\tself._front_right_wheel.clockwise_rotate(speed + RIGHT_FR_BIAS)\n\t\tself._rear_left_wheel.clockwise_rotate(speed + 1 + 
LEFT_RIGHT_BIAS)\n\t\tself._rear_right_wheel.clockwise_rotate(speed)", "def shift_display(self, amount):\n if amount == 0:\n return\n direction = self.LCD_MOVERIGHT if amount > 0 else self.LCD_MOVELEFT\n for i in range(abs(amount)):\n self.command(self.LCD_CURSORSHIFT | self.LCD_DISPLAYMOVE | direction)\n self._usleep(50)", "def rotate_left(self):\n\t\ttemp = self.right\n\t\tself.right = temp.left\n\t\ttemp.left = self\n\t\tself = temp", "def turn_left(): #py:turn_left\n RUR._turn_left_()", "def move_left(state: State) -> State:\n assert state.index >= 0\n if not state.index:\n return state\n\n assert state.index > 0\n return state._replace(index=state.index - 1)", "def rotateLeft(self):\n self.faceHeading+=shipRotationSpeed\n self.reDraw()", "def rotate_left(self, x=None):\n if x is None:\n x = self.root\n assert x.right is not self.nil, \"Cannot rotate any further left\"\n\n # Right node replaces x as the top node\n y = x.right\n\n # Move new top node's child under old top node.\n x.right = y.left\n if x.right is not self.nil:\n x.right.parent = x\n\n # Set new node's parent and fix child relationships.\n y.parent = x.parent\n if y.parent is self.nil:\n self.root = y\n elif x is y.parent.left:\n y.parent.left = y\n else:\n y.parent.right = y\n\n # Old node moves left of new node.\n y.left = x\n x.parent = y\n\n # Update the balance.\n x.balance += 1 - min(y.balance, 0)\n y.balance += 1 + max(x.balance, 0)", "def left(self):\r\n if self.d in direction_tuple:\r\n index = direction_tuple.index(self.d)\r\n if index == 0:\r\n self.d = direction_tuple[3]\r\n else:\r\n self.d = direction_tuple[index - 1]\r\n else:\r\n print(\"NO VALID ROBOT POSITION\")", "def goRight(self, seconds):\n self.change_x = 5", "def left_steering(measurement):\n measurement = (measurement + CORRECTION_FACTOR)\n return measurement", "def move_left_everything(equation):\n\tright_flipped = ''\n\ttwo_sides = equation.split('=')\n\tassert len(two_sides) == 2, \"\"\"\\033[1;91mERROR: Equation must have single '=' sign\\033[0m\"\"\"\n\tassert two_sides[0] and two_sides[1], \"\"\"\\033[1;91mERROR: Equation left and right sides must be non-empty\\033[0m\"\"\"\n\tif two_sides[1] != '0':\n\t\tright_flipped = two_sides[1].replace('-', '%temp%').replace('+', '-').replace('%temp%', '+')\n\tif right_flipped and not right_flipped[0] in ['-', '+']:\n\t\tright_flipped = '-' + right_flipped\n\treturn two_sides[0] + right_flipped", "def advance(self, amount=1):\n self._current += amount\n self.redraw()", "def left_twist(self):\n self.turn_by_deg(-179)\n #time.sleep(.1)\n self.stop()\n self.turn_by_deg(-179)\n #time.sleep(.1)\n self.stop()", "def shift_display(self, amount):\n if amount == 0:\n return\n direction = _LCD_MOVERIGHT if amount > 0 else _LCD_MOVELEFT\n for i in range(abs(amount)):\n self.command(_LCD_CURSORSHIFT | _LCD_DISPLAYMOVE | direction)\n time.sleep(50*MICROSECOND)", "def rotate_left(self):\n assert self.right.color == RED\n x = self.right\n self.right = x.left\n\n x.parent = self.parent\n self.parent = x\n\n if x.left:\n x.left.parent = self\n x.left = self\n x.color = self.color\n self.color = RED\n return x", "def move_item_left(self, event=None, index=None):\n if index is None:\n index = self.selected_index\n if index <= 0:\n return wx.Bell()\n level = self.item_level(index)\n if level==0 or ( index+1 < self.items.GetItemCount() and (level < self.item_level(index+1)) ):\n return wx.Bell()\n label = self._get_item_text(index, \"label\")\n if level==1 and label.endswith(\"---\"):\n return wx.Bell()\n level -= 1\n 
self._set_item_string(index, \"label\", label[4:])\n self._set_item_string(index, \"level\", level)\n self.items.SetItemState(index, wx.LIST_STATE_SELECTED, wx.LIST_STATE_SELECTED)\n self._enable_buttons()", "def backward(self, amount):\n newX = self._x - round(amount * math.sin(math.radians(self._rotation)), 2)\n newY = self._y + round(amount * math.cos(math.radians(self._rotation)), 2)\n self.goto(newX, newY)", "def left(self, speed=1):\n self.right_motor.forward(speed)\n self.left_motor.backward(speed)", "def move_right(self):\r\n self.left += self.__speed", "def right(length, depth):\n turtle.setheading(300)\n turtle.forward(length)\n binary_tree(length / 2, depth - 1)", "def changeLaneLeft(self, speed, accel):\n self.changeLane(speed, accel, -44.5)", "def moveLeft(self):\n if self._position.x != 0:\n self._position.x -=1\n return True\n return False", "def do_left_turn(robot_name):\n global current_direction_index\n\n current_direction_index -= 1\n if current_direction_index < 0:\n current_direction_index = 3\n\n return True, ' > '+robot_name+' turned left.'", "def pos_left(self, x=1):\n\n self.x -= x\n return self.pos(self.x, self.y)", "def right(self,degrees):\n assert (type(degrees) in [int, float]), \"parameter degrees:%s is not a valid number\" % `distance`\n self._turtle.right(degrees)", "def move_right(self):\n\t\tself.set_x_vector(constants.DONKEY_SPEED)", "def turn_around():\n for i in range(2):\n turn_left()", "def left(self, speed):\n self.controller.front_left_backward(speed)\n self.controller.front_right_forward(speed)\n self.controller.rear_left_forward(speed)\n self.controller.rear_left_backward(speed)", "def move_right(self):\n if self.change_valid(dx=1):\n self.x += 1", "def turn_right(self):\n turn = self.__heading - Ship.TURN\n if turn < Ship.MIN_HEADING:\n turn += Ship.MAX_HEADING\n self.__heading = turn", "def lose(self, amount: int):\n self.win(-amount)\n return self ## fluent", "def assign_leftLimit():\r\n player.rect.x = 25", "def advance_by(self, amount: float):\n if amount < 0:\n raise ValueError(\"cannot retreat time reference: amount {} < 0\"\n .format(amount))\n self.__delta += amount", "def rotate_right_left(self):\n\t\treturn", "def left_forward(self):\n self.left_motor.run_forever(speed_sp=self.MAX_SPEED)" ]
[ "0.780476", "0.70262265", "0.69437015", "0.6939617", "0.6917791", "0.6892099", "0.68578196", "0.6803361", "0.6797746", "0.677668", "0.677668", "0.6751713", "0.67103636", "0.6688399", "0.6688173", "0.66532737", "0.6642124", "0.6549695", "0.652959", "0.6492968", "0.64817894", "0.6465483", "0.64437836", "0.6425751", "0.63424635", "0.6342407", "0.6326384", "0.63019717", "0.62996495", "0.6286133", "0.6253161", "0.62526786", "0.6249394", "0.6220351", "0.62113076", "0.61982805", "0.6169637", "0.6169637", "0.6169613", "0.61694646", "0.614513", "0.61443704", "0.61363214", "0.613251", "0.6130215", "0.6121911", "0.6107866", "0.6103186", "0.60925996", "0.60675865", "0.6049159", "0.6032068", "0.60007876", "0.5987635", "0.59864765", "0.59562", "0.59527254", "0.5939718", "0.5926897", "0.5925081", "0.58920085", "0.5886692", "0.58828396", "0.5871419", "0.5858889", "0.5856404", "0.5853535", "0.58469343", "0.5841406", "0.58384734", "0.58169687", "0.58016294", "0.57878745", "0.57842034", "0.57807636", "0.5779392", "0.575767", "0.57295376", "0.57170147", "0.57160354", "0.5699357", "0.56978375", "0.5689119", "0.56780934", "0.5663358", "0.56507677", "0.5635726", "0.563076", "0.56235105", "0.56208843", "0.56114537", "0.5608399", "0.56041384", "0.56019473", "0.5601885", "0.5599334", "0.559927", "0.5593335", "0.5583412", "0.55805767" ]
0.69226944
4
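
The mirror-image check for the left wrapper, again against the standard-library turtle (counterclockwise, so the heading increases):

import turtle

t = turtle.Turtle()
start = t.heading()
t.left(45)
assert t.heading() == (start + 45) % 360  # counterclockwise turn increases the heading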
Moves the turtle to the given position without drawing.
def move(self,x,y):
    assert (type(x) in [int, float]), "parameter x:%s is not a valid number" % `x`
    assert (type(y) in [int, float]), "parameter y:%s is not a valid number" % `y`
    d = self._turtle.isdown()
    if d:
        self._turtle.penup()
    self._turtle.setposition(x,y)
    if d:
        self._turtle.pendown()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def goto(x, y):\n turtleTmp.setposition(x, y)", "def repositionTurtle(t, x, y):\n t.up()\n t.goto(x, y)\n t.down()", "def move_turtle(self):\n self.forward(self.move_speed)", "def move_turtle(self, x, y):\n tortuga = self.turtle\n if self.capture_mode:\n tortuga.setheading(tortuga.towards(x, y))\n tortuga.setpos(x, y)\n self.add_punto(Punto(x, y))", "def moveturtle(x,y,t):\n t.penup()\n t.goto(x,y)\n t.pendown()", "def move(self,x,y):\n assert (type(x) in [int, float]), \"parameter x:%s is not a valid number\" % `x`\n assert (type(y) in [int, float]), \"parameter y:%s is not a valid number\" % `y`\n fstate = self._turtle.fill()\n if fstate: # only need to do this if in mid-fill\n self._turtle.fill(False)\n self._turtle.penup()\n self._turtle.setposition(x,y)\n self._turtle.pendown()\n if fstate: # only need to do this if in mid-fill\n self._turtle.fill(True)", "def setTurtle(t):\r\n t.pu()\r\n t.goto(initialCoordinates())", "def move_silent(self, pos):\n self.move(pos, silent=True)", "def reset_position(self):\n self.goto(STARTING_POSITION)", "def move_to(self, position):\n raise NotImplementedError", "def move(self):\n \n self.position = self.explore()", "def position(self, position):\n self.move_to(position)", "def set_position(self, x, y):\n self.tx = -x\n self.ty = -y", "def down():\n turtleTmp.pendown()", "def up():\n turtleTmp.penup()", "def reset(self):\n self._turtle.clear()\n self._turtle.setposition((0,0)) \n self._turtle.shape('turtle')\n self.color = 'red'\n self.heading = 180\n self.speed = 0", "def unmakeMove(self, move):", "def gohome(turtle):\n turtle.penup()\n turtle.goto(0,0)\n turtle.pendown()", "def move(self, p):\r\n self.position.setvalue(p)", "def move(self):\r\n segments = len(self.all_turtles) - 1\r\n for i in range(len(self.all_turtles)):\r\n if segments == 0:\r\n self.all_turtles[segments].forward(MOVE_DISTANCE)\r\n else:\r\n new_x = self.all_turtles[segments - 1].xcor()\r\n new_y = self.all_turtles[segments - 1].ycor()\r\n self.all_turtles[segments].goto(new_x, new_y)\r\n segments -= 1", "def drawTo(self, x, y):\n assert (type(x) in [int, float]), \"parameter x:%s is not a valid number\" % `x`\n assert (type(y) in [int, float]), \"parameter y:%s is not a valid number\" % `y`\n self._turtle.setposition(x, y)", "def stop(self):\n self.move(None)", "def move_to_position1(self):", "def stop(self):\n self.move(0, 0)", "def reset_position(self, x, y):\n\t\tself.grid[x][y] = self.terminal", "def random_move(turtle, distance):\n angle = uniform(-90,90)\n d = uniform(0,distance)\n turtle.left(angle)\n turtle.forward(d)", "def move_to_position2(self):", "def moveTo(self, pt: Tuple[float, float]) -> None:\n raise NotImplementedError", "def move_to(self, x, y):\n self.x = x\n self.y = y", "def move_to(self, x, y):\n self.x = x\n self.y = y", "def go_to_position(self, position):\n raise NotImplementedError", "def reset_movement(self):\n self.direction = [0, 0]", "def _move_tetrino(self, tetrino, x, y):\n tetrino.location_offset[constant.X] += x\n tetrino.location_offset[constant.Y] += y\n tetrino.update_location()", "def move(self):\n \n self.position = self.wander()", "def move(self):\n pass", "def move_to_start(self):\n self.pos = (SCREEN_WIDTH / 2, SCREEN_HEIGHT - 64)", "def relmoveto(self, x = 0, y = 0):\n self.cur_x += x\n self.cur_y += y\n if x < 0:\n self.out.write(self.csi + \"%sD\" % -x)\n elif x > 0:\n self.out.write(self.csi + \"%sC\" % x)\n if y < 0:\n self.out.write(self.csi + \"%sA\" % -y)\n elif y > 0:\n self.out.write(self.csi + \"%sB\" % y)", "def 
move_to(self, x, y):\r\n self.__current_room = x, y", "def move_stop(event):\n window['cursor'] = utils.CURSORS['wait']\n nonlocal x, y\n x = y = None", "def move(x,y):\r\n pass", "def move_to(self, ypos, xpos):\n # the screen's coordinates are 1 based, but the command is 0 based\n xpos -= 1\n ypos -= 1\n self.exec_command(\"MoveCursor({0}, {1})\".format(ypos, xpos).encode(\"utf-8\"))", "def move():\n Robot.move()", "def down(self):\n self.move(0,-1)", "def teleport(self, x, y):\n self.rect.x = x\n self.rect.y = y", "def loops_back_to_screen(self):\r\n for segment in self.all_turtles:\r\n if segment.xcor() < -300 or segment.xcor() > 300:\r\n segment.goto(-segment.xcor(), segment.ycor())\r\n\r\n elif segment.ycor() < -300 or segment.ycor() > 300:\r\n segment.goto(segment.xcor(), -segment.ycor())", "def test_move_step(self):\n t = AioBaseTurtle()\n t._move_step(Vec2D(-100, 0), 20, Vec2D(10,5))\n self.assertAlmostEqual(t._position[0], 100)\n self.assertAlmostEqual(t._position[1], 100)\n t.screen._drawline.assert_called_once_with(\n t.currentLineItem,\n ((-100.0, 0.0), (100.0, 100.0)), # called with mutable _position\n \"black\",\n 1,\n False\n )\n self.mock_update.assert_called_once_with()", "def moveDown():\n tt.right(90)\n tt.forward(60)\n tt.right(90)\n tt.forward(250)\n tt.right(180)", "def moveTo(self, x: int, y: int):\n raise NotImplementedError", "def move_down(self):\n self.move_step(1)", "def move_to_random_pos(self):\n newpos = [(np.random.rand() - 0.5) * 0.1,\n (np.random.rand() - 0.5) * 0.1,\n np.random.rand() * 0.9 + 0.2]\n self.move_to(newpos)", "def __moveTo(self, x, y):\n newbox = (x, y, self.currentBox[2], self.currentBox[3])\n self.__drawAndErase(boxToDraw=newbox, boxToErase=self.currentBox)\n self.currentBox = newbox", "def move_down():\n return __maze.move_down()", "def jump(self):\r\n if self.grounded == True:\r\n self.vel.y = -13", "def turtle_movement(turtle_shape, bg_color, turtle_color, turtle_speed):\n turtle_name = initialize(turtle_shape, bg_color,\n turtle_color, turtle_speed)\n\n for i in range(36):\n for i in range(4):\n turtle_name.forward(200)\n turtle_name.right(90)\n turtle_name.right(10)", "def move(self, direction):\n newx = self.x\n newy = self.y\n newy += random.randint(-1, 1)\n newx += random.randint(-1, 1)\n if self.tmap.contents[newy][newx] != '#':\n self.x = newx\n self.y = newy", "def move(self, pos):\n self.widget.move(*pos)", "def moveToPosition(self, pos):\n if pos != 1:\n prevPos = pos - 1\n self.setPointColor(prevPos, self.completedColor)\n if pos != self.num_points+1:\n self.setPointColor(pos, self.currentColor)", "def moveTo(self, location):\n self.currentLocation = location", "def _move(self, pos):\n self.put_par(\"drive\", pos)", "def mouse_move(self, pos):\n if (self.setup_type == \"position\"):\n x, y = pos\n self.canvas.move(x, y)", "def move(self, t, s):\n raise NotImplementedError", "def move_to(xy):\n (x,y) = xy\n win32api.SetCursorPos((x,y))", "def up(self):\n self.move(0, 1)", "def move_up(self):\n self.move_step(-1)", "def init_turtle():\n turtle.up()\n turtle.home()", "def move_down(self):\n self.move_measurement(1)", "def move_down(self):\n self.y -= 1", "def move(self):\n\n if self.range > 0:\n self.dirty = 1\n self.rect.move_ip([self.x * self.speed, self.y * self.speed])\n self.range -= self.speed\n else:\n self.kill()", "def jump(self):\n self.vy = -9", "def reset(self):\n self._position = TwoDV(0.0, 0.0)\n self._orient = TNavigator.START_ORIENTATION[self._mode]", "def MoveTo(self, x, y):\n return _Terminal.move % (y,x)", "def 
draw_triangle():\r\n turtle.forward(100)\r\n turtle.left(120)\r\n turtle.forward(100)\r\n turtle.left(120)\r\n turtle.forward(100)\r\n turtle.left(120)", "def move_start_node(self, x, y):", "def move(self,x,y):\n self.pos.x = x\n self.pos.y = y", "def move(self, friction = 0.0):\n try:\n newX = self.xcor() + self.dx\n newY = self.ycor() + self.dy\n self.goto(newX, newY)\n # apply friction\n self.dx = self.dx * (1 - friction)\n self.dy = self.dy * (1 - friction)\n except:\n print(\"Error, probably because dx and dy are not properties of the turtle\")", "def move_to(self, x, y):\n self._impl.move_to(x, y)", "def clear(self):\n self._turtle.clear()", "def clear(self):\n self._turtle.clear()", "def setPosition(position):", "def move_to(self, x, y):\n return _Terminal.move % (y, x)", "def _move(self, pos):\n self._set_block(self._pos, _AIR)\n self._set_block(self._pos + _Vec3(0, 1, 0), _AIR)\n self._set_block(pos, self._BOT_BLOCK)\n self._set_block(pos + _Vec3(0, 1, 0), self._BOT_BLOCK)\n self._pos = pos", "def move(self):\n if self.x_pos < const.screenwidth:\n self.x_pos += 1\n self.x_pos = self.x_pos\n\n self.draw()\n return", "def move_to(self, position, env=None):\n\n env = self._find_env(env)\n env.move_agent(self, position)", "def cancel_move(self):\n self.should_move = False", "def AeroMove(self, pos):\r\n\r\n pass", "def run(self):\n # type: () -> None\n self.move_to(self.location)", "def draw_square():\r\n turtle.forward(100)\r\n turtle.left(90)\r\n turtle.forward(100)\r\n turtle.left(90)\r\n turtle.forward(100)\r\n turtle.left(90)\r\n turtle.forward(100)\r\n turtle.left(90)", "def move_toward(state, location):\n return move_relative(state, location, True)", "def move(self, dt):\n dt = dt", "def move_up(self):\n self.move_measurement(-1)", "def move(self, x, y):\r\n if self.brush_on:\r\n for lx, ly in line(self.pos_x, self.pos_y, x, y):\r\n self.set(lx, ly)\r\n\r\n self.pos_x = x\r\n self.pos_y = y", "def move_down(self, step: int = 1) -> None:\n if self.cursor_pos.x < self.height - 1:\n self.cursor_pos = Point(self.cursor_pos.x+step, self.cursor_pos.y)\n else:\n self.cursor_pos = Point(0, self.cursor_pos.y)", "def move_dial(self, pos):\n return self.move(pos, dial=True)", "def move():\n print(\" ------ Execution -----\\n\")\n pyautogui.moveRel(0, 10)\n pyautogui.moveRel(0, -10)\n pyautogui.click()", "def move_to_target():\n keyboard.send('f')", "def reset_position(self):\n self.rect.left, self.rect.top = self.start_pos", "def setBlank(self, pos):\n self.tiles[-1] = pos", "def moveTurt(t, pole, count):\n x = t.xcor()\n y = t.ycor()\n t.goto(x, y + 10)\n t.goto(pole, y + 10)\n t.goto(pole, y)\n count += 1", "def update(self):\n self.pos_x -=1", "def set_stroke_move(self, use_stroke=True):\r\n self.board.set_stroke_move(use_stroke)" ]
[ "0.7493656", "0.72473425", "0.7133724", "0.6950032", "0.6805629", "0.6631148", "0.6597516", "0.6596458", "0.6490909", "0.63468707", "0.62488174", "0.6248645", "0.6203357", "0.6185314", "0.6184686", "0.61762464", "0.61565167", "0.60920906", "0.60809743", "0.6067138", "0.6025628", "0.60019755", "0.6000889", "0.59834975", "0.59745574", "0.5974534", "0.5969199", "0.5867107", "0.58538526", "0.58538526", "0.5811461", "0.58105594", "0.58070755", "0.5789782", "0.57647824", "0.5744767", "0.57351065", "0.57341146", "0.5731229", "0.5726274", "0.572222", "0.5721368", "0.5711772", "0.5693136", "0.5688197", "0.5680031", "0.5678374", "0.5665884", "0.56647587", "0.56498843", "0.5647072", "0.5643936", "0.56351656", "0.56324005", "0.56319404", "0.5627018", "0.56265897", "0.5610908", "0.5609417", "0.56077254", "0.5604482", "0.55978346", "0.5596923", "0.55914843", "0.5585625", "0.55807686", "0.5578927", "0.5578124", "0.5577944", "0.5572275", "0.5568932", "0.55638427", "0.55530703", "0.55425996", "0.55361617", "0.55225474", "0.5519784", "0.5519784", "0.5506695", "0.5501987", "0.5501419", "0.5495884", "0.54938567", "0.54846233", "0.54782736", "0.5477685", "0.54766923", "0.54740566", "0.5473037", "0.54719716", "0.547192", "0.54707444", "0.5466416", "0.5463624", "0.5461948", "0.54613435", "0.5459896", "0.54534775", "0.5446834", "0.5441746" ]
0.6717303
5
Deletes the turtle's drawings from the window. This method does not move the turtle or alter its attributes.
def clear(self):
        self._turtle.clear()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def __del__(self):\n self._screen._removePen(self)\n del self._turtle", "def __del__(self):\n self.clear()\n self._screen._removeTurtle(self)\n del self._turtle", "def _destroy(self):\n root = self._root\n turtle.Turtle._pen = None\n turtle.Turtle._screen = None\n self._root = None\n self._canvas = None\n turtle.TurtleScreen._RUNNING = True\n root.destroy()", "def _clear_drawing(self) -> None:\n self.vertices.clear()\n self.edges.clear()\n self.subplot.clear()\n self.selected_element = None\n self.pressed_elements.clear()", "def bye(self):\n self._frame._destroy()\n self._turtles = []\n self._gpens = []\n del self._frame", "def clear(self):\n self._frame.clear()\n self._turtles = []\n self._gpens = []", "def reset(self):\n TNavigator.reset(self)\n TPen._reset(self)\n self._clear()\n self._drawturtle()\n self._update()", "def _clear_drawing(self) -> None:\n self.vertices.clear()\n self.edges.clear()\n self.subplot.clear()\n self.subplot2.clear()", "def clear_press(self):\n\n for win in self.window.additional_windows:\n win.del_win()\n\n pos = self.window.physics_canvas.physics_objects\n self.window.physics_canvas.physics_objects = []\n\n for obj in pos:\n self.window.physics_canvas.canvas.delete(obj.canvas_id)\n\n for force in self.window.physics_canvas.interacting_forces:\n force.remove()\n\n for particle in self.window.physics_canvas.particles:\n self.window.physics_canvas.canvas.delete(particle.canvas_id)", "def clear_selected_shapes(self):\n self.shapes_to_draw = []", "def __del__(self):\n try:\n self._frame._destroy()\n except:\n pass\n self._turtles = []\n self._pencils = []\n del self._frame", "def undraw(self):\n \n if not self.canvas: return\n if not self.canvas.isClosed():\n #self.canvas.delete(self.id)\n _tkExec(self.canvas.delete, self.id)\n if self.canvas.autoflush:\n #_root.update()\n _tkCall(_root.update)\n pass\n self.canvas = None\n self.id = None", "def clear_scene(self, event):\n self.shapes = []\n self.redraw()", "def remove_drawing_rect(self):\n self.drawing_rect = QPolygonF()\n if self.connecting_rect:\n self.connecting_rect.setVisible(False)\n self.connecting_rect = None\n self.first_draw = True", "def clear(self):\n self._delayvalue = _CFG[\"delay\"]\n self._colormode = _CFG[\"colormode\"]\n self._delete(\"all\")\n self._bgpic = self._createimage(\"\")\n self._bgpicname = \"nopic\"\n self._tracing = 1\n self._updatecounter = 0\n self._turtles = []\n self.bgcolor(\"white\")\n for btn in 1, 2, 3:\n self.onclick(None, btn)\n self.onkeypress(None)\n for key in self._keys[:]:\n self.onkey(None, key)\n self.onkeypress(None, key)\n Myturtle._pen = None", "def delwin(self):\n\t\tfor c in self.components:\n\t\t\tc.delwin()\n\t\tself.win = None", "def delete_current_shape(self):\n print(\"deleting shape!\")\n self.shapes.remove(self.current_shape)\n self.current_shape = None\n self.changed()", "def flush(self):\n if self.fill:\n self._turtle.fill(False)\n self._turtle.fill(True)", "def clear_drawn_objects(self, view_manager):\n view = view_manager.get_view()\n for item in self._drawnObjects:\n view.removeItem(item)\n # clear the list:\n self._drawnObjects = []", "def clear_visualization(self) -> None:\n if self._drawing_handle is not None:\n sim.simAddDrawingObjectItem(self._drawing_handle, None)", "def _removeTurtle(self,turt):\n if turt in self._turtles:\n self._turtles.remove(turt)", "def _clear(self):\n self._fillitem = self._fillpath = None\n for item in self.items:\n self.screen._delete(item)\n self.currentLineItem = self.screen._createline()\n self.currentLine = []\n 
if self._drawing:\n self.currentLine.append(self._position)\n self.items = [self.currentLineItem]\n self.clearstamps()", "def on_draw():\n window.clear()\n world.draw()", "def remove_circle(self, removing):\r\n t = turtle.Turtle()\r\n# For whatever number, either the user of the computer, is removing it will draw over the existing circles on the screen.\r\n for total_num in range(removing):\r\n t.speed(20)\r\n t.penup()\r\n t.goto(self.posn.x,self.posn.y)\r\n t.pendown()\r\n t.color(\"#696969\") # Changes the color to dark grey\r\n t.begin_fill()\r\n t.circle(30)\r\n t.end_fill()\r\n# Moves the turtle to the next row to start removing circle\r\n self.posn.x=self.posn.x+65\r\n if self.posn.x>=25:\r\n self.posn.y= self.posn.y-65\r\n self.posn.x=-300", "def close(self):\n \n self.renderer.RemoveActor(self._crosshair.actor)\n self.renderer.RemoveActor(self._scalar_bar_actor)\n self.renderer.RemoveActor(self._orientation_annotation)\n self.renderer.RemoveActor(self._corner_annotation)\n \n for layer in self._layers :\n self.renderer.RemoveActor(layer.actor)\n \n for gui_annotation in self._gui_annotations.values() :\n self.renderer.RemoveActor(gui_annotation.shape_actor)\n self.renderer.RemoveActor(gui_annotation.text_actor)", "def paint(self):\r\n self.canvas.delete(tkinter.ALL)\r\n self.visit(self.tree.root)", "def remove_drawing_poly(self):\n\n self.drawing_poly = QPolygonF()\n self.drawing_points_coords = []\n\n for p in self.drawing_points:\n p.setVisible(False)\n\n for line in self.connecting_line_list:\n line.setVisible(False)\n if self.connecting_line:\n self.connecting_line.setVisible(False)\n self.connecting_line = None\n self.first_draw = True\n if self.set_tooltip:\n self.set_tooltip(\"\")", "def delete_ball(self):\r\n self.movement = \"\"\r\n self.canvas.delete(self.ball)", "def destroy(self):\n\t\tfor team in range(len(self.dots)): #will cycle through each team\n\t\t\tfor i in range(len(self.dots[team])): #will cycle through each member of the team\n\t\t\t\tdot = self.dots[team][i]\n\t\t\t\tdot.removeNode()\n\t\tself.mousePosition.removeNode()\n\t\tself.mapimage.removeNode()\n\t\tself.map.removeNode()", "def reset(self):\n self._turtle.clear()\n self._turtle.setposition((0,0)) \n self._turtle.shape('turtle')\n self.color = 'red'\n self.heading = 180\n self.speed = 0", "def OnRemoveAutomation(self, event, automation):\n\n self.app.RemoveAutomation(automation)\n for child in self.GetChildren():\n child.Destroy()\n\n self.Draw()", "def clearCanvas():\n global c, coordinates\n c.delete(\"all\")\n drawMusicLines()\n coordinates.clear()", "def clear_canvas():\n self.parent_class.canvas.delete(\"all\")", "def reset(self):\n self._turtle.clear()\n self._turtle.setposition((0,0)) \n try:\n self._turtle.shape('pen.gif')\n except:\n self._turtle.shape('classic')\n self._turtle.color('red')\n self.speed = 0\n \n #pair = self._turtle.color()\n self._pencolor = self._turtle.color()[0]\n self._fillcolor = self._turtle.color()[0]", "def end_fill():\n turtleTmp.end_fill()", "def delete(self):\n\t\tself.canvas.delete('node_'+self.identifier)\n\t\tself.canvas.tag_unbind('node_'+self.identifier,\"<Any>\")", "def main():\r\n intialize()\r\n draw_hexagon()\r\n draw_square()\r\n draw_triangle()\r\n shapes()\r\n shapes2()\r\n print (\"Close the window\")\r\n turtle.done()", "def _drawturtle(self):\n screen = self.screen\n shape = screen._shapes[self.Myturtle.shapeIndex]\n ttype = shape._type\n titem = self.Myturtle._item\n if self._shown and screen._updatecounter == 0 and screen._tracing > 0:\n 
self._hidden_from_screen = False\n tshape = shape._data\n if ttype == \"polygon\":\n if self._resizemode == \"noresize\": w = 1\n elif self._resizemode == \"auto\": w = self._pensize\n else: w =self._outlinewidth\n shape = self._polytrafo(self._getshapepoly(tshape))\n fc, oc = self._fillcolor, self._pencolor\n screen._drawpoly(titem, shape, fill=fc, outline=oc,\n width=w, top=True)\n elif ttype == \"image\":\n screen._drawimage(titem, self._position, tshape)\n elif ttype == \"compound\":\n for item, (poly, fc, oc) in zip(titem, tshape):\n poly = self._polytrafo(self._getshapepoly(poly, True))\n screen._drawpoly(item, poly, fill=self._cc(fc),\n outline=self._cc(oc), width=self._outlinewidth, top=True)\n else:\n if self._hidden_from_screen:\n return\n if ttype == \"polygon\":\n screen._drawpoly(titem, ((0, 0), (0, 0), (0, 0)), \"\", \"\")\n elif ttype == \"image\":\n screen._drawimage(titem, self._position,\n screen._shapes[\"blank\"]._data)\n elif ttype == \"compound\":\n for item in titem:\n screen._drawpoly(item, ((0, 0), (0, 0), (0, 0)), \"\", \"\")\n self._hidden_from_screen = True", "def clear(self):\n self.animation.stop()\n self.draw(0, 0, 0, 0, 0)", "def destroy(self):\n bullet_tools.tear_down_scene()", "def reset(self):\n for Myturtle in self._turtles:\n Myturtle._setmode(self._mode)\n Myturtle.reset()", "def delete_selection(self):\n if self.selected_point_index is not None:\n del self.current_shape[self.selected_point_index]\n self.selected_point_index = None\n self.changed()", "def on_draw(event):\n # First, we clear the window in white\n # (it is necessary to do that at every frame)\n gloo.set_clear_color((1.0, 1.0, 1.0, 1.0))\n gloo.clear()\n program.draw(\"line_strip\")", "def cleanup(self):\r\n\r\n # Remove strip from window.\r", "def discard(self) -> None:\n\n self.plot.close()", "def close_visualization(self) -> None:\n for id in self.visual_ids:\n pybullet.removeBody(id)\n self.visual_ids = []", "def done(self):\n turtle.done()", "def _clear(self, event):\n if self.ignore(event) or self._changed_canvas():\n return\n self._background = self.canvas.copy_from_bbox(self.ax.bbox)\n self.ax.draw_artist(self._buttons)\n if hasattr(self, \"_circles\"):\n for circle in self._circles:\n self.ax.draw_artist(circle)", "def quit():\n #quits from python turtle graphics screen\n bye()", "def clear(self):\r\n if self.groundPath:\r\n self.groundPath.clearProjectTexture(self.stage)\r\n self.groundPath = None\r\n\r\n if self.lightPath:\r\n self.lightPath.detachNode()\r\n self.lightPath = None\r\n\r\n if self.cameraPath:\r\n self.cameraPath.detachNode()\r\n self.cameraPath = None\r\n self.camera = None\r\n self.lens = None\r\n\r\n if self.buffer:\r\n base.graphicsEngine.removeWindow(self.buffer)\r\n self.tex = None\r\n self.buffer = None", "def draw(self, *args, **kwargs):\n self.window.clear()\n self.batch.draw()", "def redraw(self):\n glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT)\n for shape in self.shapes:\n shape.redraw()\n glFlush()\n self.SwapBuffers()", "def redraw(self):\n glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT)\n for shape in self.shapes:\n shape.redraw()\n glFlush()\n self.SwapBuffers()", "def removePick(self):\n self.pnt = None\n vtkRenWin.delMarker(self.renWin)", "def down():\n turtleTmp.pendown()", "def on_delete():\r\n del win.box[-1] # delete last line\r\n #del win.box[0:-1] # delete all lines \r", "def reset_window(self):\n self.sorting = False\n self.sort_list = []\n self.window.delete('all')\n for i in range(100):\n random_height = randint(40,280)\n line_id 
= self.window.create_line(4*i+50, 20, 4*i+50, random_height)\n self.sort_list.append([random_height, line_id])\n self.window.update()", "def up():\n turtleTmp.penup()", "def clear_screen(self):\r\n lst_grid = self.root.grid_slaves()\r\n for widget in lst_grid:\r\n widget.destroy()\r\n lst_pack = self.root.pack_slaves()\r\n for widget in lst_pack:\r\n widget.destroy()", "def clearScreen():\n dislin.erase()", "def erase(self):\r\n self.in_arrow = None\r\n self.out_arrow = None", "def clear(self):\n black = neo.Color(0,0,0)\n self.set_all(black)\n self.draw()", "def clear(self):\n self.clear_markers()\n self.l_marker.remove()\n self.l_line.remove()\n self.r_marker.remove()\n self.r_line.remove()", "def remove_pos(self):\r\n selected_items = self.treeview.selection()\r\n for items in selected_items:\r\n values = self.treeview.item(items, 'values')\r\n if values[0] in self.holdings:\r\n del self.holdings[values[0]]\r\n self.treeview.delete(items)\r\n return None", "def erase_plot(self, line_position=0):\n self.axplot.lines.pop(line_position).remove\n self.fig.canvas.draw()\n return", "def destroy (self, *attrs):\n for attr in ('rect', 'draw_fn') + attrs:\n try:\n delattr(self, attr)\n except AttributeError:\n pass", "def clearPlayground(self):\n\n for cell in self.cells:\n cell.delete()\n self.cells = []\n self.generation = 0", "def __del__(self):\n pyplot.clf()", "def deleteSelected(self):\n self.p.dat.flowsheet.deleteEdges(self.selectedEdges)\n self.selectedEdges = []\n self.p.dat.flowsheet.deleteNodes(self.selectedNodes)\n self.selectedNodes = []\n self.p.noneSelectedEmit()\n self.p.createScene()", "def clear(self):\n self._plt.clear()\n self._layer_items = {}", "def game_window(self):\r\n t = turtle.Turtle()\r\n t.hideturtle()\r\n\r\n for total_num in range(self.circle):\r\n t.hideturtle()\r\n t.speed(20)\r\n t.penup()\r\n t.goto(self.posn.x,self.posn.y)\r\n t.pendown()\r\n t.color(\"#40e0d0\")\r\n t.begin_fill()\r\n t.circle(30)\r\n t.end_fill()\r\n self.posn.x= self.posn.x+65\r\n if self.posn.x>=25:\r\n self.posn.y= self.posn.y-65\r\n self.posn.x=-300", "def erase(self):\n self.view.erase_status('00_git_gutter')", "def destroy(self):\r\n self._tidy()\r\n self.stop()\r\n try:\r\n self.opengl.destroy(self)\r\n except:\r\n pass\r\n if self.external_mouse:\r\n try:\r\n self.external_mouse.stop()\r\n except:\r\n pass_\r\n try:\r\n self.mouse.stop()\r\n except:\r\n pass\r\n try:\r\n self.tkwin.destroy()\r\n except:\r\n pass\r\n Display.INSTANCE = None", "def clear(self):\n try:\n # This causes stupid errors with tkagg, so just wrap it in\n # try-except for now\n self.fig.clear()\n except: pass\n self.annotators.clear()\n self.dims.clear()\n self.ph.remove(self.ID)", "def destroy_view(self): \n\n self.canvas.destroy()\n self.scrollbar.destroy()\n self.header_frame.destroy()\n self.button_frame.destroy()\n self.twitter_canvas.destroy()\n self.twitter_scrollbar.destroy()", "def clear(self):\n self._x_prev = None\n self._y_prev = None", "def remove_canvas(self,):\r\n # reset plot view beofre change\r\n self.canvas.toolbar.home()\r\n # remove widgets from canvas_vlayout\r\n self.canvas_vlayout.removeWidget(self.toolbar)\r\n self.toolbar.close()\r\n self.canvas_vlayout.removeWidget(self.canvas)\r\n self.canvas.close()", "def draw_objects():\n\n # Disable the turtle animation, and erase the scren.\n turtle.tracer(False)\n turtle.hideturtle()\n turtle.clear()\n\n # Draw all the parts of the scene.\n draw_ball()\n draw_target()\n draw_bounds()\n draw_pins()\n\n show_status()\n\n # Now show the screen, 
after everything has been drawn\n turtle.tracer(True)", "def deleteSelected(self):\n self.scene().deleteSelected()", "def removeLatticeFrame(self):\n self.latticeFrame.remove()", "def destroy(self):\n for window in self.windows:\n try:\n destroy_window(window)\n except:\n pass", "def remove_object_from_canvas(self, tk_object):\n self.canvas.delete(tk_object)", "def clearwin(event=None):\r\n # for child in mframe.winfo_children():\r\n # child.destroy()\r\n global mframe\r\n mframe.destroy()\r\n mframe = tkinter.Frame(main, width=800, height=600, background='pink')\r\n mframe.pack(fill=\"both\", expand=True, padx=20, pady=20)", "def clear_scene(self):\n # Set all robots variables as invisible\n for robot in self.__robots:\n robot.set_reference_visibility(False)\n robot.set_robot_visibility(False)\n\n self.scene.waitfor(\"draw_complete\")\n\n new_list = []\n for name in self.__ui_controls.get('menu_robots').choices:\n new_list.append(name)\n\n self.__selected_robot = 0\n self.__reload_caption(new_list)", "def deleteRace(self, raceIndex):\n\t\tself._ogreWin.selected.entry = None\n\t\tr = self.races[raceIndex]\n\t\ttry:\n\t\t\tfor c in r.points:\n\t\t\t\tif c['entry'] is not None:\n\t\t\t\t\tr.deleteCallback(c['entry'])\n\t\t\t\t\tself._ogreWin.entries.pop(str(c['entry'].uuid))\n\t\t\t\t\tc['entry'].removeFromScene()\n\t\t\t\t\tdel c['entry']\n\t\tfinally:\n\t\t\tself.races.__delitem__(raceIndex)\n\t\t\tself.modified = True\n\t\t\tself._ogreWin.renderWindow.update()", "def clear(self, event):\r\n self.selectedRegion = None\r\n self.paint()", "def stopLineDrawing(self):\n taskMgr.remove(\"drawLineTask\")\n if self.line is not None:\n self.line.reset()\n self.line = None", "def remove_old_graphs(self):\r\n widgets = self.winfo_children()\r\n graph_frames = []\r\n\r\n for widget in widgets:\r\n if type(widget) == tk.Frame:\r\n graph_frames.append(widget)\r\n\r\n for frame in range(len(graph_frames) - 1):\r\n graph_frames[frame].destroy()", "def cog_unload(self):\n self._get_sketch_prompt.cancel()", "def penup(self):\n if not self._drawing:\n return\n self.pen(pendown=False)", "def remove(self) -> None:\n self.map.remove_brush(self)", "def removeWidgets(self): \n for widget in self.activeWidget:\n if widget in self.window.children:\n self.window.remove_child(widget)\n widget.destroy()\n self.activeWidget = []", "def do_paint(self):\r\n curses.curs_set(0)\r\n if self.win:\r\n self.paint()\r\n self.done_paint()", "def destroy_all(self):\n\n for k in self.widgets:\n self.widgets[k].destroy()\n self.widgets = {}\n self.window.destroy()\n self.window = tk.Frame(self.root)\n self.window.pack(side=\"top\", fill=\"both\", expand=True)", "def _onRemove(self, event):\n index = self.colorlist.GetSelection()\n del self.graphColors[index]\n self._tupleListToStrings()\n if len(self.graphColors) > 0:\n self.colorlist.SetSelection(0)\n self._updateButtons(None)", "def clear(self) -> None:\n\n self.screen.fill(self.bg)", "def delete_fit(self):\n self.fft_fit_plotter.delete_plot(self.ax)\n plt.draw()", "def shapes():\r\n turtle.up()\r\n turtle.forward(500)\r\n turtle.down()\r\n draw_hexagon()\r\n draw_square()\r\n draw_triangle()", "def delete(self):\n if self.shape is not None:\n self.shape.delete()\n if self in shared.obstacles:\n shared.obstacles.remove(self)" ]
[ "0.7095369", "0.7073882", "0.67882663", "0.6711661", "0.65692496", "0.6532985", "0.63668907", "0.6328843", "0.62957186", "0.6284367", "0.628283", "0.622739", "0.60716885", "0.6029884", "0.5946259", "0.5937733", "0.58444446", "0.58253604", "0.5806614", "0.57928294", "0.57825464", "0.5763256", "0.5736391", "0.57327366", "0.5708937", "0.5702122", "0.56901664", "0.5688011", "0.56875837", "0.5659958", "0.56513864", "0.5631425", "0.5558756", "0.5532763", "0.5525643", "0.5516383", "0.5493083", "0.54896706", "0.54877836", "0.5484531", "0.5483971", "0.5452844", "0.54510754", "0.54457587", "0.5442873", "0.543464", "0.54259527", "0.5413272", "0.5395587", "0.53860515", "0.53709865", "0.5349955", "0.5349955", "0.5347834", "0.53433955", "0.53397363", "0.5330614", "0.5324091", "0.5324073", "0.5315487", "0.53136086", "0.5309334", "0.52910143", "0.52676696", "0.52625805", "0.52540797", "0.5249159", "0.52444047", "0.5242686", "0.52330345", "0.523145", "0.5228177", "0.52228826", "0.5219193", "0.5218382", "0.5211449", "0.5211035", "0.52101666", "0.52062726", "0.5191319", "0.51762915", "0.51738304", "0.5173381", "0.51663023", "0.515647", "0.51535046", "0.51479137", "0.51439804", "0.5143034", "0.51308537", "0.51165897", "0.51082766", "0.51080465", "0.509715", "0.50953645", "0.5094966", "0.50906366", "0.50879794", "0.5086399" ]
0.70327747
2
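A minimal usage sketch for the clear() record above, written against the standard-library turtle module that this Pen class wraps (the Pen constructor itself is not shown in this dump, so a raw stdlib turtle stands in for it):

import turtle

t = turtle.Turtle()
t.forward(100)   # leaves a line on the window
t.clear()        # the line vanishes; t stays at (100, 0) with heading 0

This is exactly the documented contract: only the drawings are removed, never the turtle's position or attributes.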
Deletes the turtle's drawings from the window. This method recenters the turtle and resets all attributes to their default values.
def reset(self):
        self._turtle.clear()
        self._turtle.setposition((0,0))
        self._turtle.shape('turtle')
        self.color = 'red'
        self.heading = 180
        self.speed = 0
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def clear(self):\n self._turtle.clear()", "def clear(self):\n self._turtle.clear()", "def __del__(self):\n self._screen._removePen(self)\n del self._turtle", "def reset(self):\n TNavigator.reset(self)\n TPen._reset(self)\n self._clear()\n self._drawturtle()\n self._update()", "def __del__(self):\n self.clear()\n self._screen._removeTurtle(self)\n del self._turtle", "def _clear_drawing(self) -> None:\n self.vertices.clear()\n self.edges.clear()\n self.subplot.clear()\n self.selected_element = None\n self.pressed_elements.clear()", "def _destroy(self):\n root = self._root\n turtle.Turtle._pen = None\n turtle.Turtle._screen = None\n self._root = None\n self._canvas = None\n turtle.TurtleScreen._RUNNING = True\n root.destroy()", "def clear(self):\n self._frame.clear()\n self._turtles = []\n self._gpens = []", "def bye(self):\n self._frame._destroy()\n self._turtles = []\n self._gpens = []\n del self._frame", "def _clear_drawing(self) -> None:\n self.vertices.clear()\n self.edges.clear()\n self.subplot.clear()\n self.subplot2.clear()", "def clear_selected_shapes(self):\n self.shapes_to_draw = []", "def clear(self):\n self._delayvalue = _CFG[\"delay\"]\n self._colormode = _CFG[\"colormode\"]\n self._delete(\"all\")\n self._bgpic = self._createimage(\"\")\n self._bgpicname = \"nopic\"\n self._tracing = 1\n self._updatecounter = 0\n self._turtles = []\n self.bgcolor(\"white\")\n for btn in 1, 2, 3:\n self.onclick(None, btn)\n self.onkeypress(None)\n for key in self._keys[:]:\n self.onkey(None, key)\n self.onkeypress(None, key)\n Myturtle._pen = None", "def __del__(self):\n try:\n self._frame._destroy()\n except:\n pass\n self._turtles = []\n self._pencils = []\n del self._frame", "def reset(self):\n for Myturtle in self._turtles:\n Myturtle._setmode(self._mode)\n Myturtle.reset()", "def clear_press(self):\n\n for win in self.window.additional_windows:\n win.del_win()\n\n pos = self.window.physics_canvas.physics_objects\n self.window.physics_canvas.physics_objects = []\n\n for obj in pos:\n self.window.physics_canvas.canvas.delete(obj.canvas_id)\n\n for force in self.window.physics_canvas.interacting_forces:\n force.remove()\n\n for particle in self.window.physics_canvas.particles:\n self.window.physics_canvas.canvas.delete(particle.canvas_id)", "def _clear(self):\n self._fillitem = self._fillpath = None\n for item in self.items:\n self.screen._delete(item)\n self.currentLineItem = self.screen._createline()\n self.currentLine = []\n if self._drawing:\n self.currentLine.append(self._position)\n self.items = [self.currentLineItem]\n self.clearstamps()", "def undraw(self):\n \n if not self.canvas: return\n if not self.canvas.isClosed():\n #self.canvas.delete(self.id)\n _tkExec(self.canvas.delete, self.id)\n if self.canvas.autoflush:\n #_root.update()\n _tkCall(_root.update)\n pass\n self.canvas = None\n self.id = None", "def reset(self):\n self._turtle.clear()\n self._turtle.setposition((0,0)) \n try:\n self._turtle.shape('pen.gif')\n except:\n self._turtle.shape('classic')\n self._turtle.color('red')\n self.speed = 0\n \n #pair = self._turtle.color()\n self._pencolor = self._turtle.color()[0]\n self._fillcolor = self._turtle.color()[0]", "def clear_scene(self, event):\n self.shapes = []\n self.redraw()", "def remove_drawing_rect(self):\n self.drawing_rect = QPolygonF()\n if self.connecting_rect:\n self.connecting_rect.setVisible(False)\n self.connecting_rect = None\n self.first_draw = True", "def clear_visualization(self) -> None:\n if self._drawing_handle is not None:\n 
sim.simAddDrawingObjectItem(self._drawing_handle, None)", "def flush(self):\n if self.fill:\n self._turtle.fill(False)\n self._turtle.fill(True)", "def clear_drawn_objects(self, view_manager):\n view = view_manager.get_view()\n for item in self._drawnObjects:\n view.removeItem(item)\n # clear the list:\n self._drawnObjects = []", "def clear(self):\n black = neo.Color(0,0,0)\n self.set_all(black)\n self.draw()", "def reset_window(self):\n self.sorting = False\n self.sort_list = []\n self.window.delete('all')\n for i in range(100):\n random_height = randint(40,280)\n line_id = self.window.create_line(4*i+50, 20, 4*i+50, random_height)\n self.sort_list.append([random_height, line_id])\n self.window.update()", "def clearCanvas():\n global c, coordinates\n c.delete(\"all\")\n drawMusicLines()\n coordinates.clear()", "def clear(self):\n self._x_prev = None\n self._y_prev = None", "def clear_canvas():\n self.parent_class.canvas.delete(\"all\")", "def reset(self):\n if hasattr(self, \"W\"):\n del self.W\n if hasattr(self, \"T\"):\n del self.T\n if hasattr(self, \"P\"):\n del self.P", "def delwin(self):\n\t\tfor c in self.components:\n\t\t\tc.delwin()\n\t\tself.win = None", "def close(self):\n \n self.renderer.RemoveActor(self._crosshair.actor)\n self.renderer.RemoveActor(self._scalar_bar_actor)\n self.renderer.RemoveActor(self._orientation_annotation)\n self.renderer.RemoveActor(self._corner_annotation)\n \n for layer in self._layers :\n self.renderer.RemoveActor(layer.actor)\n \n for gui_annotation in self._gui_annotations.values() :\n self.renderer.RemoveActor(gui_annotation.shape_actor)\n self.renderer.RemoveActor(gui_annotation.text_actor)", "def on_draw():\n window.clear()\n world.draw()", "def clear(self):\n self._plt.clear()\n self._layer_items = {}", "def paint(self):\r\n self.canvas.delete(tkinter.ALL)\r\n self.visit(self.tree.root)", "def remove_drawing_poly(self):\n\n self.drawing_poly = QPolygonF()\n self.drawing_points_coords = []\n\n for p in self.drawing_points:\n p.setVisible(False)\n\n for line in self.connecting_line_list:\n line.setVisible(False)\n if self.connecting_line:\n self.connecting_line.setVisible(False)\n self.connecting_line = None\n self.first_draw = True\n if self.set_tooltip:\n self.set_tooltip(\"\")", "def cleanup(self):\r\n\r\n # Remove strip from window.\r", "def clear(self):\r\n if self.groundPath:\r\n self.groundPath.clearProjectTexture(self.stage)\r\n self.groundPath = None\r\n\r\n if self.lightPath:\r\n self.lightPath.detachNode()\r\n self.lightPath = None\r\n\r\n if self.cameraPath:\r\n self.cameraPath.detachNode()\r\n self.cameraPath = None\r\n self.camera = None\r\n self.lens = None\r\n\r\n if self.buffer:\r\n base.graphicsEngine.removeWindow(self.buffer)\r\n self.tex = None\r\n self.buffer = None", "def clear(self):\n try:\n # This causes stupid errors with tkagg, so just wrap it in\n # try-except for now\n self.fig.clear()\n except: pass\n self.annotators.clear()\n self.dims.clear()\n self.ph.remove(self.ID)", "def delete_current_shape(self):\n print(\"deleting shape!\")\n self.shapes.remove(self.current_shape)\n self.current_shape = None\n self.changed()", "def clear(self):\n self.clear_markers()\n self.l_marker.remove()\n self.l_line.remove()\n self.r_marker.remove()\n self.r_line.remove()", "def clear(self):\n self.animation.stop()\n self.draw(0, 0, 0, 0, 0)", "def end_fill():\n turtleTmp.end_fill()", "def erase(self):\r\n self.in_arrow = None\r\n self.out_arrow = None", "def destroy(self):\n\t\tfor team in range(len(self.dots)): #will cycle through 
each team\n\t\t\tfor i in range(len(self.dots[team])): #will cycle through each member of the team\n\t\t\t\tdot = self.dots[team][i]\n\t\t\t\tdot.removeNode()\n\t\tself.mousePosition.removeNode()\n\t\tself.mapimage.removeNode()\n\t\tself.map.removeNode()", "def delete(self):\n\t\tself.canvas.delete('node_'+self.identifier)\n\t\tself.canvas.tag_unbind('node_'+self.identifier,\"<Any>\")", "def destroy (self, *attrs):\n for attr in ('rect', 'draw_fn') + attrs:\n try:\n delattr(self, attr)\n except AttributeError:\n pass", "def clear_trail(self):\n self.obj.make_trail = False\n self.obj.clear_trail()\n self.sum_ang = 0", "def reset(self):\r\n self.tree = KDTree()\r\n self.paint()", "def clear(self):\n self._fig = go.Figure()", "def clearScreen():\n dislin.erase()", "def clear_screen(self):\r\n lst_grid = self.root.grid_slaves()\r\n for widget in lst_grid:\r\n widget.destroy()\r\n lst_pack = self.root.pack_slaves()\r\n for widget in lst_pack:\r\n widget.destroy()", "def clearPlayground(self):\n\n for cell in self.cells:\n cell.delete()\n self.cells = []\n self.generation = 0", "def discard(self) -> None:\n\n self.plot.close()", "def reset(self):\n self.x_pos = 10\n self.y_pos = 10\n self.line_height = 15", "def delete_ball(self):\r\n self.movement = \"\"\r\n self.canvas.delete(self.ball)", "def clear_scene(self):\n # Set all robots variables as invisible\n for robot in self.__robots:\n robot.set_reference_visibility(False)\n robot.set_robot_visibility(False)\n\n self.scene.waitfor(\"draw_complete\")\n\n new_list = []\n for name in self.__ui_controls.get('menu_robots').choices:\n new_list.append(name)\n\n self.__selected_robot = 0\n self.__reload_caption(new_list)", "def removePick(self):\n self.pnt = None\n vtkRenWin.delMarker(self.renWin)", "def main():\r\n intialize()\r\n draw_hexagon()\r\n draw_square()\r\n draw_triangle()\r\n shapes()\r\n shapes2()\r\n print (\"Close the window\")\r\n turtle.done()", "def clear(self):\n self.raster_path_line.clear()\n self.labels_path.clear()\n self.shapefile_path.clear()\n self.costumelabels.clear()\n self.layer_name.clear()\n self.class_name.clear()\n self.idfield.clear()", "def destroy_all(self):\n\n for k in self.widgets:\n self.widgets[k].destroy()\n self.widgets = {}\n self.window.destroy()\n self.window = tk.Frame(self.root)\n self.window.pack(side=\"top\", fill=\"both\", expand=True)", "def _removeTurtle(self,turt):\n if turt in self._turtles:\n self._turtles.remove(turt)", "def clearAnim():\n for node in nuke.selectedNodes():\n # rotopaint\n if node.Class() == \"RotoPaint\":\n rotoCurves = node['curves']\n for knob in node.knobs():\n if nuke.Knob.isAnimated(node[knob]):\n nuke.Knob.clearAnimated(node[knob]) \n print \"clearing animation of: \"+node.name()+\" \"+node[knob].name()\n # other nodes\n if not node.Class() == \"RotoPaint\":\n for knob in node.knobs():\n if nuke.Knob.isAnimated(node[knob]):\n nuke.Knob.clearAnimated(node[knob]) \n print \"clearing animation of: \"+node.name()+\" \"+node[knob].name()", "def _reset_blender(self):\n\n # restore factory settings\n #bpy.ops.wm.read_factory_settings()\n for scene in bpy.data.scenes:\n for obj in scene.objects:\n scene.objects.unlink(obj)\n\n # consider only the objects in the default scene\n data = [\n bpy.data.objects,\n bpy.data.meshes,\n bpy.data.lamps,\n bpy.data.cameras\n ]\n for bpy_data_iter in data:\n for id_data in bpy_data_iter:\n bpy_data_iter.remove(id_data)", "def clear(self) -> None:\n self.screen.clear()", "def erase(self):\n\tself.state={}\n\tself.display(update_board=0)", "def 
undoChanges(self):\n Objects.undoChanges(self)\n self.draw()", "def clickClearReferences(self, event):\n self.whiteReference = None\n self.lightBtn.color = '0.85'\n self.darkReference = None\n self.darkBtn.color = '0.85'\n plt.pause(0.3)\n self.axes.autoscale_view()", "def _clear(self, event):\n if self.ignore(event) or self._changed_canvas():\n return\n self._background = self.canvas.copy_from_bbox(self.ax.bbox)\n self.ax.draw_artist(self._buttons)\n if hasattr(self, \"_circles\"):\n for circle in self._circles:\n self.ax.draw_artist(circle)", "def clear(self) -> None:\n\n self.screen.fill(self.bg)", "def _clear_window(self):\n self.buf[:] = []", "def up():\n turtleTmp.penup()", "def cog_unload(self):\n self._get_sketch_prompt.cancel()", "def OnRemoveAutomation(self, event, automation):\n\n self.app.RemoveAutomation(automation)\n for child in self.GetChildren():\n child.Destroy()\n\n self.Draw()", "def destroy(self):\n bullet_tools.tear_down_scene()", "def deinit(self):\n self.reset()", "def __del__(self):\n pyplot.clf()", "def clearwin(event=None):\r\n # for child in mframe.winfo_children():\r\n # child.destroy()\r\n global mframe\r\n mframe.destroy()\r\n mframe = tkinter.Frame(main, width=800, height=600, background='pink')\r\n mframe.pack(fill=\"both\", expand=True, padx=20, pady=20)", "def deinit(self):\n self._font.close()", "def setDrawing(self):\n self.graph_drawing=[]", "def clear(tft, oled):\n oled.fill(tft.BLACK)", "def reset(self) -> None:\r\n self.tree.delete(*self.tree.get_children())", "def reset(self):\n for lane in self.lanes.values():\n lane.puck_area.clear_widgets()\n lane.patrons = list()\n lane.disabled = False\n lane.beers = list()\n\n self.message_holder.remove_widget(self.you_lose_label)\n self.message_holder.remove_widget(self.you_win_label)", "def reset(self):\n self._unset_defaults_and_overrides()\n self.clear()", "def clear_attrs(self):\n self._attributes.clear()", "def remove_circle(self, removing):\r\n t = turtle.Turtle()\r\n# For whatever number, either the user of the computer, is removing it will draw over the existing circles on the screen.\r\n for total_num in range(removing):\r\n t.speed(20)\r\n t.penup()\r\n t.goto(self.posn.x,self.posn.y)\r\n t.pendown()\r\n t.color(\"#696969\") # Changes the color to dark grey\r\n t.begin_fill()\r\n t.circle(30)\r\n t.end_fill()\r\n# Moves the turtle to the next row to start removing circle\r\n self.posn.x=self.posn.x+65\r\n if self.posn.x>=25:\r\n self.posn.y= self.posn.y-65\r\n self.posn.x=-300", "def _drawturtle(self):\n screen = self.screen\n shape = screen._shapes[self.Myturtle.shapeIndex]\n ttype = shape._type\n titem = self.Myturtle._item\n if self._shown and screen._updatecounter == 0 and screen._tracing > 0:\n self._hidden_from_screen = False\n tshape = shape._data\n if ttype == \"polygon\":\n if self._resizemode == \"noresize\": w = 1\n elif self._resizemode == \"auto\": w = self._pensize\n else: w =self._outlinewidth\n shape = self._polytrafo(self._getshapepoly(tshape))\n fc, oc = self._fillcolor, self._pencolor\n screen._drawpoly(titem, shape, fill=fc, outline=oc,\n width=w, top=True)\n elif ttype == \"image\":\n screen._drawimage(titem, self._position, tshape)\n elif ttype == \"compound\":\n for item, (poly, fc, oc) in zip(titem, tshape):\n poly = self._polytrafo(self._getshapepoly(poly, True))\n screen._drawpoly(item, poly, fill=self._cc(fc),\n outline=self._cc(oc), width=self._outlinewidth, top=True)\n else:\n if self._hidden_from_screen:\n return\n if ttype == \"polygon\":\n screen._drawpoly(titem, 
((0, 0), (0, 0), (0, 0)), \"\", \"\")\n elif ttype == \"image\":\n screen._drawimage(titem, self._position,\n screen._shapes[\"blank\"]._data)\n elif ttype == \"compound\":\n for item in titem:\n screen._drawpoly(item, ((0, 0), (0, 0), (0, 0)), \"\", \"\")\n self._hidden_from_screen = True", "def clear(self):\n if self.flag == 0:\n for coord in INDICES:\n self.kill(coord)\n self.chart[coord] = DEAD", "def clear(self):\n for key in self.__columns:\n self.__widths[key] = 0\n self.__data = []\n self.__selectedRow = -1\n self.__formatString = \"\"\n self._window.clear()\n self.drawBorder()", "def clear():\n\tglobal _s\n\t_s.screen.fill(_s.back)\n\t_s.tab(0,0)\n\t_flip()", "def down():\n turtleTmp.pendown()", "def clear(self):\n self._plots[:] = []", "def erase(self):\n self.view.erase_status('00_git_gutter')", "def updateDraw(self):\r\n self.delConns()\r\n self.delTags()\r\n self.drawConns()\r\n self.drawTags()", "def done(self):\n turtle.done()", "def destroy_view(self): \n\n self.canvas.destroy()\n self.scrollbar.destroy()\n self.header_frame.destroy()\n self.button_frame.destroy()\n self.twitter_canvas.destroy()\n self.twitter_scrollbar.destroy()", "def deleteSelected(self):\n self.p.dat.flowsheet.deleteEdges(self.selectedEdges)\n self.selectedEdges = []\n self.p.dat.flowsheet.deleteNodes(self.selectedNodes)\n self.selectedNodes = []\n self.p.noneSelectedEmit()\n self.p.createScene()", "def clean_all(self):\n self.scene.clear()\n self.image.fill(Qt.color0)", "def clear(self):\n self.recorders = set([])\n self.reset()\n\n # Stop any currently running SpiNNaker application\n self.stop()", "def clear(screen):\n screen.clear()\n screen.refresh()", "def clear(self, event):\r\n self.selectedRegion = None\r\n self.paint()" ]
[ "0.72609514", "0.72609514", "0.7131574", "0.7130945", "0.7106821", "0.70200294", "0.6937271", "0.69109285", "0.6570212", "0.6568165", "0.6520926", "0.6450158", "0.6344266", "0.6308535", "0.62942463", "0.622969", "0.6226668", "0.617589", "0.6108122", "0.60560745", "0.6039392", "0.6014528", "0.59123975", "0.5847923", "0.58170545", "0.580479", "0.5789845", "0.5780638", "0.5762891", "0.57569367", "0.57139283", "0.5704563", "0.56788814", "0.56634957", "0.56615186", "0.56511086", "0.56393665", "0.5632603", "0.5632494", "0.56253684", "0.56225014", "0.56148756", "0.5612269", "0.55986005", "0.5566881", "0.5552818", "0.5550244", "0.554837", "0.5543953", "0.55335903", "0.552555", "0.5519436", "0.55148274", "0.5513392", "0.55059904", "0.54942924", "0.54859346", "0.5484952", "0.54848117", "0.5479329", "0.54779387", "0.5477723", "0.54744995", "0.54594237", "0.5455355", "0.54537183", "0.54518574", "0.5447458", "0.5441829", "0.5432883", "0.54225427", "0.54205424", "0.54162365", "0.5404571", "0.53901464", "0.53871036", "0.5378467", "0.5374817", "0.53739244", "0.537034", "0.53696555", "0.5368572", "0.536724", "0.5364059", "0.53640443", "0.5363675", "0.536253", "0.5361015", "0.53587544", "0.53582907", "0.5356803", "0.5349951", "0.5348381", "0.5342007", "0.53419846", "0.533711", "0.5333367", "0.5323365", "0.5315044", "0.53075916" ]
0.637487
12
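The reset() record above restores this wrapper's own defaults (shape 'turtle', color 'red', heading 180, speed 0); a sketch of the equivalent calls on a raw stdlib turtle, assuming each wrapper attribute forwards to the matching stdlib setter:

import turtle

t = turtle.Turtle()
t.forward(100)
t.clear()              # erase the drawings
t.setposition((0, 0))  # recenter
t.shape('turtle')      # then restore the wrapper's defaults
t.color('red')
t.setheading(180)
t.speed(0)

Note that stdlib's own turtle.reset() behaves slightly differently: it also recenters, but restores stdlib defaults (black pen, heading 0) rather than this wrapper's.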
Unsupported method for compatibility
def flush(self):
        pass
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _platform_compatible():\r\n raise NotImplementedError", "def __call__(self):\n raise NotImplementedError()", "def __call__(self):\n raise NotImplementedError", "def __upgrade(self):", "def support(self):", "def __call__(self):\r\n raise NotImplementedError('override me')", "def curvetype(self):\n\t\traise Exception(NotImplemented)", "def __call__(self) -> None:", "def __call__(self, *args, **kwargs):\r\n raise NotImplementedError", "def __call__(self, *args, **kwargs):\r\n raise NotImplementedError", "def __call__(self, *args, **kwargs):\r\n raise NotImplementedError", "def __call__(self, *args, **kwargs):\n msg = '{0} is not usable in pure-python'.format(self)\n raise NotImplementedError(msg)", "def to_legacy(self) -> object:\n pass", "def _patch_implementation(self, original, *args, **kwargs):\n pass", "def __call__(self, *args, **kwargs):\n raise NotImplementedError", "def __call__(self, *args, **kwargs):\n raise NotImplementedError", "def __call__(self, f):\n raise NotImplementedError()", "def __call__(self, *args, **kwargs) -> None:\n raise NotImplementedError()", "def _raise_not_supported(self):\n raise CpoNotSupportedException(\"Method '{}' is not available in solver agent '{}' ({}).\"\n .format(inspect.stack()[1][3], self.context.agent, type(self)))", "def _not_implemented(self, *args, **kwargs):\n raise NotImplementedError", "def checkCompatibility(self, *args):\n return _libsbml.SBase_checkCompatibility(self, *args)", "def test(self):\n raise NotImplementedError", "def __call__(self):\n pass", "def __call__(self):\n pass", "def __call__(self, *args, **kwargs): # real signature unknown\n pass", "def is_compatible(self, function, arguments):", "def version(self):\r\n raise NotImplementedError()", "def __call__():", "def __call__():", "def __call__():", "def __call__():", "def __call__():", "def __call__(self, **kwargs):\n raise NotImplementedError", "def __call__(self, a, b):\n # STUDENT CODE HERE\n raise NotImplementedError", "def __call__(self, a, b):\n # STUDENT CODE HERE\n raise NotImplementedError", "def __call__(self, a, b):\n # STUDENT CODE HERE\n raise NotImplementedError", "def extension (self):\n assert False, \"To be implemented by child\"", "def bad(self):\n raise NotImplementedError", "def bad(self):\n raise NotImplementedError", "def _GetOpener(self):\n raise NotImplementedError()", "def check_supported_features(self):", "def interface(self):\n raise exceptions.NotImplementedError()", "def func(*args, **kwargs): # pragma: no cover\n raise NotImplementedError(\"{name} not ported from upstream\"\n .format(name=name))", "def override(self):\n return None", "def function(self):\n raise NotImplementedError", "def __call__(self):\n\t\treturn", "def check(self):\n raise NotImplementedError", "def _create_impl(self):", "def _GetOpener(self):\r\n raise NotImplementedError()", "def check_stability(self):", "def __call__(self):\r\n raise self", "def __call__(self):\r\n raise self", "def warning(self, *args, **kwargs): # real signature unknown\n pass", "def fallback(self):\n pass", "def fallback(self):\n pass", "def method(self):\n return None", "def __call__( self ):\n pass", "def __call__(object):", "def _different_curvatures_not_supported(self):\n if self._extension_cls_directions != self._extension_cls_second:\n raise NotImplementedError(\n \"Different extensions for (directions, second) not supported.\"\n )", "def f_get(self):\n raise NotImplementedError(\"Should have implemented this.\")", "def base(self):\n raise NotImplementedError()", "def patch_sdk():", "def 
patch_sdk():", "def patch_sdk():", "def __nonzero__ ( self ) :\n raise AbstractMethodException( self , \"__nonzero__\" )", "def __call__(self, req):\n raise NotImplementedError(\"%s.__call__()\" % self.__class__.__name__)", "def __call__(self, req):\n raise NotImplementedError(\"%s.__call__()\" % self.__class__.__name__)", "def __int__(self):\n pass", "def get(self):\n raise NotImplementedError", "def available(self):\n\t\traise NotImplementedError", "def test_4_4_1_1(self):\n pass", "def downgrade():\n raise NotImplementedError(\"Downgrade is not supported\")", "def downgrade():\n raise NotImplementedError(\"Downgrade is not supported\")", "def downgrade():\n raise NotImplementedError(\"Downgrade is not supported\")", "def protocol(self):\n raise UnsupportedCall(f\"'{self.__class__.__name__}' object has no attribute 'protocol'\")", "def lint(self):\n raise NotImplementedError()", "def unsupported(self) -> Union[object, Sequence]:\n return self._unsupported", "def __init__(self):\n raise NotImplementedError()", "def _get_version(self):", "def offering(self):\r\n raise NotImplementedError()", "def retinanet(self, *args, **kwargs):\n raise NotImplementedError('retinanet method not implemented.')", "def available(self):\n raise ClixxException(\"Not implemented.\")", "def function(self, *args):\n raise NotImplemented", "def dl():\n raise NotImplementedError()", "def version(self):\n raise NotImplementedError", "def version(self):\n raise NotImplementedError", "def version(self):\n raise NotImplementedError", "def version(self):\n raise NotImplementedError", "def upgrade(self):", "def upgrade(self):", "def onJT808Operation(self):\n pass", "def default(self):\n raise NotImplementedError", "def _to_be_wrapped(self) -> None:", "def method(self):", "def test_method_not_supported(self):\n result = self.app.get('/api/v1.0/documents/convert')\n # assert the status code of the response 405 (method not allowed)\n self.assertEqual(result.status_code, 405)", "def __tr_getattr__(self, name):\n raise AttributeError(name)", "def __tr_getattr__(self, name):\n raise AttributeError(name)", "def method_get_version(self) -> str: # pragma: nocover\n raise NotImplementedError", "def __call__(self):", "def __call__(self):", "def original(self) -> Any:\n raise NotImplementedError" ]
[ "0.7279129", "0.68142307", "0.6761746", "0.6669215", "0.661067", "0.6566959", "0.6410402", "0.62955695", "0.6209175", "0.6209175", "0.6209175", "0.6150251", "0.6138908", "0.6107612", "0.6102898", "0.6102898", "0.60230196", "0.60135454", "0.59595454", "0.59073716", "0.59036094", "0.588363", "0.58825576", "0.58825576", "0.58590126", "0.58429396", "0.582205", "0.5804422", "0.5804422", "0.5804422", "0.5804422", "0.5804422", "0.5799619", "0.57945853", "0.57945853", "0.57945853", "0.57794803", "0.57730615", "0.57730615", "0.5769253", "0.57617915", "0.5761162", "0.57336277", "0.56964874", "0.56950104", "0.5678023", "0.5673142", "0.56418127", "0.56072664", "0.5604688", "0.5604218", "0.5604218", "0.5600337", "0.55896837", "0.55896837", "0.5588693", "0.55734813", "0.5561598", "0.55558157", "0.5546809", "0.55339056", "0.55225104", "0.55225104", "0.55225104", "0.55130005", "0.5511499", "0.5511499", "0.5511015", "0.5509951", "0.5495512", "0.5488144", "0.5480076", "0.5480076", "0.5480076", "0.5479503", "0.5475619", "0.54702616", "0.5457303", "0.5456054", "0.5447579", "0.5441189", "0.5438457", "0.54357326", "0.54073244", "0.5403553", "0.5403553", "0.5403553", "0.5403553", "0.5397426", "0.5397426", "0.53834", "0.538178", "0.5377456", "0.5369526", "0.53668934", "0.5359533", "0.5359533", "0.5356159", "0.5351877", "0.5351877", "0.5348911" ]
0.0
-1
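The flush() record above is a deliberate no-op stub; a short sketch of the pattern (the Pen class name is an assumption taken from the surrounding records):

class Pen:
    def flush(self):
        # Unsupported; kept so code written against a flushable drawing
        # interface can call flush() without special-casing this class.
        pass

Pen().flush()  # safe: does nothing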
The animation speed of this pen. The speed is an integer from 0 to 10. Speed = 0 means that no animation takes place. The drawLine and drawCircle methods happen instantly with no animation. Speeds from 1 to 10 enforce increasingly faster animation of line drawing. 1 is the slowest speed while 10 is the fastest (noninstantaneous) speed.
def speed(self):
        return self._turtle.speed()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def speed(self, speed=None):\n speeds = {'fastest':0, 'fast':10, 'normal':6, 'slow':3, 'slowest':1 }\n if speed is None:\n return self._speed\n if speed in speeds:\n speed = speeds[speed]\n elif 0.5 < speed < 10.5:\n speed = int(round(speed))\n else:\n speed = 0\n self.pen(speed=speed)", "def set_animation_speed(self, speed):\n self.m_animation_speed = self.calculate_animation_speed(speed)", "def calculate_animation_speed(self, speed):\n speed = float(speed)\n\n self.m_scl_pause = True if speed == 0 else False\n\n calc_speed = int(-1715 * pow(abs(speed), 3) + 4121 * pow(abs(speed), 2) - 3735 * abs(speed) + 1332)\n\n return calc_speed if speed >= 0 else -calc_speed", "def set_speed(self, speed):\n self._set_sub_text('speed', text=str(speed))\n return self", "def speed(self, speed: int, time: int = 0, /) -> None:", "def speed(self, speed):\n self._speed = speed\n self._rotspeed = speed", "def set_speed(self, speed):\n self.speed = speed", "def set_speed(self, speed=0):\n speed = clamp(speed)\n self._state.speed = speed\n self.send_command(Command.SET_SPEED, [int(speed)])", "def speed(self, value: float):\n self._speed = value", "def movespeed(self, speed):\n self._speed = speed", "def set_speed(self, speed):\n self._kernel.set_speed(float(speed))", "def set_speed(self, speed):\n assert isinstance(speed, float), \"Must be a float\"\n \n if speed < 0.0:\n raise ValueError(\"Negative speeds not supported\")\n \n self.speed = speed", "def set_speed(self,speed):\n self.speed = speed", "def speed(self, s=0):", "def speed(self) -> int:\n return self._speed", "def speed(self) -> int:\n return self._speed", "def speed(self):\n return self._speed.value", "def set_speed(self,speed):\n self.speed_p = speed", "def speed(self) -> float:\n return self._speed", "def speed(self) -> float:\n return self._speed", "def speed(self) -> str:\n return self._current_speed", "def set_speed(self, speed: str) -> None:\n self.wink.set_state(True, speed)", "def speed(self) -> int:", "def speed(self) -> int:", "def increment_speed(self):\n self.speed += 0.0004", "def set_speed(self, axis, speed):\n #log.info(f\"set speed {axis} {speed}\")\n self.cmd_axis_speed[axis] = speed", "def set_speed(self, speed):\r\n speed = float(speed)\r\n speed = int(round(speed * 27.7778))\r\n return self.send_command('speed %s' % speed)", "def speed(self):\n return 1 # speed system not implemented yet", "def set_speed(self, speed):\n self.device.set_speed(speed)\n return \"OK\"", "def speed(self) -> str:\n return self._attributes.get(\"current_speed\")", "def set_speed(self, speed):\n return self.bot_client.send_command(_Command.SetSpeed, speed)", "def speed(n):\n turtleTmp.speed(max(1, min(n, 10)))", "def speed(self, value: int, /) -> None:", "def get_speed(self):\n return self._speed", "def set_motor_speed(self, speed=0.0):\r\n self.target_speed = speed", "def set_speed(self, speed: str) -> None:\n if speed == SPEED_HIGH:\n self._bond.setSpeed(self._deviceId, self._speed_high)\n elif speed == SPEED_MEDIUM:\n self._bond.setSpeed(self._deviceId, self._speed_medium)\n elif speed == SPEED_LOW:\n self._bond.setSpeed(self._deviceId, self._speed_low)\n self._attributes['current_speed'] = speed", "def clock_speed(self, clock_speed):\n\n self._clock_speed = clock_speed", "def move_set_speed(self, speed):\n # self.motor_set_speed(MOTOR_LEFT, speed)\n # self.motor_set_speed(MOTOR_RIGHT, speed)\n self.move_speed = speed\n print(\"move_speed is now:\", self.move_speed)", "def speed(self):\n return sqrt(self.velocity_x ** 2 + self.velocity_y ** 2)", 
"def set_cmd_velocity(self, speed):\n self.gripper_io.set_signal_value(\"speed_mps\", speed)", "def change_motor_speed(self, speed=0.0):\r\n if not self.enabled:\r\n self.set_neutral(braked=False)\r\n return\r\n\r\n # logging.info(\"{} Motor Speed: {}\".format(self.motor_name, speed))\r\n self.current_speed = speed # Store current set speed\r\n\r\n # If speed is < 0.0, we are driving in reverse.\r\n self.forward = True\r\n if speed < 0.0:\r\n # Normalise speed value to be in range [0, 100]\r\n speed = -speed\r\n # Store direction\r\n self.forward = False\r\n\r\n # Apply a factor to the speed to limit speed\r\n speed *= self.speed_factor\r\n\r\n # Set motor directional pins\r\n if self.forward:\r\n if self.a_pin >= 0:\r\n self.GPIO.output(self.a_pin, 1)\r\n if self.b_pin >= 0:\r\n self.GPIO.output(self.b_pin, 0)\r\n else:\r\n if self.a_pin >= 0:\r\n self.GPIO.output(self.a_pin, 0)\r\n if self.b_pin >= 0:\r\n self.GPIO.output(self.b_pin, 1)\r\n\r\n # Convert speed into PWM duty cycle\r\n # and clamp values to min/max ranges.\r\n dutycycle = speed\r\n if dutycycle < 0.0:\r\n dutycycle = 0.0\r\n elif dutycycle > self.max_speed:\r\n dutycycle = self.max_speed\r\n\r\n # Change the PWM duty cycle based on fabs() of speed value.\r\n self.PWM.ChangeDutyCycle(dutycycle)", "def set_speed(self, ratio):\n self._speed = ratio", "def speed(self):\n self.convert_window(\"Speed\", \"meters/second\", [\"Mach number\", \"Nm/24hr\", \"centimeters/minute\", \"centimeters/second\", \"feet/hour\", \"feet/minute\", \"feet/second\", \"inches/minute\", \"inches/second\", \"kilometers/hour\", \"kilometers/second\", \"knots\", \"meters/hour\", \"meters/minute\", \"meters/second\", \"miles/hour\", \"miles/minute\", \"miles/second\", \"nautical miles/hour\", \"speed of light\", \"speed of sound\", \"yards/hour\", \"yards/minute\", \"yards/second\"])", "def get_speed(self):\n raise NotImplementedError", "def get_speed(self):\n raise NotImplementedError", "def set_joystick_speed(self, speed):\n if self.table_ready:\n command = self.build_command(self.device, (\"set_joy_speed\", str(speed)))\n self.vcw.write(self.device, command)", "def increase_speed(self, character):\n character.speed = min(character.max_steps/4, character.speed * 1.25)", "def spit(self, speed=Constants.SPIT_SPEED):\n self.setPercentOutput(speed, -speed)", "def set_speed(self, new_speed):\n self.__x_speed, self.__y_speed = new_speed", "def increase_speed(self):\n self.target_speed *= self.speedup_scale\n self.bullet_speed_factor *= self.speedup_scale", "def _nextSpeed(self, position, speed, action):\n next_speed = speed + self.integration_step * self._speedDiff(position, speed, action)\n\n # Check if you reach a terminal state\n if abs(next_speed) > 3:\n self.stuck = True\n return next_speed", "def GetSpeed(self):\n pass", "def increase_speed(self):\n self.ship_speed_factor *= self.speed_up_scale\n self.bullet_speed_factor *= self.speed_up_scale\n self.alien_speed_factor *= self.speed_up_scale", "def get_speed(self):\n raise NotImplementedError()", "def walk(self):\n self.speed = self.speed + (0.2 * self.legs)", "def set_speed(self, speed):\n # create the MAV_CMD_DO_CHANGE_SPEED command\n msg = self.message_factory.command_long_encode(0, 0,mavutil.mavlink.MAV_CMD_DO_CHANGE_SPEED,0,0,speed,0, 0, 0, 0, 0)\n\n # send command to vehicle\n self.send_mavlink(msg)\n self.flush()", "def get_speed(self):\r\n return self.__x_speed, self.__y_speed", "def set_blinkSeqSpeed(self, seqIndex, speed):\n return self.sendCommand(\"CS\" + str(int(seqIndex)) + \",\" + 
str(int(speed)))", "def on_speed_change(self, event) -> None:\r\n\r\n speed_level = int(self.speed_scale.get())\r\n self.animator.time_per_gen = self.TIMES_PER_GEN[speed_level]", "async def speed(self, value=None):\n return self.extract(await self._rpc.speed()) if value is None else (await self._rpc.speed(self.extend(value)))", "def speed(self):\n return self._dll.JLINKARM_GetSpeed()", "def set_speed (self, dx = None, dy = None) :\n if dx != None :\n self.speed[0] = dx\n if dy != None :\n self.speed[1] = dy", "def set_move_speed(cls, quad):\n\n\t\tspeed = cls.get_address_value(quad.result)\n\t\treturn speed/1000.0", "def clock_speed(self):\n return self._clock_speed", "def setVoiceSpeed(self, speed):\n\n try:\n assert speed >= 50 and speed <= 400\n\n except AssertionError:\n self.logger.warning(\"incorrect voice speed, resesting to the default speed\")\n speed = 100\n\n self.tts.setParameter(\"speed\", speed)", "def acceleration(self):\n # speed is by formula: x axis speed: by cos of the heading and y\n # axis by sine of the heading\n self.x_speed += math.cos(math.radians(self.degrees))\n self.y_speed += math.sin(math.radians(self.degrees))", "def accelerate(self):\n x_speed = self.__calc_speed(Ship._X)\n y_speed = self.__calc_speed(Ship._Y)\n self._speed_vect = (x_speed, y_speed)", "def move_forward(self, speed):\n\n # Clamp the speed\n speed = clamp(delta_unit(speed), 0, delta_unit(Car.max_speed))\n\n # Appends the speed according to the direction\n rad = np.radians(self.direction)\n self.fx += speed * np.cos(rad)\n self.fy += speed * np.sin(rad)\n\n # Set marker to move\n self.moved = True", "def set_speed(self, v):\n self.v = v", "def set_speed():\n pass", "def step(self):\n if self.change_rate != 0:\n self.speed += stats.norm(loc=0, scale=self.change_rate).rvs()\n\n if self.speed < 0.5 * self._initial_speed:\n self.speed = 0.5 * self._initial_speed\n if self.speed > 2.0 * self._initial_speed:\n self.speed = 2.0 * self._initial_speed\n else:\n pass", "def get_custom_speed(self):\n return self._custom_speed", "def calculate_velocity(self, speed):\n self.velocity.dx += math.cos(math.radians(self.angle)) * speed\n self.velocity.dy += math.sin(math.radians(self.angle)) * speed", "def set_speed(self, speed, motor):\n self.driver.set_speed(speed, motor)\n self.last_control = time.time()", "def __calc_speed(self, axis):\n old_speed = self._speed_vect[axis]\n radian = math.radians(self.__heading)\n\n if axis == Ship._X:\n heading_factor = math.cos(radian)\n else:\n # axis == Ship.Y\n heading_factor = math.sin(radian)\n\n return old_speed + heading_factor", "def set_speed(speed):\n if speed >255:\n speed =255\n elif speed <0:\n speed =0\n set_left_speed(speed)\n #time.sleep(.1)\n set_right_speed(speed)", "def _update_speed(self, speed):\n if speed is None:\n return\n if speed == self._current_speed:\n return\n\n self._current_speed = speed\n self._update_speed_attributes()\n LOG.info(\n f\"Updated LUNOS {self._name}: {self.percentage}% {self._current_speed}\"\n )", "def getStartSpeed(self):\n cmd_string = '?1'\n data = self.sendRcv(cmd_string)\n self.state['start_speed'] = int(data)\n return self.state['start_speed']", "def increase_speed(self):\n self.covid_horizontal_speed_factor *= self.speedup_scale\n self.bullet_speed_factor *= self.speedup_scale\n self.hero_speed_factor *= self.speedup_scale", "def set_speed(self, level):\n speed = self.SPEED + (self.SPEED_INCREMENT * level)\n\n if self.lane % 2:\n # Move to the right\n self.velocity = (speed, 0)\n else:\n # Move to the left\n 
self.velocity = (-speed, 0)", "def increase_speed(self):\n self.ship_speed*=self.speedup_scale\n self.bullet_speed*=self.speedup_scale\n self.alien_speed*=self.speedup_scale\n self.alien_points=int(self.alien_points*self.score_scale)\n print(self.alien_points)", "def __get_speed(self):\n if self.speed_method == 'average_gap':\n total_gap = 0\n for i in range(1, len(self.__spike_buffer)):\n total_gap += self.__spike_buffer[i] - self.__spike_buffer[i-1]\n\n average_gap = total_gap / len(self.__spike_buffer)\n\n\n if self.__spike_buffer[-1] > timeit.default_timer() - self.cooldown:\n speed = self.tick_length/average_gap\n else:\n speed = 0.00\n\n return speed", "def set_linear_track_speed(self, speed):\r\n return self._arm.set_linear_track_speed(speed)", "def increase_speed(self):\n self.ship_speed_factor *= self.speedup_scale\n self.bullet_speed_factor *= self.speedup_scale\n self.alien_speed_factor *= self.speedup_scale\n self.alien_points = int(self.alien_points * self.score_scale)", "def set_speed(self, speed, ports='ABCD'):\n\n speed += self.avg_speed\n if self.inverted:\n speed = -speed\n\n if speed > self.margin:\n speed = self.margin\n elif speed < -self.margin:\n speed = self.margin\n\n for p in ports:\n if self.motors[p].connected:\n self.motors[p].run_forever(speed_sp=speed, speed_regulation=True)\n else:\n print(\"Cant run motor on\", p, \"- not connected\")", "def adjustSpeed(self, speed):\n\t\tif self.timeout <= 0:\n\t\t\tself.speed = max(self.minimumSpeed, min(self.maximumSpeed, self.speed + speed))", "def send_tspeed(self):\n return self.shell.terminal_speed", "def get_normalized_speed(self, speed):\n return round(self.normal_trigger_slope * speed + self.normal_trigger_intercept, 2)", "def get_sound_speed(self):\n return calculate_speed_of_sound(self.T, self.H, self.p)", "def change_speed(self, action):\r\n if action == \"faster\":\r\n self.speed += 1\r\n else:\r\n if self.speed > 1:\r\n self.speed -= 1", "def set_speed(self, SHIP_MOVEMENT):\n self._speed = SHIP_MOVEMENT", "def __init__(self, speed, get_current_time):\r\n self.get_current_time = get_current_time\r\n self.speed = speed", "def movement_speed(self) -> Union[int, float]:\n return self.type_data.proto.movement_speed", "def movement_speed(self) -> Union[int, float]:\n return self.type_data.proto.movement_speed", "def increase_speed(self):\n self.state['speed_boost'] = True\n self.speed = self.maze.block_size / 8", "def step(self, speed):\n\n obstacle_speed_double = ctypes.c_double(speed[0])\n agent_x_speed_double = ctypes.c_double(speed[1])\n agent_y_speed_double = ctypes.c_double(speed[2])\n\n self.wrapper.step(self.instance, obstacle_speed_double, agent_x_speed_double, agent_y_speed_double)", "def speed(self) -> str:\n current_wink_speed = self.wink.current_fan_speed()\n if SPEED_AUTO == current_wink_speed:\n return SPEED_AUTO\n if SPEED_LOWEST == current_wink_speed:\n return SPEED_LOWEST\n if SPEED_LOW == current_wink_speed:\n return SPEED_LOW\n if SPEED_MEDIUM == current_wink_speed:\n return SPEED_MEDIUM\n if SPEED_HIGH == current_wink_speed:\n return SPEED_HIGH\n return None", "def target_speed(self):\n return self._target_speed.value", "def set_flywheel_speeds(self, speed):\n\n # Set the flywheel speeds\n self.fmt.set_speed(speed)\n self.fmb.set_speed(speed)" ]
[ "0.7604746", "0.7200397", "0.6807912", "0.6662664", "0.65087324", "0.64902276", "0.6479542", "0.64367783", "0.64187455", "0.63251877", "0.62875676", "0.62830615", "0.6264852", "0.62495047", "0.623983", "0.623983", "0.62264585", "0.61983454", "0.6166356", "0.6166356", "0.6133991", "0.61261547", "0.6099801", "0.6099801", "0.60411406", "0.60297203", "0.59916925", "0.59904116", "0.59845155", "0.597637", "0.5961799", "0.5910599", "0.5903408", "0.5887077", "0.58503485", "0.57829016", "0.5767845", "0.57567835", "0.57424635", "0.57194376", "0.5707359", "0.5703764", "0.57017183", "0.569785", "0.569785", "0.56775475", "0.5672571", "0.5672006", "0.56665105", "0.5662598", "0.56395954", "0.5630694", "0.56185263", "0.5611433", "0.56081086", "0.5606855", "0.55984294", "0.5597481", "0.55702084", "0.55656546", "0.55563277", "0.5555237", "0.553121", "0.5527349", "0.5514116", "0.5503431", "0.5494459", "0.5484638", "0.54805565", "0.54734206", "0.54556894", "0.54525405", "0.54350716", "0.5431105", "0.54280865", "0.5424925", "0.5423479", "0.5412259", "0.5385384", "0.53784686", "0.53740937", "0.5373236", "0.53672874", "0.53585446", "0.5345472", "0.5334738", "0.53082526", "0.52969563", "0.52954113", "0.5293546", "0.5274212", "0.52649266", "0.526479", "0.526479", "0.52565163", "0.5253555", "0.5243998", "0.52416515", "0.52403724" ]
0.67962736
3
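The row ending above pairs its (not-shown) positive document with negatives that all concern a speed attribute; assuming it documents a Turtle-style speed property, here is a minimal illustrative sketch. The Pen class and the 0..10 clamp are assumptions for illustration, not the dataset's actual source:

class Pen:
    """Hypothetical stand-in; the real row's class is not visible in this excerpt."""

    def __init__(self):
        self._speed = 10            # animation speed, 0 = instant

    @property
    def speed(self):
        """The animation speed of this pen, clamped to 0..10."""
        return self._speed

    @speed.setter
    def speed(self, value):
        assert isinstance(value, (int, float)), 'speed must be a number'
        self._speed = max(0, min(10, int(value)))

pen = Pen()
pen.speed = 42
assert pen.speed == 10              # clamped to the maximum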
The fill status of this pen. If the fill status is True, then the pen will fill the insides of any polygon or circle subsequently traced by its drawLine or drawCircle method. If the attribute changes, it only affects future draw commands, not past ones. Switching this attribute between True and False allows the pen to draw both solid and hollow shapes.
def fill(self): return self._turtle.fill()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def setFilled(self, fill):\n isFilled = fill\n repaint()", "def GetFillAlpha(self):\n return self._attalpha[\"fill\"]", "def fillcolor(self):\n return self._fillcolor", "def setFill(self, fill):\n self.area_show = fill", "def fill(self):\n return self[\"fill\"]", "def fill(self):\n return self[\"fill\"]", "def color(self):\n assert False, 'Pen does not have a color; use pencolor or fillcolor'", "def isFilled(self):\n return self.isFilled", "def fill(self) -> int:\n return self._fill_color", "def filled(\n self,\n filled: FillReturn,\n fill_type: FillType,\n ax: figure | int = 0,\n color: str = \"C0\",\n alpha: float = 0.7,\n ) -> None:\n fig = self._get_figure(ax)\n color = self._convert_color(color)\n xs, ys = filled_to_bokeh(filled, fill_type)\n if len(xs) > 0:\n fig.multi_polygons(xs=[xs], ys=[ys], color=color, fill_alpha=alpha, line_width=0)", "def write_fill(self, fill: FillFormat):\n if self.fill_type is not None:\n self._write_fill_type(fill)", "def update_fill(self, event):\n if event.type == 'FILL':\n self.update_positions_from_fill(event)\n self.update_holdings_from_fill(event)", "def update_fill(self, event):\n if event.type == 'FILL':\n self.update_positions_from_fill(event)\n self.update_holdings_from_fill(event)", "def update_fill(self, event):\r\n\r\n if event.type == 'FILL':\r\n self.update_positions_from_fill(event)\r\n self.update_holdings_from_fill(event)", "def fillcolor(self, *args):\n if args:\n color = self._colorstr(args)\n if color == self._fillcolor:\n return\n self.pen(fillcolor=color)\n else:\n return self._color(self._fillcolor)", "def is_filled(self):\n return(self.order_master.amount==self.order_master.filled)", "def fill_color(self) -> String:\r\n from apysc.type import value_util\r\n self._initialize_fill_color_if_not_initialized()\r\n fill_color: String = value_util.get_copy(value=self._fill_color)\r\n return fill_color", "def getFillColor(self):\n return getColor() if (fillColor == None) else fillColor", "def update_fill(self, event):\n if event.type == 'FILL':\n self.update_positions_from_fill(event)\n self.update_prices_from_fill(event)\n self.update_holdings_from_fill(event)", "def fill(self, value):\n self.fill_color = value", "def filled(self):\n return(self.order_master.filled)", "def fill_color(self, fill_color=None):\n\n if fill_color is None:\n return self._fill_color\n else:\n self._fill_color = process_color(fill_color)", "def fill(self, color):", "def getFill(self):\n return self.area_show", "def setPointFill(self, fill):\n for point in self.points:\n point.fill = fill", "def set_green(self):\n self.fill= Cell.FILLED_COLOR_BG\n self.draw()", "def SetFillAlpha(self, alpha):\n self._attalpha[\"fill\"] = alpha\n self.SetFillColorAlpha(self.GetFillColor(), alpha)", "def show(self):\n stroke(*self.status.value)\n fill(*self.status.value)\n circle((self.position.x, self.position.y), radius = 7)", "def getPointFill(self):\n l = [point.fill for point in self.points]\n if l.count(l[0]) == len(l):\n return l[0]\n else:\n raise ValueError(\"The fill attributes of the points must be the same otherwise it makes no sense.\")", "def _switch(self):\n self.fill= not self.fill", "def flush(self):\n if self.fill:\n self._turtle.fill(False)\n self._turtle.fill(True)", "def filled(self, fill_value):\n sdata = self.data\n new_data = numpy.ma.filled(sdata, fill_value=fill_value)\n if new_data == sdata:\n return self\n else:\n return type(self)(new_data, self.bset)", "def picture_fill_format(self, picture_fill_format):\n self._picture_fill_format = 
picture_fill_format", "def draw (self, screen):\n drew = bool(self.draw_fn(self, screen, self.dirty))\n self.dirty = False\n return drew", "def setFill(self, color):\n self._reconfig(\"fill\", color)", "def filling(self):\n return isinstance(self._fillpath, list)", "def fill_draw(self):\n self.draw = [x + str(y) for x in COLOR for y in CARD_VALUE]", "def drawmode(self):\n return self._turtle.isdown()", "def setFillColor(self, color):\n fillColor = color\n repaint()", "def fill(self, *args, **kwargs):\n closed = kwargs.pop('closed', True)\n return super(RadarAxes, self).fill(closed=closed, *args, **kwargs)", "def fill(self, *args, **kwargs):\n closed = kwargs.pop('closed', True)\n return super(RadarAxes, self).fill(closed=closed, *args, **kwargs)", "def fill(self):\r\n return self._structure.fill", "def position_fill(self, position_fill):\n allowed_values = [\"OPEN_ONLY\", \"REDUCE_FIRST\", \"REDUCE_ONLY\", \"DEFAULT\"] # noqa: E501\n if position_fill not in allowed_values:\n raise ValueError(\n \"Invalid value for `position_fill` ({0}), must be one of {1}\" # noqa: E501\n .format(position_fill, allowed_values)\n )\n\n self._position_fill = position_fill", "def position_fill(self, position_fill):\n allowed_values = [\"OPEN_ONLY\", \"REDUCE_FIRST\", \"REDUCE_ONLY\", \"DEFAULT\"] # noqa: E501\n if position_fill not in allowed_values:\n raise ValueError(\n \"Invalid value for `position_fill` ({0}), must be one of {1}\" # noqa: E501\n .format(position_fill, allowed_values)\n )\n\n self._position_fill = position_fill", "def sparkline_fill_color(self, sparkline_fill_color):\n\n self._sparkline_fill_color = sparkline_fill_color", "def _stroke_mode(self):\r\n if not self.color:\r\n self._color_change_mode()\r\n self.input_scene.get_stk_color(self.color)\r\n self._mode_select(2)", "def stop_loss_on_fill(self, stop_loss_on_fill):\n\n self._stop_loss_on_fill = stop_loss_on_fill", "def stop_loss_on_fill(self, stop_loss_on_fill):\n\n self._stop_loss_on_fill = stop_loss_on_fill", "def fill(self, *args, **kwargs):\r\n closed = kwargs.pop('closed', True)\r\n return super(RadarAxes, self).fill(closed=closed, *args, **kwargs)", "def fill_px(self, fill_px):\n\n self._fill_px = fill_px", "def set(self, fill_type: FillType = _DO_NOT_CHANGE,\n fore_color_rgb: Union[RGBColor, Tuple[any, any, any], None] = _DO_NOT_CHANGE,\n fore_color_mso_theme: Optional[EnumValue] = _DO_NOT_CHANGE,\n fore_color_brightness: Optional[float] = _DO_NOT_CHANGE,\n back_color_rgb: Union[RGBColor, Tuple[any, any, any], None] = _DO_NOT_CHANGE,\n back_color_mso_theme: Optional[EnumValue] = _DO_NOT_CHANGE,\n back_color_brightness: Optional[float] = _DO_NOT_CHANGE,\n pattern: Optional[MSO_PATTERN_TYPE] = _DO_NOT_CHANGE\n ):\n if fill_type is not _DO_NOT_CHANGE:\n self.fill_type = fill_type\n\n if fore_color_rgb is not _DO_NOT_CHANGE:\n self.fore_color_rgb = fore_color_rgb\n if fore_color_mso_theme is not _DO_NOT_CHANGE:\n self.fore_color_mso_theme = fore_color_mso_theme\n if fore_color_brightness is not _DO_NOT_CHANGE:\n self.fore_color_brightness = fore_color_brightness\n\n if back_color_rgb is not _DO_NOT_CHANGE:\n self.back_color_rgb = back_color_rgb\n if back_color_mso_theme is not _DO_NOT_CHANGE:\n self.back_color_mso_theme = back_color_mso_theme\n if back_color_brightness is not _DO_NOT_CHANGE:\n self.back_color_brightness = back_color_brightness\n\n if pattern is not _DO_NOT_CHANGE:\n self.pattern = pattern", "def add_fill(self, shape, value, name=None):\n return self._build_op('Fill', [shape, value], name=name)", "def shade(self) -> 
bool:\n return bool(self.GetShade())", "def remove_fill(settings):\r\n if settings.fillstyle == 'fill':\r\n settings.fillstyle = 'none'\r\n\r\n elif settings.fillstyle == 'fill+border':\r\n settings.fillstyle = 'border'\r\n return", "def fill():\n # Switch in edit mode\n bpy.ops.object.mode_set(mode = 'EDIT')\n \n # Fill hole\n bpy.ops.mesh.fill()", "def fill(self, colour: int, /) -> None:", "def draw(self):\n if self.master != None :\n fill = Cell.FILLED_COLOR_BG\n outline = Cell.FILLED_COLOR_BORDER\n\n if not self.fill:\n fill = Cell.EMPTY_COLOR_BG\n outline = Cell.EMPTY_COLOR_BORDER\n walls[self.ord][self.abs] = 0\n else:\n walls[self.ord][self.abs] = 1\n\n\n xmin = self.abs * self.size\n xmax = xmin + self.size\n ymin = self.ord * self.size\n ymax = ymin + self.size\n self.master.create_rectangle(xmin, ymin, xmax, ymax, fill = fill, outline = outline)", "def SetLevelOfFill(self, lev_fill):\n return _hypre.HypreILU_SetLevelOfFill(self, lev_fill)", "def _initialize_fill_color_if_not_initialized(self) -> None:\r\n if hasattr(self, '_fill_color'):\r\n return\r\n self._fill_color = String('')", "def draw(self, surface):\n checked_color = (0, 196, 0) if self.checked else pg.Color(\"white\")\n surface.fill(pg.Color(\"black\"), self.rect)\n surface.fill(self.color, self.rect.inflate(-2,-2))\n surface.fill(pg.Color(\"white\"), self.rect.inflate(-6,-6))\n surface.fill((205,205,205), self.rect.inflate(-8,-8))\n surface.fill(checked_color, self.select_rect)", "def fill_style(stroke_str, fill_str, state):\r\n settings = faint.Settings()\r\n if stroke_str == \"none\" and fill_str != \"none\":\r\n settings.fillstyle = 'f'\r\n settings.fg = parse_color(fill_str, \"1.0\", state)\r\n elif stroke_str != \"none\" and fill_str == \"none\":\r\n settings.fillstyle = 'b'\r\n settings.fg = parse_color(stroke_str, \"1.0\", state)\r\n else:\r\n settings.fillstyle = 'bf'\r\n settings.fg = parse_color(stroke_str, \"1.0\", state)\r\n settings.bg = parse_color(fill_str, \"1.0\", state)\r\n return settings", "def draw(self):\n arcade.draw_circle_filled(self.center.x, self.center.y, BALL_RADIUS, BALL_COLOR)\n return", "def clear_red(self):\r\n self._red = False\r\n self.empty = True\r\n return self.red", "def paint(self, g):\n r = getAWTBounds()\n if isFilled():\n g.setColor(getFillColor())\n g.fillRect(r.x, r.y, r.width, r.height)\n g.setColor(getColor())\n g.drawRect(r.x, r.y, r.width, r.height)", "def setDrawingMode(self):\n pass", "def FloodFill(*args, **kwargs):\n return _gdi_.DC_FloodFill(*args, **kwargs)", "def Status(self, *args):\n return _ShapeUpgrade.ShapeUpgrade_WireDivide_Status(self, *args)", "def set_fill_color(self, color: tuple) -> Rectangle:\n self.fill.color = color\n return self", "def fill(self, color):\n self.format.fill(self, color)", "def fill(self, rgb, alpha=100):\n self.call('fill', rgb, alpha)", "def init_fill_color_checkbox(self):\n self.vars[\"fill_color\"] = BooleanVar(self.frame)\n self.buttons[\"chkbtn_color\"] = Checkbutton(\n self.frame, text='fill color',\n var=self.vars[\"fill_color\"])\n self.buttons[\"chkbtn_color\"].grid(row=6, column=1)", "def GetDrawOption(self):\n return self._drawoption", "def FloodFill(*args, **kwargs):\n return _gdi_.PseudoDC_FloodFill(*args, **kwargs)", "def IsSet(self, *args):\n return _XCAFDoc.XCAFDoc_ColorTool_IsSet(self, *args)", "def isdrawn(self):\n return hasattr(self, 'drawn')", "def draw(self):\n if self.master != None :\n fill = self.fill\n #fill = Cell.FILLED_COLOR_BG\n outline = Cell.EMPTY_COLOR_BORDER\n\n #if not self.fill:\n # fill = 
Cell.EMPTY_COLOR_BG\n # outline = Cell.EMPTY_COLOR_BORDER\n\n xmin = self.abs * self.size\n xmax = xmin + self.size\n ymin = self.ord * self.size\n ymax = ymin + self.size\n\n self.master.create_rectangle(xmin, ymin, xmax, ymax, fill = fill, outline = outline)", "def draw(self):\n return self._draw", "def fill(self, x, y, color):\n raise NotImplementedError # Override this function in the Solution classes", "def isdown(self):\n return self._drawing", "def setFlatShaded(self):\n for patch in self._patches:\n patch.setFlatShaded()", "def FillPath(*args, **kwargs):\n return _gdi_.GraphicsContext_FillPath(*args, **kwargs)", "def solid(self):\r\n return not not self.prototype.solid", "def test_fill(self, fig_test, fig_ref):\n ax = fig_test.add_subplot(projection=\"ternary\")\n tn0 = [1, 0, 0]\n tn1 = [0, 1, 0]\n tn2 = [0, 0, 1]\n ax.fill(tn0, tn1, tn2, \"b\")\n\n ax = fig_ref.add_subplot(projection=\"ternary\")\n ax.set_fc(\"b\")", "def clear(self, fill = 0x00):\n self._buffer = [ fill ] * ( self.width * self.height )", "def fill_rect(self, x, y, width, height, color):\n # pylint: disable=too-many-arguments, too-many-boolean-expressions\n self.rect(x, y, width, height, color, fill=True)", "def draw_circle_filled(self, x0, y0, r, color=None):\n self._draw_fast_vline(x0, y0 - r, 2 * r + 1, color)\n self._draw_circle_filled_helper(x0, y0, r, 3, 0, color)", "def draw(self):\n if self.master != None :\n outline = Cell.FILLED_COLOR_BORDER if self.fill else Cell.EMPTY_COLOR_BORDER\n\n xmin = self.abs * self.size\n xmax = xmin + self.size\n ymin = self.ord * self.size\n ymax = ymin + self.size\n\n self.master.create_rectangle(xmin, ymin, xmax, ymax, fill = self.fill, outline = outline)", "def Status(self, *args):\n return _ShapeUpgrade.ShapeUpgrade_SplitCurve_Status(self, *args)", "def is_legend_on(self):\n return self._isLegendOn", "def is_brush(self) -> bool:\n # get the brush mode context and return its value\n with self._is_brush.get_lock():\n return self._is_brush.value", "def is_brush(self, new_value: bool) -> None:\n # get the brush mode context and set its value\n with self._is_brush.get_lock():\n self._is_brush.value = new_value", "def toFillPolygon(self, *__args): # real signature unknown; restored from __doc__ with multiple overloads\r\n return QPolygonF", "def legend_enabled(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"legend_enabled\")", "def legend_enabled(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"legend_enabled\")", "def legend_enabled(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"legend_enabled\")", "def legend_enabled(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"legend_enabled\")", "def legend_enabled(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"legend_enabled\")", "def legend_enabled(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"legend_enabled\")", "def legend_enabled(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"legend_enabled\")", "def legend_enabled(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"legend_enabled\")" ]
[ "0.68982977", "0.6480413", "0.64142907", "0.61500674", "0.60670793", "0.60670793", "0.60242504", "0.5965748", "0.593573", "0.59075147", "0.5873632", "0.57370543", "0.57370543", "0.5725248", "0.5699221", "0.553684", "0.55129635", "0.549395", "0.5492293", "0.54508805", "0.5449537", "0.5435158", "0.5419864", "0.5395306", "0.5337515", "0.5331456", "0.5316708", "0.5293443", "0.52748305", "0.5247344", "0.52448124", "0.52281046", "0.5133862", "0.5088102", "0.5084394", "0.5079889", "0.50713575", "0.5044393", "0.50078595", "0.5003742", "0.5003742", "0.500105", "0.50001997", "0.50001997", "0.49992523", "0.49957502", "0.4981293", "0.4981293", "0.49796402", "0.49741217", "0.49552286", "0.49520925", "0.49488288", "0.4946403", "0.49357915", "0.49306387", "0.49262455", "0.49193576", "0.4918578", "0.49115014", "0.49053362", "0.48913506", "0.48786238", "0.48552307", "0.48498026", "0.48496336", "0.48409787", "0.48387992", "0.48353362", "0.4826045", "0.48247927", "0.48241204", "0.48162422", "0.4789493", "0.47871634", "0.47703722", "0.47673827", "0.47634196", "0.4738089", "0.47273594", "0.47272012", "0.4727098", "0.4725825", "0.4722584", "0.47166193", "0.4715375", "0.47079867", "0.47040236", "0.46896484", "0.46780443", "0.46733624", "0.46558225", "0.46532863", "0.46532863", "0.46532863", "0.46532863", "0.46532863", "0.46532863", "0.46532863", "0.46532863" ]
0.6480622
1
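The fill-status row above describes a boolean property that gates solid versus hollow shapes and only affects future draw commands; a minimal self-contained sketch of that pattern (this Pen class is hypothetical, modeled on the docstring, not the dataset's actual source):

class Pen:
    """Minimal stand-in illustrating the fill-status semantics above."""

    def __init__(self):
        self._fill = False          # hollow shapes by default

    @property
    def fill(self):
        """Whether subsequent shapes are drawn solid (True) or hollow (False)."""
        return self._fill

    @fill.setter
    def fill(self, value):
        assert isinstance(value, bool), 'fill must be True or False'
        self._fill = value          # affects future draw commands only, not past ones

pen = Pen()
pen.fill = True                     # later drawLine/drawCircle calls would fill their insides
assert pen.fill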
Silent, unsupported property requested by a beta tester
def color(self): assert False, 'Pen does not have a color; use pencolor or fillcolor'
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_get_property_no_option():\n\n contents = (\"[Info]\\n\"\n \"vmtype = arm64\")\n\n testutils.deploy_config_raw(contents)\n\n with pytest.raises(prop.PropertyError):\n prop.get_prop('info', 'sdk')\n\n testutils.undeploy()\n\n return 0", "def test_test_property():\n\n contents = (\"[Info]\\n\"\n \"sdk = 23\")\n\n testutils.deploy_config_raw(contents)\n\n assert prop.test_prop('info', 'sdk') == 1\n\n testutils.undeploy()\n\n return 0", "def test_properties_get(self):\n pass", "def test_api_valid_property(self) -> None:\n user = self.example_user(\"hamlet\")\n\n self.login_user(user)\n subs = gather_subscriptions(user)[0]\n result = self.api_patch(\n user,\n \"/api/v1/users/me/subscriptions/{}\".format(subs[0][\"stream_id\"]),\n {\"property\": \"color\", \"value\": \"#c2c2c2\"},\n )\n self.assert_json_success(result)", "def check_property(self, descriptor): # pylint: disable=unused-argument\r\n raise SkipTest(\"check_property not defined\")", "def test_properties_evolution_get(self):\n pass", "def test_dev_props(name, properties):\n assert properties['x']\n assert properties['y']", "def the_user_should_not_be_able_to_change_the_property_of_the_device(property,value):\n print(\"Trying to change property with device disconnected\")\n bln_result1 = web_app.change_property_hardassert(property,value)\n assert (bln_result1, False)", "def testGetUnknownObjectProperties(self):\n programID = self.testResources.invalidProgramID\n self.assertRaises(PyOpenCLInterface.error, PyOpenCLInterface.GetProgramProperties, programID)", "def reportProperties():", "def test_api_invalid_property(self) -> None:\n\n user = self.example_user(\"hamlet\")\n\n self.login_user(user)\n subs = gather_subscriptions(user)[0]\n\n result = self.api_patch(\n user,\n \"/api/v1/users/me/subscriptions/{}\".format(subs[0][\"stream_id\"]),\n {\"property\": \"invalid\", \"value\": \"somevalue\"},\n )\n self.assert_json_error(result, \"Unknown subscription property: invalid\")", "def test_serve_user_properties(self):\n pass", "def test_bad_get_property(self):\n s = State(substance=\"water\", T=Q_(400.0, \"K\"), p=Q_(101325.0, \"Pa\"))\n with pytest.raises(AttributeError):\n s.bad_get", "def test_get_property():\n\n sdk = '23'\n contents = (\"[Info]\\n\"\n \"sdk = %s\" % sdk)\n\n testutils.deploy_config_raw(contents)\n\n assert prop.get_prop('info', 'sdk') == sdk\n\n testutils.undeploy()\n\n return 0", "def test_serve_user_property(self):\n pass", "def test_del_property_invalid():\n\n contents = (\"[Info]\\n\"\n \"vmtype = 64\")\n\n testutils.deploy_config_raw(contents)\n\n assert prop.del_prop('info', 'sdk') != 0\n\n testutils.undeploy()\n\n return 0", "def test_set_invalid_property(self) -> None:\n test_user = self.example_user(\"hamlet\")\n self.login_user(test_user)\n subs = gather_subscriptions(test_user)[0]\n result = self.api_post(\n test_user,\n \"/api/v1/users/me/subscriptions/properties\",\n {\n \"subscription_data\": orjson.dumps(\n [{\"property\": \"bad\", \"value\": \"bad\", \"stream_id\": subs[0][\"stream_id\"]}]\n ).decode()\n },\n )\n self.assert_json_error(result, \"Unknown subscription property: bad\")", "def test_default_product_flammability(self):\n prod = Product('Test Product')\n self.assertEqual(prod.flammability, .5)", "def process_property(self, prop):\n NifLog.warn(f\"Unknown property block found : {prop.name}\")\n NifLog.warn(f\"This type isn't currently supported: {type(prop)}\")", "def test_properties_distribution_get(self):\n pass", "def test_default_product_flammability(self):\n prod = 
Product('Test Product')\n self.assertEqual(prod.flammability, 0.5)", "def test_default_product_flammability(self):\n prod = Product('Test Product')\n self.assertEqual(prod.flammability, 0.5)", "def test_bad_property_setting(self):\n s = State(substance=\"water\")\n with pytest.raises(AttributeError):\n # Should be lowercase p\n s.TP = Q_(400.0, \"K\"), Q_(101325.0, \"Pa\")", "def test_get_property_missing(self):\r\n try:\r\n value = self.config.option2\r\n assert value\r\n except Exception as e:\r\n self.assertIsInstance(e, OptionValueNotSetError)\r\n self.assertNotIn('option2', self.config.values)", "def test_get_property_success(self):\r\n self.assertEqual(self.config.option1, 1337)", "async def test_enum_light_properties_no_key(deconz_light):\n light = await deconz_light({\"config\": {}, \"state\": {}, \"type\": \"Color light\"})\n\n assert light.alert is None\n assert light.color_capabilities is None\n assert light.color_mode is None\n assert light.effect is None\n with pytest.raises(KeyError):\n assert light.fan_speed", "def _showProperty(self):\n pass", "def test_set_existing_property():\n\n value = 'new'\n\n contents = (\"[Info]\\n\"\n \"sdk = old\")\n\n testutils.deploy_config_raw(contents)\n\n prop.set_prop('info', 'sdk', value)\n assert prop.get_prop('info', 'sdk') == value\n\n testutils.undeploy()\n\n return 0", "def test_set_new_property():\n\n value = '1'\n contents = (\"[info]\\n\"\n \"real = not_real\")\n\n testutils.deploy_config_raw(contents)\n\n prop.set_prop('info', 'sdk', value)\n assert prop.get_prop('info', 'sdk') == value\n\n testutils.undeploy()", "def test_list_properties(self):\n pass", "def test_set_property_policy(self):\n\n self._check_deploy_failure(\n self._create_test_app(key=\"test-key\",\n flavor=\"horrible.flavor\"),\n \"bad flavor\")", "def dummyProperties():\n global num_missing_properties\n num_missing_properties += 1 \n return ['main=appinventor.' + DUMMY_USER_NAME + '.' + DUMMY_PROJECT_NAME + '.Screen1\\n',\n 'name=' + DUMMY_PROJECT_NAME + '\\n',\n 'assets=../assets\\n',\n 'source=../src\\n',\n 'build=../build\\n',\n 'versioncode=1\\n',\n 'versionname=1.0\\n',\n 'useslocation=False\\n',\n 'aname=' + DUMMY_PROJECT_NAME + '\\n']", "def test_device_failed_properties(self):\n dev = mock.Mock()\n dev.GetId = mock.Mock(return_value=\"id\")\n dev.GetState = mock.Mock(return_value=AudioDeviceState.Active)\n store = mock.Mock()\n store.GetCount = mock.Mock(return_value=1)\n store.GetAt = mock.Mock(return_value=\"pk\")\n store.GetValue = mock.Mock(side_effect=_ctypes.COMError(None, None, None))\n dev.OpenPropertyStore = mock.Mock(return_value=store)\n with warnings.catch_warnings(record=True) as w:\n AudioUtilities.CreateDevice(dev)\n assert len(w) == 1\n assert \"COMError attempting to get property 0 from device\" in str(w[0].message)", "def test_test_inline_additional_properties(self):\n pass", "def test_patch_property_ro(self):\n v1 = versions.Version(version='1.2.3', name='foo')\n try:\n v1.patch = 234\n except AttributeError:\n passed = True\n else:\n passed = False\n\n self.assertTrue(passed)", "def test_default_product_stealability(self):\n prod = Product('Test Product')\n self.assertEqual(prod.stealability(), \"Kinda stealable.\")", "def test_get_nonexistent_runtime_property(self):\n script_path = self._create_script(\n linux_script='''#! 
/bin/bash -e\n ctx instance runtime-properties nonexistent\n ''',\n windows_script='''\n ctx instance runtime-properties nonexistent\n ''')\n\n with self.assertRaises(tasks.ProcessException) as cm:\n self._run(script_path=script_path)\n\n self.assertIn(os.path.basename(script_path), cm.exception.command)\n self.assertEqual(cm.exception.exit_code, 1)\n self.assertTrue(string_in_log('RequestError', self._caplog))\n self.assertTrue(string_in_log('nonexistent', self._caplog))", "def _check_property_on_test_context(\n context: \"HookContext\", attr_str: str, user_facing_name: str, param_on_builder: str\n):\n value = getattr(context, attr_str)\n if value is None:\n raise DagsterInvalidPropertyError(\n f\"Attribute '{user_facing_name}' was not provided when \"\n f\"constructing context. Provide a value for the '{param_on_builder}' parameter on \"\n \"'build_hook_context'. To learn more, check out the testing hooks section of Dagster's \"\n \"concepts docs: https://docs.dagster.io/concepts/ops-jobs-graphs/op-hooks#testing-hooks\"\n )\n else:\n return value", "def testGetUnknownKernelObjectProperties(self):\n programID = self.testResources.invalidKernelID\n self.assertRaises(PyOpenCLInterface.error, PyOpenCLInterface.GetProgramProperties, programID)", "def test_del_property():\n\n contents = (\"[Info]\\n\"\n \"sdk = 23\")\n\n testutils.deploy_config_raw(contents)\n\n prop.del_prop('info', 'sdk')\n\n testutils.undeploy()\n\n return 0", "def test_properties_unknown_enum_value(self, mock_logging):\n data = dict(POLL_DATA, State=\"5000\")\n status = DryerStatus(self.dryer, data)\n self.assertEqual(DryerState.UNKNOWN, status.state)\n expected_call = mock.call(\n \"Value `%s` for key `%s` not in options: %s. Values from API: %s\",\n \"5000\",\n \"State\",\n mock.ANY,\n mock.ANY,\n )\n self.assertEqual(expected_call, mock_logging.warning.call_args)", "def test_properties_stats_get(self):\n pass", "def testRequired(self):\n prop = make_prop()\n with self.assertRaises(ValueError):\n prop.interpret(recipe_api.PROPERTY_SENTINEL, {})", "def test_set_subscription_property_incorrect(self) -> None:\n test_user = self.example_user(\"hamlet\")\n self.login_user(test_user)\n subs = gather_subscriptions(test_user)[0]\n\n property_name = \"is_muted\"\n result = self.api_post(\n test_user,\n \"/api/v1/users/me/subscriptions/properties\",\n {\n \"subscription_data\": orjson.dumps(\n [{\"property\": property_name, \"value\": \"bad\", \"stream_id\": subs[0][\"stream_id\"]}]\n ).decode()\n },\n )\n self.assert_json_error(result, f\"{property_name} is not a boolean\")\n\n property_name = \"in_home_view\"\n result = self.api_post(\n test_user,\n \"/api/v1/users/me/subscriptions/properties\",\n {\n \"subscription_data\": orjson.dumps(\n [{\"property\": property_name, \"value\": \"bad\", \"stream_id\": subs[0][\"stream_id\"]}]\n ).decode()\n },\n )\n self.assert_json_error(result, f\"{property_name} is not a boolean\")\n\n property_name = \"desktop_notifications\"\n result = self.api_post(\n test_user,\n \"/api/v1/users/me/subscriptions/properties\",\n {\n \"subscription_data\": orjson.dumps(\n [{\"property\": property_name, \"value\": \"bad\", \"stream_id\": subs[0][\"stream_id\"]}]\n ).decode()\n },\n )\n self.assert_json_error(result, f\"{property_name} is not a boolean\")\n\n property_name = \"audible_notifications\"\n result = self.api_post(\n test_user,\n \"/api/v1/users/me/subscriptions/properties\",\n {\n \"subscription_data\": orjson.dumps(\n [{\"property\": property_name, \"value\": \"bad\", \"stream_id\": 
subs[0][\"stream_id\"]}]\n ).decode()\n },\n )\n self.assert_json_error(result, f\"{property_name} is not a boolean\")\n\n property_name = \"push_notifications\"\n result = self.api_post(\n test_user,\n \"/api/v1/users/me/subscriptions/properties\",\n {\n \"subscription_data\": orjson.dumps(\n [{\"property\": property_name, \"value\": \"bad\", \"stream_id\": subs[0][\"stream_id\"]}]\n ).decode()\n },\n )\n self.assert_json_error(result, f\"{property_name} is not a boolean\")\n\n property_name = \"email_notifications\"\n result = self.api_post(\n test_user,\n \"/api/v1/users/me/subscriptions/properties\",\n {\n \"subscription_data\": orjson.dumps(\n [{\"property\": property_name, \"value\": \"bad\", \"stream_id\": subs[0][\"stream_id\"]}]\n ).decode()\n },\n )\n self.assert_json_error(result, f\"{property_name} is not a boolean\")\n\n property_name = \"wildcard_mentions_notify\"\n result = self.api_post(\n test_user,\n \"/api/v1/users/me/subscriptions/properties\",\n {\n \"subscription_data\": orjson.dumps(\n [{\"property\": property_name, \"value\": \"bad\", \"stream_id\": subs[0][\"stream_id\"]}]\n ).decode()\n },\n )\n\n self.assert_json_error(result, f\"{property_name} is not a boolean\")\n\n property_name = \"color\"\n result = self.api_post(\n test_user,\n \"/api/v1/users/me/subscriptions/properties\",\n {\n \"subscription_data\": orjson.dumps(\n [{\"property\": property_name, \"value\": False, \"stream_id\": subs[0][\"stream_id\"]}]\n ).decode()\n },\n )\n self.assert_json_error(result, f\"{property_name} is not a string\")", "def test_no_sweeps_property(self):\n expected_values = {\n 'no_timesteps': 1000,\n 'no_sweeps': 10,\n 'no_channels': 4,\n }\n test_rec = rt.Recording(\n np.zeros(\n [\n expected_values['no_channels'],\n expected_values['no_timesteps'],\n expected_values['no_sweeps'],\n ]\n ),\n dt=0.1,\n )\n self.assertEqual(\n test_rec.no_sweeps,\n expected_values['no_sweeps'],\n 'Expected {} for `no_sweeps` property; got {} instead.'.format(\n expected_values['no_sweeps'], test_rec.no_sweeps\n ),\n )", "def test_set_fails_when_setting_non_primitive_type(self):\n with pytest.raises(\n ClickException, match=\"Attribute `behaviours` is not allowed to be updated!\"\n ):\n self.runner.invoke(\n cli,\n [*CLI_LOG_OPTION, \"config\", \"set\", \"skills.dummy.behaviours\", \"value\"],\n standalone_mode=False,\n catch_exceptions=False,\n )", "def test_unknown_setting(self):\n url = '/%s/job-types/validation/' % self.api\n manifest = copy.deepcopy(job_test_utils.COMPLETE_MANIFEST)\n config = copy.deepcopy(self.configuration)\n config['settings'] = {\n 'VERSION': '1.0.0',\n 'DB_HOST': 'scale',\n 'DB_PASS': 'password',\n 'setting': 'extra'\n }\n\n json_data = {\n 'manifest': manifest,\n 'configuration': config\n }\n\n response = self.client.generic('POST', url, json.dumps(json_data), 'application/json')\n self.assertEqual(response.status_code, status.HTTP_200_OK, response.content)\n\n results = json.loads(response.content)\n self.assertTrue(results['is_valid'])\n self.assertEqual(len(results['warnings']), 1)\n self.assertEqual(results['warnings'][0]['name'], 'UNKNOWN_SETTING')", "def test_analysis_pass_remove_property(self):\n qr = QuantumRegister(1, \"qr\")\n circuit = QuantumCircuit(qr, name=\"MyCircuit\")\n property_set = {\"to remove\": \"value to remove\", \"to none\": \"value to none\"}\n\n pass_e = PassN_AP_NR_NP(\"to remove\", \"to none\")\n with self.assertLogs(\"LocalLogger\", level=\"INFO\") as cm:\n result = pass_e(circuit, property_set)\n\n self.assertMessageLog(\n cm,\n [\n 
\"run analysis pass PassN_AP_NR_NP\",\n \"property to remove deleted\",\n \"property to none noned\",\n ],\n )\n self.assertEqual(property_set, PropertySet({\"to none\": None}))\n self.assertIsInstance(property_set, dict)\n self.assertEqual(circuit, result)", "def _print_properties(self):\n return NotImplemented", "def test_get_nonexistent_runtime_property_json(self):\n script_path = self._create_script(\n linux_script='''#! /bin/bash -e\n ctx -j instance runtime-properties nonexistent\n ''',\n windows_script='''\n ctx -j instance runtime-properties nonexistent\n ''')\n\n with self.assertRaises(tasks.ProcessException) as cm:\n self._run(script_path=script_path)\n\n self.assertIn(os.path.basename(script_path), cm.exception.command)\n self.assertEqual(cm.exception.exit_code, 1)\n self.assertTrue(string_in_log('RequestError', self._caplog))\n self.assertTrue(string_in_log('nonexistent', self._caplog))", "def test_preferences_properties(self):\n with pytest.raises(AssertionError):\n self.preferences.exchange_params_by_currency_id\n with pytest.raises(AssertionError):\n self.preferences.utility_params_by_good_id", "def setSilent(self) -> None:\n ...", "def test_set_invalid_value(self):\n result = self.runner.invoke(\n cli,\n [\n *CLI_LOG_OPTION,\n \"config\",\n \"set\",\n \"agent.agent_name\",\n \"true\",\n \"--type=bool\",\n ],\n standalone_mode=False,\n )\n assert result.exit_code == 1", "def test_remove_user_property(self):\n pass", "def test_should_leave_user_specified_properties(self): # pylint: disable=invalid-name\n self.project.set_property('semver_git_tag_increment_part', 'minor')\n self.project.set_property('semver_git_tag_repo_dir', '/some/dir')\n self.project.set_property('semver_git_tag_changelog',\n 'dir/CHANGELOG.md')\n self.project.set_property('semver_git_tag_version_prefix', 'v')\n initialize_semver_git_tag(self.project)\n self.assertEquals(\n self.project.get_property('semver_git_tag_increment_part'), 'minor')\n self.assertEquals(\n self.project.get_property('semver_git_tag_repo_dir'), '/some/dir')\n self.assertEquals(\n self.project.get_property('semver_git_tag_changelog'),\n 'dir/CHANGELOG.md')\n self.assertEquals(\n self.project.get_property('semver_git_tag_version_prefix'), 'v')", "def property( self, prop ):\n raise NotImplementedError(\"property\")", "def test_get_empty_config():\n\n testutils.deploy_config_raw(\"\")\n\n with pytest.raises(prop.PropertyError):\n prop.get_prop('info', 'sdk')\n\n testutils.undeploy()\n\n return 0", "def test_build_property_ro(self):\n v1 = versions.Version(version='1.2.3', name='foo')\n try:\n v1.build = 9001\n except AttributeError:\n passed = True\n else:\n passed = False\n\n self.assertTrue(passed)", "def properties(self):\n raise NotImplementedError", "def test_valid_settings() -> None:\n SwaggerTesterSettings()", "def test_analysis_pass_property_set(self):\n qr = QuantumRegister(1, \"qr\")\n circuit = QuantumCircuit(qr, name=\"MyCircuit\")\n property_set = PropertySet({\"another_property\": \"another_value\"})\n\n pass_e = PassE_AP_NR_NP(\"value\")\n with self.assertLogs(\"LocalLogger\", level=\"INFO\") as cm:\n result = pass_e(circuit, property_set)\n\n self.assertMessageLog(cm, [\"run analysis pass PassE_AP_NR_NP\", \"set property as value\"])\n self.assertEqual(\n property_set, PropertySet({\"another_property\": \"another_value\", \"property\": \"value\"})\n )\n self.assertIsInstance(property_set, PropertySet)\n self.assertEqual(circuit, result)", "def testArguments(self):\n with self.assertRaises(TypeError):\n 
Manager.Properties.Version.Get(get_object(TOP_OBJECT), {})", "def testFunctionName(self):\n with self.assertRaises(AttributeError):\n Manager.Properties.Version.Set(get_object(TOP_OBJECT), {})", "def test_put_user_property(self):\n pass", "def test_minor_property_ro(self):\n v1 = versions.Version(version='1.2.3', name='foo')\n try:\n v1.minor = 33\n except AttributeError:\n passed = True\n else:\n passed = False\n\n self.assertTrue(passed)", "def retrieve_properties(self):\n raise DeviceException(DeviceException.FEATURE_NOT_IMPLEMENTED)", "def test_api_object_failed_property(self, api_object):\n api_object.status = 'FAILED'\n assert api_object.failed\n assert not api_object.creating", "def test_read_props(self):\n basic_test_runner(self, 'read_props')", "def test_tap_config_valid_if_properties_is_none(self):\n self._assert_tap_config(config=self.valid_json_file, properties=None, state=self.valid_json_file)", "def testPsychDifficulties(self):\n attr = self.session.create_visit_attr()\n\n self.util.intTypeTest(self, attr, \"difficulties\")\n\n self.util.intPropertyTest(self, attr, \"difficulties\")", "def testShowEnv(self):\n\n self.inv._filters = {'targets': ''}\n self.inv._exclusions = {'xtargets': ''}\n self.assertEqual(\n 'Inventory:\\n'\n ' Max Targets: 50\\n'\n ' Filters:\\n'\n ' Targets: , XTargets: ',\n self.inv._ShowEnv())", "def test_not_supported():\n assert get_accessory(None, State('demo.demo', 'on'), 2, config=None) \\\n is None", "def test_get_mt_settings(self):\n pass", "def test_change_name_of_the_devicefalse():", "def test_major_property_ro(self):\n v1 = versions.Version(version='1.2.3', name='foo')\n try:\n v1.major = 12\n except AttributeError:\n passed = True\n else:\n passed = False\n\n self.assertTrue(passed)", "def test_change_color_of_the_device__false():", "def testGetattr(self):\n patch = self.pd.main\n\n obj = patch.Lt_(0.5)\n self.assertEquals('<~', obj.name)\n self.assertEquals(('<~', 0.5), obj.args)\n\n alternate = patch.Obj('<~', 0.5)\n self.assertEquals('<~', alternate.name)\n self.assertEquals(('<~', 0.5), alternate.args)\n\n bang = patch.Bang()\n self.assertTrue(isinstance(bang, pdctl.Bang))", "def test_stealable(self):\r\n prod = Product(name='Test Product',\r\n weight=100, price=1,\r\n flammability=0.5)\r\n self.assertEqual(prod.stealability(), \"Not so stealable...\")", "def test_get_proposal_demand(self):\n pass", "def getProperty(propname):", "def properties(self):", "def properties(self):", "def properties(self):", "def test_variablepresentations_get(self):\n pass", "def test_custom_parameter_vfo_not_set():\n source_path = os.path.join(\"tests\", \"data\", \"CustomParameterVFO.glyphs\")\n font = GSFont(source_path)\n\n # mock up source for this test with a source file from another test\n del font.customParameters[\"Variable Font Origin\"]\n del font.customParameters[\"Variation Font Origin\"]\n assert font.customParameters[\"Variable Font Origin\"] is None\n assert font.customParameters[\"Variation Font Origin\"] is None\n default_master = get_regular_master(font)\n assert default_master.name == \"Regular Text\"", "def test_no_metaclass_get(self):\n obj = BadTestObject()\n with self.assertRaises(TypeError):\n x = obj.test_setting", "def test_store_property_after_reconnecting_to_the_device():", "def test_test_property_casing():\n\n sdk = '23'\n contents = (\"[Info]\\n\"\n \"sdk = %s\" % sdk)\n\n testutils.deploy_config_raw(contents)\n\n assert prop.test_prop('info', 'sdk') == 1\n\n testutils.undeploy()\n\n return 0", "def 
test_show_correctness_default(self):\n assert ShowCorrectness.correctness_available()", "def test_001_validate_with_bad_properties(self):\n m = schematics_flexible.BaseFlexible(\n {'code': '06',\n 'properties': {\"a\": \"this is test\"}},\n store_handler=get_mock())\n try:\n m.validate()\n except schematicsValidationError:\n pass\n else:\n self.assertTrue(False,\n 'Model must raise exception when validate raise')", "def test_change_brightness_of_the_device_false():", "def device_properties(self):\n return DeviceException(DeviceException.FEATURE_NOT_IMPLEMENTED)", "def getProperties():", "def test_set_property_invalid(self):\r\n try:\r\n initial_value = self.config.values['option1']\r\n self.config.option1 = 'invalid'\r\n except Exception as e:\r\n self.assertIsInstance(e, InvalidOptionValueError)\r\n self.assertEqual(self.config.values['option1'], initial_value)", "def hasVeryTrustedValue(self):", "def test_ignored_parameters_in_subscriptions_properties_endpoint(self) -> None:\n test_user = self.example_user(\"hamlet\")\n self.login_user(test_user)\n\n subs = gather_subscriptions(test_user)[0]\n sub = subs[0]\n result = self.api_post(\n test_user,\n \"/api/v1/users/me/subscriptions/properties\",\n {\n \"subscription_data\": orjson.dumps(\n [\n {\n \"property\": \"wildcard_mentions_notify\",\n \"stream_id\": sub[\"stream_id\"],\n \"value\": True,\n }\n ]\n ).decode(),\n \"invalid_parameter\": orjson.dumps(\n [{\"property\": \"pin_to_top\", \"stream_id\": sub[\"stream_id\"], \"value\": False}]\n ).decode(),\n },\n )\n\n self.assert_json_success(result, ignored_parameters=[\"invalid_parameter\"])", "def test_patch_hyperflex_capability_info(self):\n pass", "def test3SetAuditorProperties( self ):\n\n from GaudiAud.GaudiAudConf import NameAuditor\n from AthenaCommon.AppMgr import ServiceMgr, theAuditorSvc\n from AthenaCommon.Constants import FATAL\n\n import AthenaCommon.AtlasUnixStandardJob\n\n theAuditorSvc += NameAuditor()\n self.assertTrue( theAuditorSvc.NameAuditor.getFullName()\\\n in ServiceMgr.AuditorSvc.Auditors )\n\n theAuditorSvc.NameAuditor.OutputLevel = FATAL\n\n theAuditorSvc.setup()\n\n self.assertTrue( JobOptionsSvc.verify( 'AuditorSvc', 'Auditors', str(theAuditorSvc.Auditors) ) )\n self.assertTrue( JobOptionsSvc.verify( 'NameAuditor', 'OutputLevel', str(FATAL) ) )", "def test_get_prop(self):\n device_name = self.device.name\n self.device.close()\n fire_manager_instance = fire_manager.FireManager()\n try:\n fire_manager_instance.get_prop(device_name)\n finally:\n fire_manager_instance.close()", "def disability_specify(self, instance):\r\n return instance.user.profile.disability_specify", "def test_light_no_data(self):\n light = Light({})\n\n assert light.warning is None\n assert light.off is None" ]
[ "0.6675322", "0.6273189", "0.6186023", "0.61577505", "0.6114807", "0.60973996", "0.60565835", "0.6008967", "0.59908164", "0.59744656", "0.5964651", "0.5957363", "0.5925965", "0.59228987", "0.5908135", "0.5854864", "0.5826655", "0.58242005", "0.5764281", "0.57537925", "0.57380533", "0.57380533", "0.572347", "0.56809855", "0.5673724", "0.567024", "0.56625605", "0.5564792", "0.55560774", "0.5555269", "0.5549493", "0.550322", "0.54915327", "0.54868793", "0.5484401", "0.5482964", "0.54825485", "0.5477651", "0.5449802", "0.5441401", "0.5430044", "0.54273903", "0.54143304", "0.54104763", "0.5399339", "0.5389532", "0.5378188", "0.5374863", "0.53350353", "0.53280705", "0.53199834", "0.53151137", "0.53052896", "0.5301046", "0.52965945", "0.5264491", "0.5255794", "0.52458215", "0.5243427", "0.52203065", "0.5217837", "0.52157784", "0.52078766", "0.5203393", "0.5178604", "0.51580906", "0.5143833", "0.51392084", "0.5138809", "0.51381505", "0.51368636", "0.5128358", "0.51260376", "0.5125896", "0.5124172", "0.51235986", "0.51225376", "0.5119157", "0.5118139", "0.51152724", "0.51114905", "0.51114905", "0.51114905", "0.5104539", "0.5098988", "0.50890285", "0.5088356", "0.50874", "0.5076722", "0.5071088", "0.5068987", "0.5061229", "0.5057304", "0.5054342", "0.5049408", "0.5046601", "0.50417304", "0.50319844", "0.50290215", "0.50244385", "0.5019194" ]
0.0
-1
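The row above stores a property that exists only to fail loudly when accessed; a short sketch of that guard pattern, assuming a class that exposes pencolor and fillcolor instead (the attribute names are taken from the assertion message in the row's document):

class Pen:
    def __init__(self):
        self.pencolor = 'black'     # border / line color
        self.fillcolor = 'white'    # interior color for solid shapes

    @property
    def color(self):
        assert False, 'Pen does not have a color; use pencolor or fillcolor'

pen = Pen()
try:
    pen.color                       # deliberately unsupported; raises AssertionError
except AssertionError as exc:
    print(exc)                      # Pen does not have a color; use pencolor or fillcolor

Note the guard relies on assert, so running Python with -O would strip it; the pattern is illustrative only.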
The pen color of this pen. The pen color is used for drawing lines and circles. All subsequent draw commands draw using this color. If the color changes, it only affects future draw commands, not past ones. This color is only used for lines and the border of circles. It is not the color used for filling in solid areas (if the ``fill`` attribute is True). See the attribute ``fillcolor`` for solid shapes.
def pencolor(self): return self._pencolor
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def penColor( self ):\n return self._penColor", "def color(self):\n assert False, 'Pen does not have a color; use pencolor or fillcolor'", "def pencolor(self, *args):\n if args:\n color = self._colorstr(args)\n if color == self._pencolor:\n return\n self.pen(pencolor=color)\n else:\n return self._color(self._pencolor)", "def set_pen_color(self, color: tuple) -> Rectangle:\n self.pen.color = color\n return self", "def setPenColor( self, color ):\n self._penColor = QColor(color)\n self.setDirty()", "def color(self):\n return self.__color", "def color(self):\n return self._color", "def color(self):\n return self._color", "def get_color(self):\n\n return self._color", "def get_color(self):\n\n return self.color", "def get_color(self):\r\n return self.__color", "def get_color(self):\n return self._color", "def get_color(self):\n return self._color", "def getColor(self):\n return self.__color", "def getColor(self):\n return self.__color", "def getColor(self):\n return self.__color", "def get_color(self):\r\n return self._color", "def get_color(self):\n return self.color", "def getColor(self):\n return self.color", "def line_color(self) -> String:\r\n from apysc.type import value_util\r\n self._initialize_line_color_if_not_initialized()\r\n line_color: String = value_util.get_copy(value=self._line_color)\r\n return line_color", "def getColor(self):\r\n return self.color", "def get_color(self) -> str:\n return self.color", "def color(self):\n return self.COLOR", "def color(self):\n return self['color']", "def get_color(self):\r\n if self.color:\r\n return \"RED\"\r\n else:\r\n return \"BLACK\"", "def get_color(self) -> str:\r\n return self.color", "def stroke_style(self, color=None):\n self._impl.stroke_style(color)", "def color(self) -> Optional[str]:\n return self.colour", "def color(self):\n if \"color\" in self._prop_dict:\n return self._prop_dict[\"color\"]\n else:\n return None", "def get_color(self, point):\n return self._color.dup()", "def fillcolor(self):\n return self._fillcolor", "def base_color(self):\n return curses.color_pair(3) if self.cycling else curses.color_pair(1)", "def getColor(self):\n return qt.QColor.fromRgbF(*self._color)", "def getColor(self):\n\n return self.pktColor", "def GetColour(self):\r\n\r\n return self._colour", "def color(self, *args):\n if args:\n l = len(args)\n if l == 1:\n pcolor = fcolor = args[0]\n elif l == 2:\n pcolor, fcolor = args\n elif l == 3:\n pcolor = fcolor = args\n pcolor = self._colorstr(pcolor)\n fcolor = self._colorstr(fcolor)\n self.pen(pencolor=pcolor, fillcolor=fcolor)\n else:\n return self._color(self._pencolor), self._color(self._fillcolor)", "def color(self):\n if self._simplecell:\n self.fetch()\n return self._color", "def getColor(self):\n return self.side_color", "def get_colour(self):\n return self.colour", "def color(self):\n return self.container['color']", "def color(self):\n return self.settings['color']", "def GetConnectionPen(self):\r\n\r\n return self._dottedPen", "def highlight_color(self):\n return curses.color_pair(4) if self.cycling else curses.color_pair(2)", "def getColor(self):\n return self._l[2]", "def color(self):\n return 0x2f3136", "def get_color(self):\n return \"yellow\"", "def color(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"color\")", "def color(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"color\")", "def color(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"color\")", "def color(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, 
\"color\")", "def color(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"color\")", "def color(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"color\")", "def color(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"color\")", "def color(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"color\")", "def color(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"color\")", "def color(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"color\")", "def color(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"color\")", "def color(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"color\")", "def color(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"color\")", "def color(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"color\")", "def color(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"color\")", "def color(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"color\")", "def color(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"color\")", "def color(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"color\")", "def color(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"color\")", "def color(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"color\")", "def color(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"color\")", "def color(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"color\")", "def color(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"color\")", "def color(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"color\")", "def color(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"color\")", "def color(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"color\")", "def color(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"color\")", "def color(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"color\")", "def rgb_color(self):\n return self._color", "def stroke_color(stroke):\n if stroke ==0:\n return \"cornflowerblue\" \n elif stroke ==1:\n return \"limegreen\"\n elif stroke==2:\n return \"orange\"\n else:\n return \"limegreen\"", "def get_color(self) -> Optional[str]:\n return self.color", "def fill_color(self) -> String:\r\n from apysc.type import value_util\r\n self._initialize_fill_color_if_not_initialized()\r\n fill_color: String = value_util.get_copy(value=self._fill_color)\r\n return fill_color", "def pen(self, pen=None, **pendict):\n _pd = {\"shown\" : self._shown,\n \"pendown\" : self._drawing,\n \"pencolor\" : self._pencolor,\n \"fillcolor\" : self._fillcolor,\n \"pensize\" : self._pensize,\n \"speed\" : self._speed,\n \"resizemode\" : self._resizemode,\n \"stretchfactor\" : self._stretchfactor,\n \"outline\" : self._outlinewidth,\n \"tilt\" : self._tilt\n }\n\n if not (pen or pendict):\n return _pd\n\n if isinstance(pen, dict):\n p = pen\n else:\n p = {}\n p.update(pendict)\n\n _p_buf = {}\n for key in p:\n _p_buf[key] = _pd[key]\n\n if self.undobuffer:\n self.undobuffer.push((\"pen\", _p_buf))\n\n newLine = False\n if \"pendown\" in p:\n if self._drawing != p[\"pendown\"]:\n newLine = True\n if \"pencolor\" in p:\n if isinstance(p[\"pencolor\"], tuple):\n p[\"pencolor\"] = self._colorstr((p[\"pencolor\"],))\n if self._pencolor != p[\"pencolor\"]:\n newLine = True\n if \"pensize\" in p:\n if 
self._pensize != p[\"pensize\"]:\n newLine = True\n if newLine:\n self._newLine()\n if \"pendown\" in p:\n self._drawing = p[\"pendown\"]\n if \"pencolor\" in p:\n self._pencolor = p[\"pencolor\"]\n if \"pensize\" in p:\n self._pensize = p[\"pensize\"]\n if \"fillcolor\" in p:\n if isinstance(p[\"fillcolor\"], tuple):\n p[\"fillcolor\"] = self._colorstr((p[\"fillcolor\"],))\n self._fillcolor = p[\"fillcolor\"]\n if \"speed\" in p:\n self._speed = p[\"speed\"]\n if \"resizemode\" in p:\n self._resizemode = p[\"resizemode\"]\n if \"stretchfactor\" in p:\n sf = p[\"stretchfactor\"]\n if isinstance(sf, (int, float)):\n sf = (sf, sf)\n self._stretchfactor = sf\n # if \"shearfactor\" in p:\n # self._shearfactor = p[\"shearfactor\"]\n if \"outline\" in p:\n self._outlinewidth = p[\"outline\"]\n if \"shown\" in p:\n self._shown = p[\"shown\"]\n if \"tilt\" in p:\n self._tilt = p[\"tilt\"]\n \n self._update()", "def disabledPenColor( self ):\n return self._disabledPenColor", "def colour(self) -> Optional[str]:\n return self._colour", "def get_color(self):\n return COLOR_DICT[self.element]", "def baseColor( self ):\n return self._baseColor", "def get_colour(self) -> str:\n return self.colour", "def fillcolor(self, *args):\n if args:\n color = self._colorstr(args)\n if color == self._fillcolor:\n return\n self.pen(fillcolor=color)\n else:\n return self._color(self._fillcolor)", "def getColorModel(self):\n return self.getModel().getColorModel()", "def default_colour(self):\n colour = self.DEFAULT_COLOUR\n return colour", "def highlightColor( self ):\n return self._highlightColor", "def getCurrentColor(self):\n if self.__currentnode__ is None:\n return None\n else:\n return self.__currentnode__.getPlayer().getColor()", "def line_color(self, line_color=None):\n\n if line_color is None:\n return self._line_color\n else:\n self._line_color = process_color(line_color)", "def get_color(self):\n _lib.caca_get_dither_color.argtypes = [_Dither]\n _lib.caca_get_dither_color.restype = ctypes.c_char_p\n\n return _lib.caca_get_dither_color(self)", "def color(self):\n return self._zoom.color", "def getFillColor(self):\n return getColor() if (fillColor == None) else fillColor", "def get_graph_color ( self, object ):\n return self.graph_color_", "def get_color(self):\n return self._io.last_state['color']['front-center']", "def GetColor(self, *args):\n return _XCAFDoc.XCAFDoc_ColorTool_GetColor(self, *args)", "def conseguir_color(self):\n return self.pluma.conseguir_color()", "def rgb_color(self):\n return self._COLORS[self._mystate]", "def _get_color(self):\n return self.__color", "def _get_color(self):\n return self.__color" ]
[ "0.81438965", "0.7675859", "0.7551927", "0.6896309", "0.67128396", "0.6509215", "0.64415", "0.64415", "0.64305776", "0.6405034", "0.6389262", "0.6373281", "0.6373281", "0.6361102", "0.6361102", "0.6361102", "0.6358667", "0.63095975", "0.62246627", "0.62037814", "0.6115269", "0.6089257", "0.6041361", "0.6026057", "0.6005723", "0.5990957", "0.5971471", "0.5970854", "0.593137", "0.59016216", "0.58966166", "0.589524", "0.58889043", "0.5877786", "0.58638555", "0.58575714", "0.5833473", "0.5821522", "0.5811192", "0.5803138", "0.5773324", "0.57663137", "0.57641554", "0.5741215", "0.573511", "0.57230407", "0.57091886", "0.57091886", "0.57091886", "0.57091886", "0.57091886", "0.57091886", "0.57091886", "0.57091886", "0.57091886", "0.57091886", "0.57091886", "0.57091886", "0.57091886", "0.57091886", "0.57091886", "0.57091886", "0.57091886", "0.57091886", "0.57091886", "0.57091886", "0.57091886", "0.57091886", "0.57091886", "0.57091886", "0.57091886", "0.57091886", "0.57091886", "0.57091886", "0.56898767", "0.5667253", "0.5661912", "0.5657287", "0.56525236", "0.56506276", "0.5627357", "0.56229687", "0.56113493", "0.55868775", "0.5576066", "0.5572571", "0.5568931", "0.5561819", "0.5552018", "0.5512468", "0.550576", "0.5485086", "0.5482452", "0.5473262", "0.5457485", "0.5416199", "0.5396959", "0.53817993", "0.53769386", "0.53769386" ]
0.7982437
1
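A minimal sketch of the pencolor semantics in the row above: each draw command records the color current at call time, so changing the attribute affects only later commands. The recording Pen below is a hypothetical illustration, not the dataset's real implementation:

class Pen:
    def __init__(self):
        self._pencolor = 'black'
        self.segments = []          # (color, start, end) records

    @property
    def pencolor(self):
        return self._pencolor

    @pencolor.setter
    def pencolor(self, value):
        self._pencolor = value

    def drawLine(self, start, end):
        self.segments.append((self._pencolor, start, end))

pen = Pen()
pen.drawLine((0, 0), (1, 0))        # drawn in black
pen.pencolor = 'red'
pen.drawLine((1, 0), (1, 1))        # drawn in red; the earlier segment is unchanged
assert [s[0] for s in pen.segments] == ['black', 'red']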
The fill color of this turtle. The fill color is used for filling in solid shapes. If the ``fill`` attribute is True, all subsequent draw commands fill their insides using this color. If the color changes, it only affects future draw commands, not past ones. This color is only used for filling in the insides of solid shapes. It is not the color used for the shape border. See the attribute ``pencolor`` for the border color.
def fillcolor(self): return self._fillcolor
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def fill_color(self, fill_color=None):\n\n if fill_color is None:\n return self._fill_color\n else:\n self._fill_color = process_color(fill_color)", "def fillcolor(self, *args):\n if args:\n color = self._colorstr(args)\n if color == self._fillcolor:\n return\n self.pen(fillcolor=color)\n else:\n return self._color(self._fillcolor)", "def fill_color(self) -> String:\r\n from apysc.type import value_util\r\n self._initialize_fill_color_if_not_initialized()\r\n fill_color: String = value_util.get_copy(value=self._fill_color)\r\n return fill_color", "def setFill(self, color):\n self._reconfig(\"fill\", color)", "def set_fill_color(self, color: tuple) -> Rectangle:\n self.fill.color = color\n return self", "def getFillColor(self):\n return getColor() if (fillColor == None) else fillColor", "def fill(self) -> int:\n return self._fill_color", "def sparkline_fill_color(self, sparkline_fill_color):\n\n self._sparkline_fill_color = sparkline_fill_color", "def setFillColor(self, color):\n fillColor = color\n repaint()", "def fill(self):\n return self._turtle.fill()", "def fill(self, value):\n self.fill_color = value", "def setFilled(self, fill):\n isFilled = fill\n repaint()", "def fill(self, color):", "def fill_style(self, color=None):\n self._impl.fill_style(color)", "def write_fill(self, fill: FillFormat):\n if self.fill_type is not None:\n self._write_fill_type(fill)", "def GetFillAlpha(self):\n return self._attalpha[\"fill\"]", "def setFill(self, fill):\n self.area_show = fill", "def color(self):\n assert False, 'Pen does not have a color; use pencolor or fillcolor'", "def fill(self, color):\n self.format.fill(self, color)", "def setPointFill(self, fill):\n for point in self.points:\n point.fill = fill", "def fill(self, color: Union[int, Tuple[int, int, int]]) -> None:\n self._fill_color = color\n if color is None:\n self._palette[0] = 0x00\n self._palette.make_transparent(0)\n else:\n self._palette[0] = color\n self._palette.make_opaque(0)", "def position_fill(self, position_fill):\n allowed_values = [\"OPEN_ONLY\", \"REDUCE_FIRST\", \"REDUCE_ONLY\", \"DEFAULT\"] # noqa: E501\n if position_fill not in allowed_values:\n raise ValueError(\n \"Invalid value for `position_fill` ({0}), must be one of {1}\" # noqa: E501\n .format(position_fill, allowed_values)\n )\n\n self._position_fill = position_fill", "def position_fill(self, position_fill):\n allowed_values = [\"OPEN_ONLY\", \"REDUCE_FIRST\", \"REDUCE_ONLY\", \"DEFAULT\"] # noqa: E501\n if position_fill not in allowed_values:\n raise ValueError(\n \"Invalid value for `position_fill` ({0}), must be one of {1}\" # noqa: E501\n .format(position_fill, allowed_values)\n )\n\n self._position_fill = position_fill", "def fill(self, color):\n self.fill_rect(0, 0, self.width, self.height, color)", "def SetFillAlpha(self, alpha):\n self._attalpha[\"fill\"] = alpha\n self.SetFillColorAlpha(self.GetFillColor(), alpha)", "def SetLevelOfFill(self, lev_fill):\n return _hypre.HypreILU_SetLevelOfFill(self, lev_fill)", "def fill(self):\n return self[\"fill\"]", "def fill(self):\n return self[\"fill\"]", "def fillColor(c, mode='RGB'):\n \n # if we are using a color object (defined above), use the object's fill method\n if hasattr(c, 'colorMode'):\n \tc.setFill()\n \n # if we are passed an integer or float, set it as a simple RGB.\n elif isinstance(c, (int, float)):\n \tfill(c, c, c)\n \t\n # if we are dealing with a CMYKA tuple, set it\n elif len(c) == 5:\n cmykFill(c[0], c[1], c[2], c[3], c[4])\n \n # if we have a CMYK tuple and mode is set to CMYK\n elif 
len(c) and mode.upper() == 'CMYK':\n \tcmykFill(c[0], c[1], c[2], c[3])\n \t\n # otherwise we will assume that four-item tuples are RGBA\n elif len(c) == 4:\n fill(c[0], c[1], c[2], c[3])\n \n # last but not least, RGB!\n elif len(c) == 3:\n fill(c[0], c[1], c[2])", "def _initialize_fill_color_if_not_initialized(self) -> None:\r\n if hasattr(self, '_fill_color'):\r\n return\r\n self._fill_color = String('')", "def fill(self, x, y, color):\n raise NotImplementedError # Override this function in the Solution classes", "def begin_fill(*args,**kwargs):\n if(len(args)==2 and len(kwargs)==0):\n turtleTmp.begin_fill(args[0], args[1])\n elif (len(args)==1 and len(kwargs)==0):\n turtleTmp.begin_fill(args[0], turtleTmp.color)\n elif (len(args)==0 and len(kwargs)==1):\n if(\"borderColor\" in kwargs):\n turtleTmp.begin_fill(kwargs[\"borderColor\"], turtleTmp.color)\n else:\n turtleTmp.begin_fill(turtleTmp.color, kwargs[\"fillColor\"])\n elif (len(args)==1 and len(kwargs)==1):\n if(\"borderColor\" in kwargs):\n turtleTmp.begin_fill(kwargs[\"borderColor\"], args[0])\n else:\n turtleTmp.begin_fill(args[0], kwargs[\"fillColor\"])\n elif(len(kwargs)==2 and len(args)==0):\n turtleTmp.begin_fill(kwargs[\"borderColor\"], kwargs[\"fillColor\"])\n else:\n turtleTmp.begin_fill(turtleTmp.color, turtleTmp.color)", "def fill(self, colour: int, /) -> None:", "def filled(\n self,\n filled: FillReturn,\n fill_type: FillType,\n ax: figure | int = 0,\n color: str = \"C0\",\n alpha: float = 0.7,\n ) -> None:\n fig = self._get_figure(ax)\n color = self._convert_color(color)\n xs, ys = filled_to_bokeh(filled, fill_type)\n if len(xs) > 0:\n fig.multi_polygons(xs=[xs], ys=[ys], color=color, fill_alpha=alpha, line_width=0)", "def fill(self, color):\n color = spyral.color._determine(color)\n self._surf.fill(color)", "def color(self, *args):\n if args:\n l = len(args)\n if l == 1:\n pcolor = fcolor = args[0]\n elif l == 2:\n pcolor, fcolor = args\n elif l == 3:\n pcolor = fcolor = args\n pcolor = self._colorstr(pcolor)\n fcolor = self._colorstr(fcolor)\n self.pen(pencolor=pcolor, fillcolor=fcolor)\n else:\n return self._color(self._pencolor), self._color(self._fillcolor)", "def fill(self, rgb, alpha=100):\n self.call('fill', rgb, alpha)", "def set_green(self):\n self.fill= Cell.FILLED_COLOR_BG\n self.draw()", "def begin_fill(\r\n self, color: StrOrString,\r\n alpha: Union[float, Number] = 1.0) -> None:\r\n from apysc.color import color_util\r\n from apysc.converter import cast\r\n from apysc.validation import color_validation\r\n from apysc.validation import number_validation\r\n self._initialize_fill_color_if_not_initialized()\r\n self._initialize_fill_alpha_if_not_initialized()\r\n if color != '':\r\n color = color_util.complement_hex_color(\r\n hex_color_code=color)\r\n self._fill_color.value = color\r\n number_validation.validate_num(num=alpha)\r\n if not isinstance(alpha, Number):\r\n alpha = cast.to_float_from_int(int_or_float=alpha)\r\n color_validation.validate_alpha_range(alpha=alpha)\r\n if isinstance(alpha, Number):\r\n self._fill_alpha.value = alpha.value\r\n else:\r\n self._fill_alpha.value = alpha", "def set(self, fill_type: FillType = _DO_NOT_CHANGE,\n fore_color_rgb: Union[RGBColor, Tuple[any, any, any], None] = _DO_NOT_CHANGE,\n fore_color_mso_theme: Optional[EnumValue] = _DO_NOT_CHANGE,\n fore_color_brightness: Optional[float] = _DO_NOT_CHANGE,\n back_color_rgb: Union[RGBColor, Tuple[any, any, any], None] = _DO_NOT_CHANGE,\n back_color_mso_theme: Optional[EnumValue] = _DO_NOT_CHANGE,\n back_color_brightness: 
Optional[float] = _DO_NOT_CHANGE,\n pattern: Optional[MSO_PATTERN_TYPE] = _DO_NOT_CHANGE\n ):\n if fill_type is not _DO_NOT_CHANGE:\n self.fill_type = fill_type\n\n if fore_color_rgb is not _DO_NOT_CHANGE:\n self.fore_color_rgb = fore_color_rgb\n if fore_color_mso_theme is not _DO_NOT_CHANGE:\n self.fore_color_mso_theme = fore_color_mso_theme\n if fore_color_brightness is not _DO_NOT_CHANGE:\n self.fore_color_brightness = fore_color_brightness\n\n if back_color_rgb is not _DO_NOT_CHANGE:\n self.back_color_rgb = back_color_rgb\n if back_color_mso_theme is not _DO_NOT_CHANGE:\n self.back_color_mso_theme = back_color_mso_theme\n if back_color_brightness is not _DO_NOT_CHANGE:\n self.back_color_brightness = back_color_brightness\n\n if pattern is not _DO_NOT_CHANGE:\n self.pattern = pattern", "def fill(self, color=WHITE):\n # Error checking and data munging to resolve the 'color' input\n if isinstance(color, str):\n if color.upper() in COLOR_LIST:\n color = globals()[color.upper()]\n else:\n raise ValueError(\n f\"The color name: {color} is not supported. \"\n f\"Please use one of {COLOR_LIST}\")\n elif isinstance(color, tuple):\n valid = valid_color_tuple(color)\n if not valid:\n raise ValueError(f\"A non RGB color tuple was provided: {color}\")\n elif isinstance(color, int):\n if color > 255 or color < 0:\n raise ValueError(f\"A value of '{color}' for color cannot be used for RGB, \"\n \"please use a number in the range 0-255\")\n else:\n color = (color, color, color)\n self.np.fill(color)\n self.np.show()\n return True", "def set_color(self, color, filled):\n for cell in filled:\n self.board[cell[0], cell[1]] = color", "def proc_fill_color(self, tokens):\n\n self.pen.fill_color = self._proc_color(tokens)\n return []", "def fill_style(stroke_str, fill_str, state):\r\n settings = faint.Settings()\r\n if stroke_str == \"none\" and fill_str != \"none\":\r\n settings.fillstyle = 'f'\r\n settings.fg = parse_color(fill_str, \"1.0\", state)\r\n elif stroke_str != \"none\" and fill_str == \"none\":\r\n settings.fillstyle = 'b'\r\n settings.fg = parse_color(stroke_str, \"1.0\", state)\r\n else:\r\n settings.fillstyle = 'bf'\r\n settings.fg = parse_color(stroke_str, \"1.0\", state)\r\n settings.bg = parse_color(fill_str, \"1.0\", state)\r\n return settings", "def fill_px(self, fill_px):\n\n self._fill_px = fill_px", "def getPointFill(self):\n l = [point.fill for point in self.points]\n if l.count(l[0]) == len(l):\n return l[0]\n else:\n raise ValueError(\"The fill attributes of the points must be the same otherwise it makes no sense.\")", "def flush(self):\n if self.fill:\n self._turtle.fill(False)\n self._turtle.fill(True)", "def FloodFill(*args, **kwargs):\n return _gdi_.PseudoDC_FloodFill(*args, **kwargs)", "def fill(framebuf, color):\n if color:\n fill = 0xFF\n else:\n fill = 0x00\n for i in range(len(framebuf.buf)): # pylint: disable=consider-using-enumerate\n framebuf.buf[i] = fill", "def fill(framebuf, color):\n if color:\n fill = 0xFF\n else:\n fill = 0x00\n for i in range(len(framebuf.buf)): # pylint: disable=consider-using-enumerate\n framebuf.buf[i] = fill", "def FloodFill(*args, **kwargs):\n return _gdi_.DC_FloodFill(*args, **kwargs)", "def fill(framebuf, color):\n if color:\n bits = color & 0b11\n fill = (bits << 6) | (bits << 4) | (bits << 2) | (bits << 0)\n else:\n fill = 0x00\n\n framebuf.buf = [fill for i in range(len(framebuf.buf))]", "def filled(self, fill_value):\n sdata = self.data\n new_data = numpy.ma.filled(sdata, fill_value=fill_value)\n if new_data == sdata:\n return self\n 
else:\n return type(self)(new_data, self.bset)", "def clear(self, fill = 0x00):\n self._buffer = [ fill ] * ( self.width * self.height )", "def draw_circle_filled(self, x0, y0, r, color=None):\n self._draw_fast_vline(x0, y0 - r, 2 * r + 1, color)\n self._draw_circle_filled_helper(x0, y0, r, 3, 0, color)", "def __fill_color(self, uol_c, uil_c, lol_c, lil_c):\n self.__fill_lip_lines(uol_c, uil_c)\n self.__fill_lip_lines(lol_c, lil_c)\n self.__add_color(1)\n self.__fill_lip_solid(uol_c, uil_c)\n self.__fill_lip_solid(lol_c, lil_c)\n self.__smoothen_color(uol_c, uil_c)\n self.__smoothen_color(lol_c, lil_c)", "def update_fill(self, event):\n if event.type == 'FILL':\n self.update_positions_from_fill(event)\n self.update_holdings_from_fill(event)", "def update_fill(self, event):\n if event.type == 'FILL':\n self.update_positions_from_fill(event)\n self.update_holdings_from_fill(event)", "def update_fill(self, event):\r\n\r\n if event.type == 'FILL':\r\n self.update_positions_from_fill(event)\r\n self.update_holdings_from_fill(event)", "def init_fill_color_checkbox(self):\n self.vars[\"fill_color\"] = BooleanVar(self.frame)\n self.buttons[\"chkbtn_color\"] = Checkbutton(\n self.frame, text='fill color',\n var=self.vars[\"fill_color\"])\n self.buttons[\"chkbtn_color\"].grid(row=6, column=1)", "def fill_rectangle(min_x: float, min_y: float, max_x: float, max_y: float, color: Color):\n turtle.goto(min_x, min_y)\n turtle.fillcolor(color)\n turtle.begin_fill()\n turtle.goto(max_x, min_y)\n turtle.goto(max_x, max_y)\n turtle.goto(min_x, max_y)\n turtle.end_fill()", "def fill(framebuf, color):\n fill = (color >> 16) & 255, (color >> 8) & 255, color & 255\n for i in range(0, len(framebuf.buf), 3):\n framebuf.buf[i : i + 3] = bytes(fill)", "def end_fill():\n turtleTmp.end_fill()", "def FloodFillPoint(*args, **kwargs):\n return _gdi_.DC_FloodFillPoint(*args, **kwargs)", "def FloodFillPoint(*args, **kwargs):\n return _gdi_.PseudoDC_FloodFillPoint(*args, **kwargs)", "def FillPath(*args, **kwargs):\n return _gdi_.GraphicsContext_FillPath(*args, **kwargs)", "def fill(self, color: int) -> None:\n red = (color >> 16) & 0xFF\n green = (color >> 8) & 0xFF\n blue = color & 0xFF\n for x in range(24):\n offset = unpack_from(\">HHH\", self.ledmap_bytes, x * 6)\n self._is31[offset[self.r_offset]] = red\n self._is31[offset[self.g_offset]] = green\n self._is31[offset[self.b_offset]] = blue", "def fill(self, color: int) -> None:\n red = (color >> 16) & 0xFF\n green = (color >> 8) & 0xFF\n blue = color & 0xFF\n for x in range(24):\n offset = unpack_from(\">HHH\", self.ledmap_bytes, x * 6)\n self._is31[offset[self.r_offset]] = red\n self._is31[offset[self.g_offset]] = green\n self._is31[offset[self.b_offset]] = blue", "def fill_colour_of_layer(layer_id):\n if layer_id in layer_fill_colors:\n return layer_fill_colors[layer_id]\n else:\n return 'grey'", "def update_fill(self, event):\n if event.type == 'FILL':\n self.update_positions_from_fill(event)\n self.update_prices_from_fill(event)\n self.update_holdings_from_fill(event)", "def add_fill(self, shape, value, name=None):\n return self._build_op('Fill', [shape, value], name=name)", "def picture_fill_format(self, picture_fill_format):\n self._picture_fill_format = picture_fill_format", "def fill(a, b=None, c=None, along=\"x\", baseline=None, fill=None, colormap=None, palette=None, opacity=1.0, title=None, style=None, id=None, xmin=None, xmax=None, ymin=None, ymax=None, label=None, xlabel=None, ylabel=None, xscale=\"linear\", yscale=\"linear\", padding=10, width=None, 
height=None, canvas_style=None):\n canvas = Canvas(width=width, height=height, style=canvas_style)\n axes = canvas.axes(xmin=xmin, xmax=xmax, ymin=ymin, ymax=ymax, label=label, xlabel=xlabel, ylabel=ylabel, xscale=xscale, yscale=yscale, padding=padding)\n mark = axes.fill(a=a, b=b, c=c, along=along, baseline=baseline, fill=fill, colormap=colormap, palette=palette, opacity=opacity, title=title, style=style, id=id)\n return canvas, axes, mark", "def set_color(self, c, color, draw=True):\n \n if c == self.maze.get_start_cell() or c == self.maze.get_end_cell():\n return\n self.cvs.itemconfig(self.cvs_cells[c], fill=color)\n\n if draw: self.draw()", "def fill(self, arr, color=None):\n\n for point in self.points:\n arr[point.x][point.y] = color if color is not None else point.color", "def fill_color(self, _col):\n for x in xrange(0, self.__resolution[0], 1):\n for y in xrange(0, self.__resolution[1], 1):\n self.__framebuffer[(x, y)] = _col", "def on_fill(self, oid, body):\n\t\tlogger.info('Consuming filled Order')\n\t\tfill = body['fill']\n\n\t\t# update the position first\n\t\tself.pos[fill.symbol].on_fill(fill)\n\n\t\t# getting data from the fill event\n\t\tQ = fill.quantity\n\t\tK, D, C = fill.fill_cost, fill.fill_type, fill.commission\n\n\t\tcost = D.value * K * Q\n\n\t\tself.commission += C\n\t\tself.cash -= cost + C", "def fill(self, screen=None, colour=None):\n if not screen:\n screen = self.screen\n if not colour:\n colour = self.backgroundColour\n screen.fill(colour)", "def stop_loss_on_fill(self, stop_loss_on_fill):\n\n self._stop_loss_on_fill = stop_loss_on_fill", "def stop_loss_on_fill(self, stop_loss_on_fill):\n\n self._stop_loss_on_fill = stop_loss_on_fill", "def to_fill(self, color:list): \n return {\n 'bbox' : list(self.bbox), \n 'color': rgb_value(color)\n }", "def fill():\n # Switch in edit mode\n bpy.ops.object.mode_set(mode = 'EDIT')\n \n # Fill hole\n bpy.ops.mesh.fill()", "def update_portfolio_on_fill(self, fill: FillEvent):\n self._portfolio.add_new_fill(fill)", "def fill_qty(self, fill_qty):\n\n self._fill_qty = fill_qty", "def fill_circle(self, x0, y0, r, color):\n f = 1 - r\n dx = 1\n dy = -r - r\n x = 0\n y = r\n self.vline(x0, y0 - r, 2 * r + 1, color)\n while x < y:\n if f >= 0:\n y -= 1\n dy += 2\n f += dy\n x += 1\n dx += 2\n f += dx\n self.vline(x0 + x, y0 - y, 2 * y + 1, color)\n self.vline(x0 - x, y0 - y, 2 * y + 1, color)\n self.vline(x0 - y, y0 - x, 2 * x + 1, color)\n self.vline(x0 + y, y0 - x, 2 * x + 1, color)", "def circle(self, xo: int, yo: int, radius: int, color: int, fill=False):\n for x in range(xo - radius, xo + radius + 1):\n square = sqrt(radius ** 2 - (x - xo) ** 2)\n y = yo + square\n self.pixel(x, floor(y), color)\n y = yo - square\n self.pixel(x, floor(y), color)\n for y in range(yo - radius, yo + radius + 1):\n square = sqrt(radius ** 2 - (y - yo) ** 2)\n x = xo + square\n self.pixel(floor(x), y, color)\n x = xo - square\n self.pixel(floor(x), y, color)\n if fill:\n if radius > 1:\n self.circle(xo, yo, radius - 1, color, True)\n else:\n self.circle(xo, yo, radius - 1, color, False)", "def fill(self, color):\n self._surf.fill(color)\n self._version += 1\n spyral.util.scale_surface.clear(self._surf)\n return self", "def fillcircle(draw, centrex, centrey, radius, color=\"#AAAAAAFF\") -> None:\n # convert cartesian centre to pixel centre\n cx, cy = pixelcoord(centrex, centrey)\n # top left and bottom right coordinates, must never reverse\n rect = [(cx-radius, cy-radius), (cx+radius, cy+radius)]\n # draw, same color for outline and fill\n 
draw.ellipse(rect, color, color)", "def fill(surface, color):\n w, h = surface.get_size()\n r, g, b, _ = color\n for x in range(w):\n for y in range(h):\n a = surface.get_at((x, y))[3]\n surface.set_at((x, y), pygame.Color(r, g, b, a))", "def fill(\n a,\n b=None,\n c=None,\n along=\"x\",\n baseline=None,\n color=None,\n opacity=1.0,\n title=None,\n style=None,\n filename=None,\n xmin=None,\n xmax=None,\n ymin=None,\n ymax=None,\n show=True,\n xshow=True,\n yshow=True,\n label=None,\n xlabel=None,\n ylabel=None,\n xscale=\"linear\",\n yscale=\"linear\",\n padding=10,\n width=None,\n height=None,\n ):\n canvas = Canvas(width=width, height=height)\n axes = canvas.cartesian(\n xmin=xmin,\n xmax=xmax,\n ymin=ymin,\n ymax=ymax,\n show=show,\n xshow=xshow,\n yshow=yshow,\n label=label,\n xlabel=xlabel,\n ylabel=ylabel,\n xscale=xscale,\n yscale=yscale,\n padding=padding)\n mark = axes.fill(\n a=a,\n b=b,\n c=c,\n along=along,\n baseline=baseline,\n color=color,\n opacity=opacity,\n title=title,\n style=style,\n filename=filename)\n return canvas, axes, mark", "def render_ellipse_filled(shape, center_x, center_y, color, angle=0):\n # Set color\n if len(color) == 4:\n GL.glColor4ub(shape.color[0], shape.color[1], shape.color[2],\n shape.color[3])\n GL.glEnable(GL.GL_BLEND)\n GL.glBlendFunc(GL.GL_SRC_ALPHA, GL.GL_ONE_MINUS_SRC_ALPHA)\n elif len(color) == 3:\n GL.glDisable(GL.GL_BLEND)\n GL.glColor4ub(shape.color[0], shape.color[1], shape.color[2], 255)\n\n GL.glBindBuffer(GL.GL_ARRAY_BUFFER, shape.vbo_id)\n GL.glVertexPointer(2, GL.GL_FLOAT, 0, 0)\n\n GL.glLoadIdentity()\n GL.glTranslatef(center_x, center_y, 0)\n if angle:\n GL.glRotatef(angle, 0, 0, 1)\n\n GL.glDrawArrays(GL.GL_TRIANGLE_FAN, 0, shape.size)", "def stroke_color(stroke):\n if stroke ==0:\n return \"cornflowerblue\" \n elif stroke ==1:\n return \"limegreen\"\n elif stroke==2:\n return \"orange\"\n else:\n return \"limegreen\"", "def handlerSelectFillColor(self):\r\n dialog = QtGui.QColorDialog(self.layerSymbolFillColor)\r\n \r\n if dialog.exec_():\r\n selectedColor = dialog.selectedColor()\r\n sender = self.sender()\r\n if sender.objectName() == 'buttonLayerSymbolFillColor':\r\n self.layerSymbolFillColor = selectedColor\r\n elif sender.objectName() == 'buttonStyleCategorizedFillColor':\r\n self.styleCategorizedColor = selectedColor\r\n elif sender.objectName() == 'buttonStyleGraduatedFillColor':\r\n self.styleGraduatedColor = selectedColor\r\n elif sender.objectName() == 'buttonStyleRuleBasedFillColor':\r\n self.styleRuleBasedColor = selectedColor\r\n sender.setStyleSheet('background-color: {0};'.format(selectedColor.name()))", "def fill(self, *args, **kwargs):\n closed = kwargs.pop('closed', True)\n return super(RadarAxes, self).fill(closed=closed, *args, **kwargs)", "def fill(self, *args, **kwargs):\n closed = kwargs.pop('closed', True)\n return super(RadarAxes, self).fill(closed=closed, *args, **kwargs)", "def fill(self, *args, **kwargs):\r\n closed = kwargs.pop('closed', True)\r\n return super(RadarAxes, self).fill(closed=closed, *args, **kwargs)", "def _set_fill(self, _var_name, _list_index, _operation):\r\n self.command_stack.do(model.structure.SetFill(self._structure, bool(self._fill_var.get())))", "def reset(self):\n self._turtle.clear()\n self._turtle.setposition((0,0)) \n try:\n self._turtle.shape('pen.gif')\n except:\n self._turtle.shape('classic')\n self._turtle.color('red')\n self.speed = 0\n \n #pair = self._turtle.color()\n self._pencolor = self._turtle.color()[0]\n self._fillcolor = self._turtle.color()[0]", "def 
getFill(self):\n return self.area_show", "def _stroke_mode(self):\r\n if not self.color:\r\n self._color_change_mode()\r\n self.input_scene.get_stk_color(self.color)\r\n self._mode_select(2)" ]
[ "0.8201772", "0.7598433", "0.7313783", "0.67560655", "0.6748844", "0.6730906", "0.6724899", "0.67112285", "0.6703533", "0.6412003", "0.63272303", "0.6276365", "0.6182439", "0.6151799", "0.6137227", "0.61257184", "0.59977", "0.59752995", "0.59577733", "0.5939774", "0.58236915", "0.5753857", "0.5753857", "0.57025933", "0.5681763", "0.568141", "0.56135905", "0.56135905", "0.5579686", "0.5577562", "0.55658513", "0.5558589", "0.5547953", "0.5530666", "0.550911", "0.54862565", "0.5459196", "0.53902435", "0.538376", "0.53620535", "0.53525054", "0.5328295", "0.5278362", "0.5276518", "0.5245875", "0.51479256", "0.51458776", "0.5119852", "0.50971127", "0.50971127", "0.5076764", "0.5068013", "0.5052705", "0.50485903", "0.5033745", "0.5024091", "0.49552163", "0.49552163", "0.49467033", "0.49380273", "0.49217573", "0.48719308", "0.48613068", "0.48595744", "0.4853733", "0.4843829", "0.483235", "0.483235", "0.48302913", "0.48301658", "0.48280457", "0.482605", "0.48207194", "0.47758916", "0.47753328", "0.4765494", "0.47573668", "0.4747289", "0.46939686", "0.46939686", "0.46869022", "0.4667647", "0.46606725", "0.46576557", "0.46494654", "0.46360734", "0.46274528", "0.4614583", "0.45904395", "0.45821187", "0.4523304", "0.44990444", "0.44943064", "0.4478887", "0.4478887", "0.4474032", "0.4466877", "0.44540575", "0.44539905", "0.44464883" ]
0.76650685
1
Indicates whether the pen's icon is visible. Drawing commands will still work while the pen icon is hidden. There will just be no indication of the pen's current location on the screen.
def visible(self):
    return self._turtle.isvisible()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def is_visible(self):\n return self._visible", "def is_visible(self):\n return self.proto.display_type == DISPLAY_TYPE.Visible.value", "def is_visible(self):\n return self.container['is_visible']", "def is_visible(self):\n return self.rect.x < self.screen_rect.width", "def is_visible(self):", "def isVisible( self ):\n layer = self.layer()\n if ( layer and not layer.isVisible() ):\n return False\n# \n# if ( self.isIsolateHidden() ):\n# return False\n# \n return self._visible", "def GetGripperVisible(self):\r\n\r\n return self._gripper_visible", "def is_visible(self):\n return self._currently_shown", "def isVisible(self):\n\t\treturn True", "def is_visible(self, position, size=0):\n # return True\n size /= self.scale # size is in pixel\n in_x = (self.focus.x + self.offset.x / self.scale - size <=\n position.x <=\n self.focus.x - self.offset.x / self.scale + size)\n in_y = (self.focus.y + self.offset.y / self.scale - size <=\n position.y <=\n self.focus.y - self.offset.y / self.scale + size)\n # if name == \"earth\":\n # print(\"{:+e} {:+e} {}\".format(self.focus.y + self.offset2.y\n # , position.y, in_y))\n # print(\"{:+e} {:+e}\".format(self.focus.x, self.focus.y))\n return in_x and in_y", "def is_visible(self, path):\n return True", "def visible(self):\n return self._visible", "def visible(self):\n return self._visible", "def is_element_visible(self):\n if self.web_element.is_displayed():\n return True\n else:\n return False", "def is_ruler_visible(self):\n return self.container['is_ruler_visible']", "def visible(self):\n return -PipePair.WIDTH < self.x < WIN_WIDTH", "def get_visible(self):\n return self._visible", "def is_visible(self):\n return self.real > 0", "def is_visible(self):\n return self.window.active_panel() == self.full_name", "def is_visible(self):\n try:\n return self.element.is_displayed()\n except (NoSuchElementException,\n ElementNotVisibleException,\n StaleElementReferenceException):\n return False", "def _is_visible(self, key) -> bool:\n return self._get_DecoSetting(key).visible", "def is_outline_shown(self):\n return self.container['is_outline_shown']", "def visible(self):\n return ctypes.windll.user32.IsWindowVisible(self.hwnd)", "def IsShown(self):\r\n\r\n return self._shown", "def isShown(self):\n return self.shown", "def is_visible(self, x, y) :\n\t\tres_x = (x > self.x_min) and (x < self.x_max)\n\t\t# print 'res_x : {0}, x : {1}, x_min : {2}, x_max:{3}'.format(res_x, x, self.x_min, self.x_max)\n\t\tres_y = (y > self.y_min) #and (y < self.y_max)\n\t\treturn res_x and res_y", "def _is_visible(self, point):\n return point[0] > 0 and point[0] < 1 and point[1] > 0 and point[1] < 1", "def isdrawn(self):\n return hasattr(self, 'drawn')", "def is_alive(self):\r\n return self.visible", "def inspectedNodeIsVisible(self):\n return self._inspected_node_is_visible", "def IsVisible(self, *args):\n return _XCAFDoc.XCAFDoc_ColorTool_IsVisible(self, *args)", "def is_visible ( self ):\n return not self.is_hidden and (\n self.priority is None or self.priority >= 0\n )", "def isVisible(self, p_int): # real signature unknown; restored from __doc__\n return False", "def isVisible(self, p_int): # real signature unknown; restored from __doc__\n return False", "def set_visible(self):\n\t\tself.hide()\n\t\tself.__sys_tray_icon.setVisible(True)", "def IsVisible(self, *args):\n return _XCAFDoc.XCAFDoc_LayerTool_IsVisible(self, *args)", "def visible(self, hipid):\n s = self.hip_stars[hipid]\n if s[3]<min(self.inner_dec, self.outer_dec): return False\n return s[3]<=max(self.inner_dec, 
self.outer_dec)", "def XPIsWidgetVisible(inWidget):\n pass", "def isstart(self) -> bool:\n if len(self._pile) != self._pos + 1:\n return False\n visible_count = 0\n hidden_count = 0\n for c_card in self._pile:\n if c_card.visible:\n visible_count += 1\n else:\n hidden_count += 1\n return hidden_count == self._pos and visible_count == 1", "def is_visible(self):\n return self.visible_date < timezone.now()", "def visible(self):\r\n return self.column.visible", "def show(self):\r\n if self.visible == 1 and time() - self.lastMotion > self.delay:\r\n self.visible = 2\r\n if self.visible == 2:\r\n self.deiconify()", "def show(self):\r\n if self.visible == 1 and time() - self.lastMotion > self.delay:\r\n self.visible = 2\r\n if self.visible == 2:\r\n self.deiconify()", "def show(self):\n if self.visible == 1 and time() - self.lastMotion > self.delay:\n self.visible = 2\n if self.visible == 2:\n self.deiconify()", "def isObscuredBy(self, QGraphicsItem): # real signature unknown; restored from __doc__\n return False", "def IsInstanceVisible(self, *args):\n return _XCAFDoc.XCAFDoc_ColorTool_IsInstanceVisible(self, *args)", "def is_visible(self):\n if self._namespace and self._namespace.is_anonymous():\n return True\n return self._rawdoc.get_inherited_visibility() != DocType.none", "def off_screen(self):\n return self._x < 0", "def isdown(self):\n return self._drawing", "def GetGUIOpen(self):\n return bool(self.gui_open)", "def is_indicator():\n return True", "def is_gridlines_visible(self):\n return self.container['is_gridlines_visible']", "def __bool__(self):\n return _osgAnimation.mapVertexInfluence___bool__(self)", "def visible(self) -> bool:\n try:\n return bool(self.driver.wait_until_all_visible(*self.ROOT_LOCATOR))\n except WebDriverException:\n return False", "def has_visible_entity(self):\n ret = False\n for e in self:\n if e.is_visible() == True:\n ret = True\n break\n return ret", "def conditionsAreMetForDrawing(self):\n\t\tcurrentController = self.controller.view().window().windowController()\n\t\tif currentController:\n\t\t\ttool = currentController.toolDrawDelegate()\n\t\t\ttextToolIsActive = tool.isKindOfClass_( NSClassFromString(\"GlyphsToolText\") )\n\t\t\thandToolIsActive = tool.isKindOfClass_( NSClassFromString(\"GlyphsToolHand\") )\n\t\t\tif not textToolIsActive and not handToolIsActive: \n\t\t\t\treturn True\n\t\treturn False", "def IsShown(self):\r\n \r\n return not self.HasFlag(self.optionHidden)", "def IsPaneButtonVisible(self, part):\r\n\r\n captionRect = wx.Rect()\r\n\r\n for temp_part in self._uiparts:\r\n if temp_part.pane == part.pane and \\\r\n temp_part.type == AuiDockUIPart.typeCaption:\r\n captionRect = temp_part.rect\r\n break\r\n\r\n return captionRect.ContainsRect(part.rect)", "def get_active(self):\n if hasattr(self, 'canvas'):\n return True\n else:\n return False", "def is_on(self):\n return self.wink.state()", "def GetTitleBarVisible(self):\n return self._title_bar_visible", "def is_visible(self, url=''):\n return bool(url)", "def show( self ):\n if self.visible == 1:#ohnheiser hack and time() - self.lastMotion > self.delay:\n self.visible = 2\n if self.visible == 2:\n self.deiconify()", "def is_win_dispute_button_present(self):\n return self.is_element_present(self.win_dispute_button_locator)", "def IsVisible(self, item):\r\n\r\n # An item is only visible if it's not a descendant of a collapsed item\r\n parent = item.GetParent()\r\n\r\n while parent:\r\n \r\n if not parent.IsExpanded():\r\n return False\r\n \r\n parent = parent.GetParent()\r\n \r\n startX, 
startY = self.GetViewStart()\r\n clientSize = self.GetClientSize()\r\n\r\n rect = self.GetBoundingRect(item)\r\n \r\n if not rect:\r\n return False\r\n if rect.GetWidth() == 0 or rect.GetHeight() == 0:\r\n return False\r\n if rect.GetBottom() < 0 or rect.GetTop() > clientSize.y:\r\n return False\r\n if rect.GetRight() < 0 or rect.GetLeft() > clientSize.x:\r\n return False\r\n\r\n return True", "def is_visible(self, name):\n return self.q(css=\"div.{}\".format(name)).first.visible", "def IsHidden(self):\r\n\r\n return self._hidden", "def IsVisualMode(self):\n return self.mode == ViKeyHandler.VISUAL", "def is_brush(self) -> bool:\n return len(self.solids) > 0", "def visible(self, show):", "def is_attribute_visible(self, key):\n if self.has_key(key):\n attribute_status = getattr(self, key)\n if isinstance(attribute_status, bool) and attribute_status == True:\n return True\n elif isinstance(attribute_status, self.__class__) and \\\n attribute_status.are_any_attributes_visible():\n return True\n\n return False", "def is_visible(self, locator, timeout=15):\n try:\n ui.WebDriverWait(self.driver, timeout).until(EC.visibility_of_element_located((By.CSS_SELECTOR, locator)))\n return True\n except TimeoutException:\n return False", "def display_enabled(self):\n return self._display_mode == _LCD_DISPLAYON", "def is_on(self):\n return self.coordinator.data[self.info_type] == \"red\"", "def drawmode(self):\n return self._turtle.isdown()", "def is_disp(self) -> bool:\n return self.disp_power > 0", "def operator_visible(self):\n return self.data.get('operator_visible', False)", "def operator_visible(self):\n return self.data.get('operator_visible', False)", "def IsItemVisible(self, item):\r\n\r\n # An item is only visible if it's not a descendant of a collapsed item\r\n parent = item.GetParent()\r\n\r\n while parent:\r\n \r\n if not parent.IsExpanded():\r\n return False\r\n \r\n parent = parent.GetParent()\r\n \r\n startX, startY = self.GetViewStart()\r\n clientSize = self.GetClientSize()\r\n\r\n rect = self.GetBoundingRect(item)\r\n \r\n if not rect:\r\n return False\r\n if rect.GetWidth() == 0 or rect.GetHeight() == 0:\r\n return False\r\n if rect.GetBottom() < 0 or rect.GetTop() > clientSize.y:\r\n return False\r\n if rect.GetRight() < 0 or rect.GetLeft() > clientSize.x:\r\n return False\r\n\r\n return True", "def ison(self):\n return bool(self.pin.state) if self.pinishigh else not bool(self.pin.state)", "def show( self ):\n if self.visible == 1 and time() - self.lastMotion > self.delay:\n self.visible = 2\n if self.visible == 2 and self.msgVar.get()!='':\n self.deiconify()", "def is_hidden():\n return False", "def is_hidden():\n return False", "def check_visibility(self):\r\n\r\n for gs in self.ground_stations:\r\n if self.visible ^ (elevation_dot_product(self.r_ecef,self.ground_stations[gs][1],self.earth) > 0.0):\r\n self.visible ^= 1\r\n self.gs_id = self.ground_stations[gs][0]\r\n return True", "def is_spinner_invisible(self):\n self.q(css='#spinner').first.click()\n self.wait_for_element_invisibility('#anim', 'Button Output is Visible')", "def set_visible(self, visible):\n self.ec._win.set_mouse_visible(visible)\n self.ec._win.set_mouse_platform_visible(visible) # Pyglet workaround\n self._visible = visible", "def IsWindowVisible(hwnd):\r\n return bool(__IsWindowVisible(hwnd))", "def is_hidden(self):\n return self.has_label(HIDDEN_LABEL)", "def vis(self):\n \treturn self._vis", "def has_display(self) -> bool:\r\n return KebaService.DISPLAY in self.services", "def PinButton(self, visible=True):\r\n 
\r\n return self.SetFlag(self.buttonPin, visible)", "def is_on(self):\n return self._controller.is_on", "def is_displayed(self):\n return len(self._find_all_by_locator()) > 0", "def is_on(self) -> bool:\n return self._device.is_on", "def public(self):\n return not self.hidden", "def is_graphic_driver(self):\n if self.class_id == \"0x03\":\n return True\n else:\n return False", "def get_active(self):\n try:\n return not (self.jframe.getExtendedState() & JFrame.ICONIFIED)\n except AttributeError:\n if self.jframe:\n return True\n else:\n return False", "def is_visible(self, timeout=None):\n try:\n self.visibility_of_element_located(timeout)\n except TimeoutException:\n return False\n return True", "def is_element_display(self, selector):\n return True if self.get_element(selector).is_displayed() else False" ]
[ "0.70938164", "0.7088974", "0.7023082", "0.6997821", "0.68921804", "0.6873909", "0.68134916", "0.67763966", "0.66723704", "0.6664352", "0.6607723", "0.660474", "0.660474", "0.6599054", "0.6597098", "0.6593233", "0.6585666", "0.65637505", "0.65612817", "0.65610343", "0.6546338", "0.6461928", "0.64561045", "0.6427342", "0.6420023", "0.64048123", "0.6392349", "0.6287439", "0.62764007", "0.6264809", "0.623295", "0.6227374", "0.61989987", "0.61989987", "0.6133531", "0.61295295", "0.6124351", "0.6065559", "0.6056792", "0.604151", "0.60182476", "0.59708625", "0.59708625", "0.59388703", "0.5909432", "0.5907391", "0.58730125", "0.5859893", "0.58553517", "0.58481026", "0.5847668", "0.5846059", "0.5841599", "0.5838845", "0.5829599", "0.58137494", "0.5809885", "0.5805901", "0.57925117", "0.5767701", "0.57622623", "0.5747535", "0.5742652", "0.57345766", "0.57065666", "0.5706104", "0.5692797", "0.5676518", "0.5675862", "0.56711644", "0.56577677", "0.5647388", "0.56387174", "0.5630835", "0.56105494", "0.5600667", "0.5583286", "0.5583286", "0.55741376", "0.5567026", "0.55585665", "0.55553216", "0.55553216", "0.5547585", "0.5547328", "0.55398315", "0.5534703", "0.5529433", "0.5515639", "0.5505738", "0.5502803", "0.55027694", "0.55001014", "0.5499103", "0.5494546", "0.54921156", "0.54881066", "0.54862314", "0.5483714" ]
0.72434735
0
Represents the pen origin in the draw window. This property is used by the Window to reset the pen. This is a "friend" property and the invariant is not enforced.
def origin(self):
    return self._origin
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def GetConnectionPen(self):\r\n\r\n return self._dottedPen", "def penColor( self ):\n return self._penColor", "def pencolor(self):\n return self._pencolor", "def _set_origin(self):\n self += helper.circle(cx=self.__dict__['x'], cy=self.__dict__['y'], r=2, fill=\"black\", stroke=\"black\", style=\"fill-opacity: 50%\")\n self += helper.text(\"(0,0)\", x=self.__dict__['x']+5, y=self.__dict__['y']-5, style=\"fill-opacity: 50%\")", "def SetConnectionPen(self, pen):\r\n\r\n self._dottedPen = pen\r\n self._dirty = True", "def origin(self):\n return getattr(self.canvas.layout, 'origin', Stacked._origin)", "def GetBorderPen(self):\r\n\r\n return self._borderPen", "def pen(self, pen=None, **pendict):\n _pd = {\"shown\" : self._shown,\n \"pendown\" : self._drawing,\n \"pencolor\" : self._pencolor,\n \"fillcolor\" : self._fillcolor,\n \"pensize\" : self._pensize,\n \"speed\" : self._speed,\n \"resizemode\" : self._resizemode,\n \"stretchfactor\" : self._stretchfactor,\n \"outline\" : self._outlinewidth,\n \"tilt\" : self._tilt\n }\n\n if not (pen or pendict):\n return _pd\n\n if isinstance(pen, dict):\n p = pen\n else:\n p = {}\n p.update(pendict)\n\n _p_buf = {}\n for key in p:\n _p_buf[key] = _pd[key]\n\n if self.undobuffer:\n self.undobuffer.push((\"pen\", _p_buf))\n\n newLine = False\n if \"pendown\" in p:\n if self._drawing != p[\"pendown\"]:\n newLine = True\n if \"pencolor\" in p:\n if isinstance(p[\"pencolor\"], tuple):\n p[\"pencolor\"] = self._colorstr((p[\"pencolor\"],))\n if self._pencolor != p[\"pencolor\"]:\n newLine = True\n if \"pensize\" in p:\n if self._pensize != p[\"pensize\"]:\n newLine = True\n if newLine:\n self._newLine()\n if \"pendown\" in p:\n self._drawing = p[\"pendown\"]\n if \"pencolor\" in p:\n self._pencolor = p[\"pencolor\"]\n if \"pensize\" in p:\n self._pensize = p[\"pensize\"]\n if \"fillcolor\" in p:\n if isinstance(p[\"fillcolor\"], tuple):\n p[\"fillcolor\"] = self._colorstr((p[\"fillcolor\"],))\n self._fillcolor = p[\"fillcolor\"]\n if \"speed\" in p:\n self._speed = p[\"speed\"]\n if \"resizemode\" in p:\n self._resizemode = p[\"resizemode\"]\n if \"stretchfactor\" in p:\n sf = p[\"stretchfactor\"]\n if isinstance(sf, (int, float)):\n sf = (sf, sf)\n self._stretchfactor = sf\n # if \"shearfactor\" in p:\n # self._shearfactor = p[\"shearfactor\"]\n if \"outline\" in p:\n self._outlinewidth = p[\"outline\"]\n if \"shown\" in p:\n self._shown = p[\"shown\"]\n if \"tilt\" in p:\n self._tilt = p[\"tilt\"]\n \n self._update()", "def get_origin(self) -> Vec:\n if self.is_brush():\n bbox_min, bbox_max = self.get_bbox()\n return (bbox_min + bbox_max) / 2\n else:\n return Vec.from_str(self['origin'])", "def x_origin(self):\n return self._x_origin", "def get_origin(self):\n return self.zero", "def SetPen(*args, **kwargs):\n return _gdi_.DC_SetPen(*args, **kwargs)", "def get_origin(self):\n return self.coord_cls(x=0, y=0, system=self)", "def penup(self):\n if not self._drawing:\n return\n self.pen(pendown=False)", "def SetPen(*args):\n return _gdi_.GraphicsContext_SetPen(*args)", "def get_origin(self):\n return self.coord_cls(x=0, y=0, z=0, system=self)", "def SetPen(*args, **kwargs):\n return _gdi_.PseudoDC_SetPen(*args, **kwargs)", "def origin(self):\r\n\r\n return self.ox, self.oy, self.oz", "def _drawOrigin(self):\n screen_coords = self._posToScreenCoords(Vec2())\n\n if not self._isInScreen(screen_coords):\n return\n\n pygame.draw.line(\n self.screen,\n (150, 150, 150),\n (screen_coords[0] - 3, screen_coords[1]),\n (screen_coords[0] + 3, screen_coords[1]),\n )\n 
pygame.draw.line(\n self.screen,\n (150, 150, 150),\n (screen_coords[0], screen_coords[1] - 3),\n (screen_coords[0], screen_coords[1] + 3),\n )", "def origin_x(self):\n return self._origin[0]", "def reset(self):\n self.x_pos = 10\n self.y_pos = 10\n self.line_height = 15", "def get_center_scr(self):\r\n return self.rect.center", "def __reset_crosshair(self):\n self.lhor.set_ydata(self.y_coord)\n self.lver.set_xdata(self.x_coord)", "def SetBorderPen(self, pen):\r\n\r\n self._borderPen = pen\r\n self.RefreshSelected()", "def draw(self):\n return self._draw", "def reset(self):\n self.cur_pos = self._get_current_pos_in_1d()\n\n return self.cur_pos", "def DrawCurrent(self):\r\n \r\n x1, y1 = self._currentX, 0\r\n x1, y1 = self.ClientToScreen((x1, y1))\r\n x2 = self._currentX-1\r\n if wx.Platform == \"__WXMSW__\":\r\n x2 += 1 # but why ????\r\n\r\n y2 = 0\r\n dummy, y2 = self._owner.GetClientSize()\r\n x2, y2 = self._owner.ClientToScreen((x2, y2))\r\n\r\n dc = wx.ScreenDC()\r\n dc.SetLogicalFunction(wx.INVERT)\r\n dc.SetPen(wx.Pen(wx.BLACK, 2, wx.SOLID))\r\n dc.SetBrush(wx.TRANSPARENT_BRUSH)\r\n\r\n self.AdjustDC(dc)\r\n dc.DrawLine (x1, y1, x2, y2)\r\n dc.SetLogicalFunction(wx.COPY)", "def GetPen(*args, **kwargs):\n return _gdi_.StockGDI_GetPen(*args, **kwargs)", "def reset_pos(self):\n\n return self.pos(1, 1)", "def color(self):\n assert False, 'Pen does not have a color; use pencolor or fillcolor'", "def get_current_origin(self):\n return self.prevKs[-1]", "def GetPen(*args, **kwargs):\n return _gdi_.DC_GetPen(*args, **kwargs)", "def dimscr(self):\n return (self.startx, self.starty, self.endx - self.startx, self.endy - self.starty)", "def correct(self):\n self.parent.copyCurrentWinState(self.pltw)\n self.pltw.blklst[self.blkno][self.ypos] = self.data[1] - self.data[2]\n self.pltw.updatePlot()\n self.pltw.dirty = True\n self.pltw.activecurv = self.cpos\n self.parent.updateUI()\n self.hide()", "def _draw_current_pos(self, painter):\n\t\tif self.current_pos is None:\n\t\t\treturn\n\t\tpx = self.map_stamp_to_x(self.current_pos)\n\t\tpw, ph = self._current_pos_pointer_size\n\n\t\t# Line\n\t\tpainter.setPen(QtGui.QPen(self._current_pos_color))\n\t\tpainter.setBrush(QtGui.QBrush(self._current_pos_color))\n\t\tpainter.drawLine(px, self._history_top - 1, px, self._history_bottom + 2)\n\n\t\t# Upper triangle\n\t\tpy = self._history_top - ph\n\t\tpainter.drawPolygon(\n\t\t QtGui.QPolygonF(\n\t\t [QtCore.QPointF(px, py + ph),\n\t\t QtCore.QPointF(px + pw, py),\n\t\t QtCore.QPointF(px - pw, py)]\n\t\t )\n\t\t)\n\n\t\t# Lower triangle\n\t\tpy = self._history_bottom + 1\n\t\tpainter.drawPolygon(\n\t\t QtGui.QPolygonF(\n\t\t [QtCore.QPointF(px, py),\n\t\t QtCore.QPointF(px + pw, py + ph),\n\t\t QtCore.QPointF(px - pw, py + ph)]\n\t\t )\n\t\t)\n\n\t\tpainter.setBrush(self._default_brush)\n\t\tpainter.setPen(self._default_pen)", "def StockGDI_GetPen(*args, **kwargs):\n return _gdi_.StockGDI_GetPen(*args, **kwargs)", "def _get_origin(self):\n return self.__origin", "def get_drawing_offset(self) -> Tuple2IntType:\n return self._drawing_offset", "def reset(self):\n self._x = 0\n self._y = 0", "def restore_geometry(self):\n return stools.SETTINGS.get(\"waveformWidget/geometry\")", "def set_origin( self, x, y ):\n\t\tself.x_offset = x\n\t\tself.y_offset = y\n\t\tself.width = (self._raw_width-x)\n\t\tself.height = (self._raw_height-y)", "def get_origin(self):\n return self.origin", "def __init__(self, glyphSet):\n super(DecomposingPen, self).__init__()\n self.glyphSet = glyphSet", "def draw_o(self):\r\n 
pen.down()\r\n pen.forward(40)\r\n pen.left(90)\r\n pen.forward(40)\r\n pen.left(90)\r\n pen.forward(40)\r\n pen.left(90)\r\n pen.forward(40)\r\n pen.left(90)\r\n pen.up()\r\n pen.forward(50)", "def GetPoint(self):\r\n\r\n return self._pointDrag", "def switch_origin(self):\n self.origin = 'bottom' if self.origin == 'top' else 'top'", "def draw_s(self):\r\n pen.down()\r\n pen.forward(40)\r\n pen.left(90)\r\n pen.forward(20)\r\n pen.left(90)\r\n pen.forward(40)\r\n pen.right(90)\r\n pen.forward(20)\r\n pen.right(90)\r\n pen.forward(40)\r\n pen.up()\r\n pen.back(40)\r\n pen.right(90)\r\n pen.forward(40)\r\n pen.left(90)\r\n pen.forward(50)", "def clone(self):\n return _libsbml.Point_clone(self)", "def pos(self):\n x = (self.ec._win._mouse_x -\n self.ec._win.width / 2.) / (self.ec._win.width / 2.)\n y = (self.ec._win._mouse_y -\n self.ec._win.height / 2.) / (self.ec._win.height / 2.)\n return np.array([x, y])", "def __originate__(self):\n self.pos_to_num = deepcopy(self.o_pos_to_num)\n self.num_to_pos = deepcopy(self.o_num_to_pos)", "def orig_obj(self):\n return self._orig_obj", "def orig_obj(self):\n return self._orig_obj", "def orig_obj(self):\n return self._orig_obj", "def get_position(self):\n return self._border.get_position()", "def origin(self):\n return (self._x_origin, self._y_origin)", "def update_pos(self):\n s = self\n s.rpos = s.rects[0].inf\n s.pos = s.physics.scl_coord_res(s.rpos)", "def test_set_pen(self):\n painter = biotracker.QPainter()\n painter.setPen(100, 50, 30, 33)\n self.assertEqual(\"p(100,50,30,33)\", painter.to_msg())", "def getOrigin(self):\n return self.data", "def start_stroke(self) -> Stroke: # pylint: disable=no-self-use\n return HANDSTROKE", "def pensize(self, width):\n self._penwidth = width", "def clone(self):\n return Point(self.x, self.y)", "def clone(self):\n return Point(self.x, self.y)", "def reflect_x(self):\n\n return Point(self.x, - self.y)", "def set_origin(self, origin):\n self.origin = origin\n self.__symbol_set.update(origin)", "def GetSnapPosition(self):\r\n\r\n snap, hAlign, vAlign, monitor = self._is_docked\r\n \r\n display = wx.Display(monitor)\r\n area = display.GetClientArea()\r\n size = self.GetManagedWindow().GetSize()\r\n \r\n pos = wx.Point()\r\n if hAlign == wx.LEFT:\r\n pos.x = area.x\r\n elif hAlign == wx.CENTER:\r\n pos.x = area.x + (area.width - size.x)/2\r\n else:\r\n pos.x = area.x + area.width - size.x\r\n\r\n if vAlign == wx.TOP:\r\n pos.y = area.y\r\n elif vAlign == wx.CENTER:\r\n pos.y = area.y + (area.height - size.y)/2\r\n else:\r\n pos.y = area.y + area.height - size.y\r\n\r\n return pos", "def GetOrigin(self):\n ...", "def GetOrigin(self):\n ...", "def GetOrigin(self):\n ...", "def neutral(self):\n\t\treturn AffineCurvePoint(None, None, self)", "def pos(self):\n pos = self.widget.pos()\n return Pos(pos.x(), pos.y())", "def wm(self):\n return self.position", "def __init__(self, *args, **kwargs):\n _gdi_.GraphicsPen_swiginit(self,_gdi_.new_GraphicsPen(*args, **kwargs))", "def setPen(self, *args, **kwargs):\n if kwargs == {} and (args == () or args == ('default',)):\n self.opts['pen'] = fn.mkPen(getConfigOption('foreground'))\n else:\n self.opts['pen'] = fn.mkPen(*args, **kwargs)\n\n self.picture = None\n self.update()", "def get_attach_point_bot(self):\n return self._point_draw.mapToGlobal(self._point_draw.rect().center())", "def position(self):\n return self._position.copy()", "def y_origin(self):\n return self._y_origin", "def reset(self):\r\n self.x = self.initX\r\n self.y = self.initY\r\n self.dir= self.initDir", "def 
coord(self):\r\n return self.model.coord", "def __repr__ (self) :\n print (self.Win)\n print (self.Whid)\n print (self.Wout)\n plt.plot (range (len (self.xvec)), self.xvec [:])\n plt.show ()\n return \"\"", "def previous_line():\r\n set_point(point().previous_line())", "def _raw_graph_window_dim(self):\n # self.y + 10: 1 for border, 9 for scrollbar\n return self.x+1., self.y+1.+9., self.w-2., self.h-2.-9.", "def get_rect (self) :\n return self.rect.copy()", "def getCurveExplicitlySet(self):\n return _libsbml.ReferenceGlyph_getCurveExplicitlySet(self)", "def get_graphics_state(self):\n return GraphicsState(\n dash_array=self.dash_array,\n line_cap=self.line_cap,\n line_join=self.line_join,\n miter_limit=self.miter_limit,\n stroke_transparency=self._get_stroke_transparency(),\n fill_transparency=self._get_fill_transparency(),\n )", "def x(self):\n return _libsbml.Point_x(self)", "def up(self):\r\n self.brush_on = False", "def get_origin(self) -> Vec:\n size_min, size_max = self.get_bbox()\n origin = (size_min + size_max) / 2\n return origin", "def _getCurrentPoint(self):\n return self.__currentPoint", "def calcPath(self) -> QPainterPath:\n path = QPainterPath(QPointF(self.owner.posSource[0], self.owner.posSource[1]))\n path.lineTo(self.owner.posDestination[0], self.owner.posDestination[1])\n return path", "def getPoint(self):\n return self.point", "def getPoint(self):\n return self.point", "def clear(self):\n self._x_prev = None\n self._y_prev = None", "def getCurveExplicitlySet(self):\n return _libsbml.ReactionGlyph_getCurveExplicitlySet(self)", "def __init__(self):\n self.x = 0\n self.y = 0", "def __init__(self):\n self.x = 0\n self.y = 0", "def set_origin(self, origin_x, origin_y):\r\n self.x = origin_x - (self.rect.width / 2)\r\n self.y = origin_y + self.rect.height\r\n self._update_rect()", "def _removePen(self,pen):\n if pen in self._pencils:\n self._pencils.remove(pen)" ]
[ "0.6315457", "0.6157232", "0.5957476", "0.5798523", "0.5758914", "0.574672", "0.5725725", "0.57172686", "0.5713631", "0.5709259", "0.5697349", "0.56255484", "0.56203943", "0.5529922", "0.5509871", "0.5480417", "0.5455918", "0.5441902", "0.5405522", "0.5344211", "0.52710193", "0.5267352", "0.5253851", "0.52508456", "0.5231383", "0.5221449", "0.5211929", "0.52019244", "0.51737154", "0.51557875", "0.5145028", "0.5139142", "0.51272494", "0.51181245", "0.51059455", "0.5103002", "0.5099393", "0.5094673", "0.5070395", "0.506309", "0.50567687", "0.5047474", "0.5028431", "0.5026042", "0.5002937", "0.50024766", "0.4994322", "0.49771705", "0.49595645", "0.49579817", "0.49552566", "0.49552566", "0.49552566", "0.49513406", "0.49473408", "0.49471828", "0.49394187", "0.4912566", "0.49084878", "0.48857293", "0.4884223", "0.4884223", "0.48841318", "0.48777503", "0.4876891", "0.48739216", "0.48739216", "0.48739216", "0.4850207", "0.48483455", "0.4848116", "0.48453784", "0.48442316", "0.4841834", "0.48305365", "0.4827753", "0.48198155", "0.48125377", "0.48112643", "0.48105106", "0.48044908", "0.4803819", "0.47980556", "0.47974098", "0.47973037", "0.47893044", "0.4786657", "0.47816673", "0.4781029", "0.47786072", "0.47786072", "0.47779542", "0.4770635", "0.47699252", "0.47699252", "0.4768205", "0.47662073" ]
0.545472
20
The x-coordinate of this pen. To change the x-coordinate, use one of the drawing methods. This attribute may not be (directly) altered.
def x(self):
    return self._turtle.xcor()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def set_x(self, new_x):\r\n self.x = new_x", "def x(self):\n return _libsbml.Point_x(self)", "def get_x(self):\n return self.posX", "def setX(self, *args):\n return _libsbml.Point_setX(self, *args)", "def setX(self, x):\n self.position.setX(x)", "def setX(self, x):\r\n\t\tself._x=x", "def Getxcoord(self):\n return self.x_coord", "def set_x(self, x):\n self.scene.set_x_loc(x)\n self.redraw()", "def set_x(self, x):\n self._x = x", "def setX(self, x):\n self.x = x\n pass", "def SetX(self, x):\r\n\r\n self._x = x", "def set_x(self, x: float):\n self.x = x", "def get_pos_x(self):\n return self.__pos_x", "def get_x_position(self):\n return self.rect.x", "def getXCoordinate(self) -> float:\n return self.x_coord", "def setXPos(self,newXPos):\n self.xPos=newXPos", "def _get_x(self):\n return self.position.x", "def x(self):\n if self._x is None:\n self.compute_coordinates()\n return self._x", "def x_coord(self):\n\n return self.x0 + np.arange(self.nx) * self.dx", "def get_x(self):\n return self.coords[0]", "def set_new_pos_in_x(self, new_pos):\n self.__pos_x = new_pos", "def setXOffset(self, *args):\n return _libsbml.Point_setXOffset(self, *args)", "def set_axis_x(self, new_axis_point):\r\n self.__x_axis = new_axis_point", "def x(self):\n return self._coords[0]", "def x(self):\n return self.coords[0]", "def setX(self, value):\n self.position[0] = value", "def get_x(self) -> int:\n return self.__x", "def x(self):\r\n return self.position.x", "def getX(self):\n return self.__x", "def reflect_x(self):\n\n return Point(self.x, - self.y)", "def set_x(self, state_value):\n val = state_value / self.space_subdivisions + self.unit\n epsilon = 1e-6\n if not self.unit <= val <= 1.0 - self.unit + epsilon:\n raise AttributeError(\"Value out of bounds\")\n self.pos_x = val", "def GetX(self):\r\n\r\n return self._x", "def getX(self):\n return self.x", "def getX(self):\r\n\t\treturn self._x", "def get_axis_x(self):\r\n return self.__x_axis", "def pos_x(self, *args, **kwargs) -> Any:\n pass", "def offset_x(self, x: int):\n self.tk_ref.geometry(f'{self.width}x{self.height}+{x}+{self.offset_y}')", "def xaxis ( self ) :\n return self.__xaxis", "def xaxis ( self ) :\n return self.__xaxis", "def reflect_x(self):\n r_x = self.x\n r_y = self.y *-1\n\n return (Point(r_x,r_y))", "def __get_x__(self):\n return self.Direction['x']", "def xaxis ( self ) :\n return self.__xaxis", "def setX(self, *args):\n return _libsbml.BoundingBox_setX(self, *args)", "def get_x_position(self):\n return self.actual_coordinates[0]", "def x(self, value):\n self.data_validator(\"x\", value)\n self.__x = value", "def getXOffset(self):\n return _libsbml.Point_getXOffset(self)", "def set_pos(self, x):\n self._pos = x", "def x ( self ) :\n return self.xvar", "def get_ship_x(self):\n return self.x", "def x(self):\n return self._x", "def x(self):\n return self._x", "def x(self):\n return self._x", "def x(self):\n return self._x", "def x(self):\n return self._x", "def x(self):\n return self._x", "def x(self):\n return self._x", "def x(self):\n return self._x", "def x(self):\n return self._x", "def x(self):\n return self._x", "def x(self):\n return self._x", "def x(self):\n return self._x", "def x(self):\n return self._x", "def x(self):\n return self._x", "def x(self):\n return self._x", "def x(self):\n return self._x", "def x(self) -> int:\n return self._x", "def getX(self):\n return self.position.getX()", "def x(self):\n return self.__x", "def x(self):\n return self.__x", "def x(self):\n return self.__x", "def x(self):\n return self.__x", "def 
x(self):\n return self.__x", "def x(self):\n return self.__x", "def x(self):\n return self.__x", "def x(self):\n return self.__x", "def x(self):\n return self.__x", "def x(self):\n return self.__x", "def x(self):\n return self.__x", "def xaxis(self):\n return self._xaxis", "def x(self, value):\n self.validate_input(x=value)\n self.__x = value", "def get_x(self):\n\t\treturn self._collision_rect.x + 14", "def x(self, x=None):\n\n if x is None:\n return self._x\n else:\n if not isinstance(x, int) and not isinstance(x, float):\n raise TypeError(\"x must be numeric, not '%s'\" % x)\n self._x = x", "def x(self, x=None):\n\n if x is None:\n return self._x\n else:\n if not isinstance(x, int) and not isinstance(x, float):\n raise TypeError(\"x must be numeric, not '%s'\" % x)\n self._x = x", "def get_alien_x(self):\n return self.x", "def offset_x(self) -> int:\n self.tk_ref.update()\n return self.tk_ref.winfo_x()", "def x(self):\n return self.x", "def xax(self):\n return self.__xax", "def x_origin(self):\n return self._x_origin", "def origin_x(self):\n return self._origin[0]", "def setX(self, value):\n self.components[0] = value", "def setX(self, value):\n self.components[0] = value", "def x(self):\n return self._kml['x']", "def show_x(self):\n print(self.x)", "def to_x_coordinates(self):\n self.plotter.to_x_coordinates(self.ax)\n self.plotter.replot(self.ax)\n self.plotter.cells.draw(self.ax)\n self.x_label.set_text(self.plotter.plot_xlabel)\n self.fig.canvas.draw()", "def getXPoint(self, x):\n # Find the correct parameter\n t = (x - self.p0.x) / self.d.x\n return self.point(t)", "def OffsetX(*args, **kwargs):\n return _gdi_.AlphaPixelData_Accessor_OffsetX(*args, **kwargs)", "def x(self, value):\n if not isinstance(value, int):\n raise TypeError(\"x must be an integer\")\n if value < 0:\n raise ValueError(\"x must be >= 0\")\n self.__x = value", "def OffsetX(*args, **kwargs):\n return _gdi_.NativePixelData_Accessor_OffsetX(*args, **kwargs)", "def x(self, value: int):\n if not (0 < value < SCREEN_WIDTH - self.width):\n self.dir_x = -self.dir_x\n self._x += abs(self._x - value) * self.dir_x" ]
[ "0.76575345", "0.75913805", "0.7514827", "0.7482172", "0.7476596", "0.74066937", "0.73871195", "0.7383479", "0.7373789", "0.7351436", "0.73434395", "0.7271025", "0.72605467", "0.7225597", "0.71805465", "0.71784794", "0.71032804", "0.70959073", "0.69787765", "0.69621176", "0.6958475", "0.6944134", "0.69390005", "0.69375783", "0.6911012", "0.68966424", "0.68700576", "0.6824428", "0.68166983", "0.6800977", "0.6795617", "0.67830616", "0.67555064", "0.67535573", "0.67369187", "0.67246425", "0.67182654", "0.6694681", "0.6694681", "0.6687605", "0.66748536", "0.6662387", "0.66545796", "0.6633922", "0.65685236", "0.6537247", "0.64959884", "0.6487496", "0.645599", "0.64505255", "0.64505255", "0.64505255", "0.64505255", "0.64505255", "0.64505255", "0.64505255", "0.64505255", "0.64505255", "0.64505255", "0.64505255", "0.64505255", "0.64505255", "0.64505255", "0.64505255", "0.64505255", "0.64271104", "0.6426167", "0.6425809", "0.6425809", "0.6425809", "0.6425809", "0.6425809", "0.6425809", "0.6425809", "0.6425809", "0.6425809", "0.6425809", "0.6425809", "0.64142996", "0.64141005", "0.63986504", "0.63984394", "0.63984394", "0.63923293", "0.63882285", "0.6383004", "0.6380416", "0.6362246", "0.6361156", "0.63583755", "0.63583755", "0.634953", "0.63443553", "0.63019216", "0.6299676", "0.62973404", "0.6274397", "0.6266352", "0.62398994" ]
0.6752364
34
The y-coordinate of this pen. To change the y-coordinate, use one of the drawing methods. This attribute may not be (directly) altered.
def y(self):
    return self._turtle.ycor()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def y(self):\n return _libsbml.Point_y(self)", "def set_y(self, new_y):\r\n self.y = new_y", "def setY(self, y):\n self.y = y\n pass", "def setY(self, y):\r\n\t\tself._y=y", "def getY(self):\n return self.__y", "def setY(self, *args):\n return _libsbml.Point_setY(self, *args)", "def SetY(self, y):\r\n\r\n self._y = y", "def set_y(self, y):\n self._y = y", "def set_y(self, y: float):\n self.y = y", "def getY(self):\r\n\t\treturn self._y", "def getY(self):\n return self.y", "def getY(self):\n return self.y", "def y_coord(self):\n\n return self.y0 + np.arange(self.ny) * self.dy", "def GetY(self):\r\n\r\n return self._y", "def get_y(self):\n return self.__y", "def getYCoordinate(self) -> float:\n return self.y_coord", "def getY(self):\n y = self.getAttribute('y')\n kind = self.getKind()\n self._y = y if kind == 'pie' else None\n return self._y", "def set_y(self,Y):\n self.posY = Y", "def __get_y__(self):\n return self.Direction['y']", "def get_y(self):\n return self.posY", "def getY(self):\n return self.position.getY()", "def setY(self, y):\n self.position.setY(y)", "def _get_y(self):\n return self.position.y", "def getYpos(self):\n return self.y", "def get_pos_y(self):\n return self.__pos_y", "def setYPos(self,newYPos):\n self.yPos=newYPos", "def set_y(self, y):\n self.scene.set_y_loc(y)\n self.redraw()", "def get_y_position(self): \n return self.rect.y", "def setY(self, value):\n self.position[1] = value", "def setY(self, *args):\n return _libsbml.BoundingBox_setY(self, *args)", "def get_alien_y(self):\n return self.y", "def y(self):\n return self._coords[1]", "def get_y(self):\n return self.coords[1]", "def setYOffset(self, *args):\n return _libsbml.Point_setYOffset(self, *args)", "def y(self):\n return self.coords[1]", "def y(self):\n return self.__y", "def y(self):\n return self.__y", "def y(self):\n return self.__y", "def y(self):\n return self.__y", "def y(self):\n return self.__y", "def y(self):\n return self.__y", "def y(self):\n return self.__y", "def y(self):\n return self.__y", "def y(self):\n return self.__y", "def y(self):\n return self.__y", "def getY(self):\n return self.position[1]", "def y ( self ) :\n return self.yvar", "def y(self):\n if self._y is None:\n self.compute_coordinates()\n return self._y", "def y(self):\r\n return self.position.y", "def getY(self):\n return _libsbml.BoundingBox_getY(self)", "def setY(self, value):\n self.components[1] = value", "def setY(self, value):\n self.components[1] = value", "def y(self):\n return self._y", "def y(self):\n return self._y", "def y(self):\n return self._y", "def y(self):\n return self._y", "def y(self):\n return self._y", "def y(self):\n return self._y", "def y(self):\n return self._y", "def y(self):\n return self._y", "def y(self):\n return self._y", "def y(self):\n return self._y", "def y(self):\n return self.y", "def Y(self, value):\n self._Y = value", "def get_y_position(self):\n return self.actual_coordinates[1]", "def getYOffset(self):\n return _libsbml.Point_getYOffset(self)", "def getYLabel(self): \n return self.__y_label__", "def get_axis_y(self):\r\n return self.__y_axis", "def yax(self):\n return self.__yax", "def Y(self):\n return self.y\n pass", "def y(self,) -> int:\n return self._y", "def pos_y(self, *args, **kwargs) -> Any:\n pass", "def set_ly(self):\r\n self._ly = self.dy * self.ny - self.oy", "def y(self):\n return (self.__y)", "def set_axis_y(self, new_axis_point):\r\n self.__y_axis = new_axis_point", "def _get_y(self):\n enabled = self.num_enabled\n\n if self.heart_enabled:\n self._heart_y = 
45*(self.num_enabled - enabled) + 75\n enabled -= 1\n if self.speed_enabled:\n self._speed_y = 45*(self.num_enabled - enabled) + 75\n enabled -= 1\n if self.cadence_enabled:\n self._cadence_y = 45*(self.num_enabled - enabled) + 75\n enabled -= 1\n if self.ams_enabled:\n self._ams_y = 45*(self.num_enabled - enabled) + 75\n enabled -= 1", "def Y(self):\n return self._Y", "def set_y(self, state_value):\n val = state_value / self.space_subdivisions + self.unit\n epsilon = 1e-6\n if not self.unit <= val <= 1.0 - self.unit + epsilon:\n raise AttributeError(\"Value out of bounds\")\n self.pos_y = val", "def set_new_pos_in_y(self, new_pos):\n self.__pos_y = new_pos", "def y(self, value):\n if not (0 < value < SCREEN_HEIGHT - self.height):\n self.dir_y = -self.dir_y\n self._y += abs(self._y - value) * self.dir_y", "def y(self):\n return self[\"y\"]", "def findY(self):\n return self.y", "def y(self) -> int:\n return self.data.y_centre >> 4", "def getY(self):\n return self.components[1]", "def getY(self):\n return self.components[1]", "def y(self, value):\n self.validate_input(y=value)\n self.__y = value", "def yaxis ( self ) :\n return self.__yaxis", "def yaxis ( self ) :\n return self.__yaxis", "def y(self, y=None):\n\n if y is None:\n return self._y\n else:\n if not isinstance(y, int) and not isinstance(y, float):\n raise TypeError(\"y must be numeric, not '%s'\" % y)\n self._y = y", "def y(self, y=None):\n\n if y is None:\n return self._y\n else:\n if not isinstance(y, int) and not isinstance(y, float):\n raise TypeError(\"y must be numeric, not '%s'\" % y)\n self._y = y", "def origin_y(self):\n return self._origin[1]", "def y(self):\n return self._translation[1, 0]", "def get_y(self, x):\n p, y = self.get_p_y(x)\n return y", "def y0(self):\n return self._y0", "def y(self, value):\n self.data_validator(\"y\", value)\n self.__y = value", "def getY(self):\n return self.labels[0]", "def get_y(self):\n\t\treturn self._collision_rect.y + 25", "def getY(self):\n return self.proj.getY()", "def y(self):\n return self._kml['y']" ]
[ "0.7824647", "0.7789776", "0.77398807", "0.7738483", "0.7698442", "0.7642297", "0.7619385", "0.76075387", "0.76014006", "0.7586667", "0.7569661", "0.7569661", "0.7532064", "0.7500866", "0.74680847", "0.7445015", "0.7398983", "0.73577964", "0.7341188", "0.72738236", "0.7256707", "0.7250614", "0.72472525", "0.7228462", "0.7203362", "0.7198547", "0.71891683", "0.7163994", "0.7163395", "0.71553856", "0.7140468", "0.71333766", "0.7127189", "0.7111622", "0.7096176", "0.70920277", "0.70920277", "0.70920277", "0.70920277", "0.70920277", "0.70920277", "0.70920277", "0.70920277", "0.70920277", "0.70920277", "0.7091585", "0.7085508", "0.7069303", "0.70627147", "0.70569766", "0.70358896", "0.70358896", "0.70288604", "0.70288604", "0.70288604", "0.70288604", "0.70288604", "0.70288604", "0.70288604", "0.70288604", "0.70288604", "0.70288604", "0.7004475", "0.7000756", "0.6967087", "0.69644123", "0.69349366", "0.69181246", "0.69103825", "0.6905928", "0.68988454", "0.6876514", "0.6873107", "0.68724084", "0.68716097", "0.6857381", "0.6840798", "0.68389004", "0.6801816", "0.67855453", "0.67820317", "0.6773251", "0.6766838", "0.67527705", "0.67527705", "0.6751741", "0.67294264", "0.67294264", "0.6684309", "0.6684309", "0.66757095", "0.6671227", "0.6644819", "0.66248107", "0.66099364", "0.6603685", "0.6596532", "0.65915877", "0.65835345" ]
0.68330735
78
Deletes this pen object.
def __del__(self):
    self._screen._removePen(self)
    del self._turtle
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def delete(self):\n # exit contains our clean up code\n self.exit()\n GenericAnimatedProp.GenericAnimatedProp.delete(self)", "def delete(self):\n self.graph._del(handle=self.handle)", "def delete(self):\n del self.shx.atoms[self.index]", "def __del__(self):\n\n # Delete sprite (if it has been defined)\n try:\n self.canvas.delete(self.sprite)\n except AttributeError:\n pass\n except tk.TclError:\n pass", "def delete(self):\n\t\tself.canvas.delete('node_'+self.identifier)\n\t\tself.canvas.tag_unbind('node_'+self.identifier,\"<Any>\")", "def delete(self) -> None:\n self.pop()", "def delete(self):\n\n raise NotImplementedError('Must be implemented by subclasses')", "def delete(self):\n self._vertex_list.delete()\n self._vertex_list = None", "def delX(self):\n del self.components[0]", "def delX(self):\n del self.components[0]", "def delete_current_shape(self):\n print(\"deleting shape!\")\n self.shapes.remove(self.current_shape)\n self.current_shape = None\n self.changed()", "def remove(self):\r\n\t\tself._delete()", "def __delitem__(self, key):\n self.deleteCurve(key)", "def remove(self):\n self._delete()", "def remove(self):\n self._delete()", "def remove(self):\n self._delete()", "def remove(self):\n self._delete()", "def remove(self):\n self._delete()", "def remove(self):\n self._delete()", "def remove(self):\n self._delete()", "def remove(self):\n self._delete()", "def remove(self):\n self._delete()", "def remove(self):\n self._delete()", "def remove(self):\n self._delete()", "def remove(self):\n self._delete()", "def remove(self):\n self._delete()", "def remove(self):\n self._delete()", "def del_curve(self, key):\n del self[key]\n del self._labels[key]", "def _removePen(self,pen):\n if pen in self._pencils:\n self._pencils.remove(pen)", "def delete(self):\n self.parent.delete_node(self)", "def __del__(self) -> None:\n self.delete()", "def hdel(self):\n return self.delete()", "def delete(self):\n del self.characters[self.cursor.position]", "def delete(self):\n if self.shape is not None:\n self.shape.delete()\n if self in shared.obstacles:\n shared.obstacles.remove(self)", "def __del__(self):\n self.clear()\n self._screen._removeTurtle(self)\n del self._turtle", "def delete(self):\n return self._finalize()", "def delete(self):\n return self._finalize()", "def delete(self):\n return self._finalize()", "def delete(self):\n return self._finalize()", "def delete(self):\n return self._finalize()", "def delete(self):\n return self._finalize()", "def delete(self):\n return self._finalize()", "def delete(self):\n return self._finalize()", "def delete(self):\n return self._finalize()", "def delete(self):\n\n # TODO find a way to remove this when sub-classing in HCRoot\n self.parent.del_child(self)", "def delete(self):\n os.system(\"rm \"+self._name)", "def erase(self):\n pass", "def delete(self):\r\n if self.__abstract__:\r\n raise ThunderdomeException('cant delete abstract elements')\r\n if self.eid is None:\r\n return self\r\n query = \"\"\"\r\n g.removeVertex(g.v(eid))\r\n g.stopTransaction(SUCCESS)\r\n \"\"\"\r\n results = execute_query(query, {'eid': self.eid})", "def delete(self):\n\n raise NotImplementedError()", "def removeFromParentAndDelete(self):\n return _libsbml.KineticLaw_removeFromParentAndDelete(self)", "def remove_object_from_canvas(self, tk_object):\n self.canvas.delete(tk_object)", "def remove(self):\n self.node.destroy()", "def delete(self):\r\n self.domain.delete_item(self)", "def __delitem__(self, key):\n\n del self._vertices[key]", "def __delete__(self):\n pass", "def 
delete(self):\n pass", "def delete(self):\n pass", "def delete(self):\n pass", "def delete(self):\n pass", "def delete_selection(self):\n if self.selected_point_index is not None:\n del self.current_shape[self.selected_point_index]\n self.selected_point_index = None\n self.changed()", "def delete(self, index):\n try:\n self.shapes.pop(index)\n self.records.pop(index)\n except IndexError:\n print('No record found at index: {}'.format(index))", "def __del__(self):\n # Only an integer is passed to the call\n self.ph.remove(self.ID)\n # No new references were created, nothing retained", "def delete(self):\n subprocess.run([\"axicli\", \"--mode\", \"manual\", \"-M\", \"enable_xy\"])\n subprocess.run([\"axicli\", \"--mode\", \"manual\", \"-M\", \"raise_pen\"])\n subprocess.run([\"axicli\", \"--mode\", \"manual\", \"-M\", \"disable_xy\"])\n\n return self.get()", "def destroy(self):\n gameengine.GameEngine().game_objects.remove(self)", "def removeFromParentAndDelete(self):\n return _libsbml.StoichiometryMath_removeFromParentAndDelete(self)", "def delete(self):\n Texture2D.delete_glo(self._ctx, self._glo)\n self._glo.value = 0", "def delete_ball(self):\r\n self.movement = \"\"\r\n self.canvas.delete(self.ball)", "def del_points(self):\r\n del self._points", "def delete(self):\n self.log.info('Deleting')\n self._state = PonPort.State.DELETING\n self._cancel_deferred()", "def delete(self,pos):\n pos.next = pos.next.next", "def delete(self):\n self.data = None", "def delete(self, *args):\n if self.cur == Win.left:\n self.commands.delpl([])\n else:\n cur_song = self.rightwin.highlighted()\n\n self.rightwin.delete(cur_song)\n\n if not self.rightwin.data:\n self.switch_view_left()", "def remove(self):\n\n\t\t\t\tself.parent.thing.remove_sheet(self.thing)\n\t\t\t\tdel self.parent[self.label]", "def removeFromParentAndDelete(self):\n return _libsbml.SBaseRef_removeFromParentAndDelete(self)", "def delete(self):\n raise NotImplementedError", "def Delete(self):\n self.__context.builder.BlipDelete(self.GetWaveId(),\n self.GetWaveletId(),\n self.GetId())\n return self.__context.RemoveBlip(self.GetId())", "def delete(self):\n self.manager.delete(self.name)", "def delete(self):\n self.manager.delete(self.name)", "def destroy(self):\r\n self._obj.destroy()\r\n self._obj = None", "def delete(self):\n self.id = uuid4()\n DataStore.remove_instance(self)", "def delete(self, obj):\n raise NotImplementedError", "def removeFromParentAndDelete(self):\n return _libsbml.Model_removeFromParentAndDelete(self)", "def __delete__(self, instance):\n self._lib_vscf_ecc.vscf_ecc_delete(self.ctx)", "def delete(self):\n return self.parent.delete_instance(self.name)", "def delete(self):\n if Model.data_connector:\n with Model.data_connector.u_lock:\n Model.data_connector.remove_object(self)", "def delete(self, obj=None):\n pass", "def __del__(self):\n try:\n self._frame._destroy()\n except:\n pass\n self._turtles = []\n self._pencils = []\n del self._frame", "def delete(self):\n self._client.delete(self)", "def delete(self):\n self._instance.delete()\n self._instance = None\n self._data_defs = []", "def delete(self, item):\n # eg. 
node=item to attrs, telling item type to Graphviz._setattr\n self.graph._del(self.parent.handle, **{self.type: item})", "def remove(self):\n if self._parent:\n self._parent.removeChild(self)\n else:\n self.clear()", "def delete(self):\n _unset_related_objects_relations(self)\n self.deleted = now()\n self.save()\n\n return self", "def deleteItem(self):\n for item in self.scene.selectedItems():\n if isinstance(item, DiagramItem):\n # dont remove the start diagram\n if item.diagramType == DiagramItem.StartEnd:\n break\n item.removeArrows() \n else:#Arrow\n item.startItem().removeArrow(item)\n item.endItem().removeArrow(item)\n self.scene.removeItem(item)\n self.onItemInserted()\n \n self.setDefault()", "def __del__(self):\n Library.functions.delete_(self._book)", "def delY(self):\n del self.components[1]", "def delY(self):\n del self.components[1]", "def __delete__(self, obj):\n self._instances.pop(obj, None)", "def delete(self):\n with self.locked():\n self.path.delete()", "def delete(self):\n with self.locked():\n self.path.delete()", "def delete(self):\n pdbox._args.get(\"dryrun\") or os.remove(self.path)\n pdbox.info(\"Deleted %s\" % self.path)" ]
[ "0.6839339", "0.6739376", "0.66108614", "0.65926075", "0.6585168", "0.6571784", "0.65023196", "0.64651775", "0.6461599", "0.6461599", "0.63839555", "0.63446337", "0.63371813", "0.63204235", "0.63204235", "0.63204235", "0.63204235", "0.63204235", "0.63204235", "0.63204235", "0.63204235", "0.63204235", "0.63204235", "0.63204235", "0.63204235", "0.63204235", "0.63204235", "0.6317959", "0.6292306", "0.62916356", "0.6285754", "0.6278748", "0.6276993", "0.62559575", "0.62274325", "0.6223995", "0.6223995", "0.6223995", "0.6223995", "0.6223995", "0.6223995", "0.6223995", "0.6223995", "0.6223995", "0.61865515", "0.6175374", "0.6100598", "0.60690165", "0.60651726", "0.6058077", "0.60575944", "0.6055939", "0.60489005", "0.6032324", "0.60305095", "0.60250086", "0.60250086", "0.60250086", "0.60250086", "0.6010161", "0.5993214", "0.59915745", "0.59598434", "0.5956288", "0.5954832", "0.5940653", "0.5939446", "0.5934577", "0.5905201", "0.58968407", "0.5890214", "0.5886588", "0.5885046", "0.58723885", "0.58692276", "0.58654857", "0.5865435", "0.5865435", "0.58635783", "0.5848777", "0.58432806", "0.5837756", "0.5835655", "0.5829277", "0.5824494", "0.58237284", "0.5822213", "0.5821859", "0.5815477", "0.5815378", "0.58142066", "0.5812382", "0.5798777", "0.5795365", "0.5789992", "0.5789992", "0.57898873", "0.57891035", "0.57891035", "0.5785338" ]
0.71101624
0
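A minimal usage sketch for the deletion record above (the Window and Pen constructor names are assumptions for illustration; only __del__ itself appears in the record):

    w = Window()   # assumed screen object that keeps a list of its pens
    p = Pen(w)     # assumed pen registered with that screen on creation
    del p          # drop the last reference: __del__ unregisters the pen
                   # from the screen and deletes the wrapped turtle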
Moves the pen to the given position without drawing.
def move(self, x, y):
    assert (type(x) in [int, float]), "parameter x:%s is not a valid number" % repr(x)
    assert (type(y) in [int, float]), "parameter y:%s is not a valid number" % repr(y)
    fstate = self._turtle.fill()
    if fstate:      # only need to do this if in mid-fill
        self._turtle.fill(False)
    self._turtle.penup()
    self._turtle.setposition(x, y)
    self._turtle.pendown()
    if fstate:      # only need to do this if in mid-fill
        self._turtle.fill(True)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def penup(self):\n if not self._drawing:\n return\n self.pen(pendown=False)", "def give_space(self):\r\n pen.forward(20)", "def move(self, x, y):\r\n if self.brush_on:\r\n for lx, ly in line(self.pos_x, self.pos_y, x, y):\r\n self.set(lx, ly)\r\n\r\n self.pos_x = x\r\n self.pos_y = y", "def moveToPosition(self, pos):\n if pos != 1:\n prevPos = pos - 1\n self.setPointColor(prevPos, self.completedColor)\n if pos != self.num_points+1:\n self.setPointColor(pos, self.currentColor)", "def set_stroke_move(self, use_stroke=True):\r\n self.board.set_stroke_move(use_stroke)", "def move(self, p):\r\n self.position.setvalue(p)", "def move_silent(self, pos):\n self.move(pos, silent=True)", "def mouse_move(self, pos):\n if (self.setup_type == \"position\"):\n x, y = pos\n self.canvas.move(x, y)", "def move_to(self, ypos, xpos):\n # the screen's coordinates are 1 based, but the command is 0 based\n xpos -= 1\n ypos -= 1\n self.exec_command(\"MoveCursor({0}, {1})\".format(ypos, xpos).encode(\"utf-8\"))", "def relmoveto(self, x = 0, y = 0):\n self.cur_x += x\n self.cur_y += y\n if x < 0:\n self.out.write(self.csi + \"%sD\" % -x)\n elif x > 0:\n self.out.write(self.csi + \"%sC\" % x)\n if y < 0:\n self.out.write(self.csi + \"%sA\" % -y)\n elif y > 0:\n self.out.write(self.csi + \"%sB\" % y)", "def __moveTo(self, x, y):\n newbox = (x, y, self.currentBox[2], self.currentBox[3])\n self.__drawAndErase(boxToDraw=newbox, boxToErase=self.currentBox)\n self.currentBox = newbox", "def move(self):\n if self.x_pos < const.screenwidth:\n self.x_pos += 1\n self.x_pos = self.x_pos\n\n self.draw()\n return", "def draw_o(self):\r\n pen.down()\r\n pen.forward(40)\r\n pen.left(90)\r\n pen.forward(40)\r\n pen.left(90)\r\n pen.forward(40)\r\n pen.left(90)\r\n pen.forward(40)\r\n pen.left(90)\r\n pen.up()\r\n pen.forward(50)", "def reset_position(self):\n self.goto(STARTING_POSITION)", "def _moveTo(self, pt):\n self._handleAnchor()\n t = \"M%s\" % (pointToString(pt))\n self._commands.append(t)\n self._lastCommand = \"M\"\n self._lastX, self._lastY = pt", "def move(self,x,y):\n assert (type(x) in [int, float]), \"parameter x:%s is not a valid number\" % `x`\n assert (type(y) in [int, float]), \"parameter y:%s is not a valid number\" % `y`\n d = self._turtle.isdown()\n if d:\n self._turtle.penup()\n self._turtle.setposition(x,y)\n if d:\n self._turtle.pendown()", "def set_position(self, x, y):\n self.tx = -x\n self.ty = -y", "def draw_s(self):\r\n pen.down()\r\n pen.forward(40)\r\n pen.left(90)\r\n pen.forward(20)\r\n pen.left(90)\r\n pen.forward(40)\r\n pen.right(90)\r\n pen.forward(20)\r\n pen.right(90)\r\n pen.forward(40)\r\n pen.up()\r\n pen.back(40)\r\n pen.right(90)\r\n pen.forward(40)\r\n pen.left(90)\r\n pen.forward(50)", "def reset_position(self, x, y):\n\t\tself.grid[x][y] = self.terminal", "def move(self, pos):\n self.widget.move(*pos)", "def move(self):\n \n self.position = self.explore()", "def moveTo(self, pt: Tuple[float, float]) -> None:\n raise NotImplementedError", "def update(self):\n self.pos_x -=1", "def pendown(self):\n if self._drawing:\n return\n self.pen(pendown=True)", "def position(self, position):\n self.move_to(position)", "def move(self):\n\n if self.range > 0:\n self.dirty = 1\n self.rect.move_ip([self.x * self.speed, self.y * self.speed])\n self.range -= self.speed\n else:\n self.kill()", "def moveTo(self, x: int, y: int):\n raise NotImplementedError", "def move_to(self, x, y):\r\n self.__current_room = x, y", "def move_to(self, x, y):\n self.x = x\n self.y = y", "def move_to(self, x, y):\n 
self.x = x\n self.y = y", "def __window_moveTo(self, x, y):\n pass", "def move_to(self, position):\n raise NotImplementedError", "def moveTo(self, x, y):\n\n\t\tif x < 0:\n\t\t\tself.x = 0\n\t\telif x > self.maxX:\n\t\t\tself.x = self.maxX\n\t\telse:\n\t\t\tself.x = x\n\n\t\tif y < 0:\n\t\t\tself.y = 0\n\t\telif y > self.maxY:\n\t\t\tself.y = self.maxY\n\t\telse:\n\t\t\tself.y = y \n\n #print self.x, self.y\n\t\tautopy.mouse.move(self.x,self.y)", "def move(self,x,y):\n self.pos.x = x\n self.pos.y = y", "def MoveToPoint(*args):\n return _gdi_.GraphicsPath_MoveToPoint(*args)", "def do_paint(self):\r\n curses.curs_set(0)\r\n if self.win:\r\n self.paint()\r\n self.done_paint()", "def _move(self, pos):\n self.put_par(\"drive\", pos)", "def SetConnectionPen(self, pen):\r\n\r\n self._dottedPen = pen\r\n self._dirty = True", "def move_curr_piece(self, delta, point=False):\n if self.over: return\n elif self.can_move_curr_piece(delta):\n self.curr_piece.confirm_move(delta)\n if point: self.increment_score(1)\n elif delta == (0,1): # \"illegal\" down move\n self.lock_curr_piece()\n self.queue_draw()", "def undo_king_piece(self):\n self.king = False\n if self.symbol == '%':\n self.symbol = 'X'\n else:\n self.symbol = 'O'", "def goto(x, y):\n turtleTmp.setposition(x, y)", "def move(self):\n \n self.position = self.wander()", "def move_turtle(self, x, y):\n tortuga = self.turtle\n if self.capture_mode:\n tortuga.setheading(tortuga.towards(x, y))\n tortuga.setpos(x, y)\n self.add_punto(Punto(x, y))", "def stop(self):\n self.move(0, 0)", "def move(self, x, y):\n\n\t\tself._window.move(x, y)", "def move_focus(self, pos_x, pos_y):\n factor = self.offset.x * -0.005 / self.scale\n pos_x *= factor\n pos_y *= factor\n self.focus += (pos_x, pos_y)", "def draw_n(self):\r\n pen.down()\r\n pen.left(90)\r\n pen.forward(40)\r\n pen.right(135)\r\n pen.forward(1.414*40)\r\n pen.left(135)\r\n pen.forward(40)\r\n pen.up()\r\n pen.back(40)\r\n pen.right(90)\r\n pen.back(40)\r\n pen.forward(50)", "def set_move(self, x, y):\n self.pieces[x + (y * self.width)].set_move()", "def update(self):\n pygame.event.pump()\n self.pos_x -= 1.5", "def drawTo(self, x, y):\n assert (type(x) in [int, float]), \"parameter x:%s is not a valid number\" % `x`\n assert (type(y) in [int, float]), \"parameter y:%s is not a valid number\" % `y`\n self._turtle.setposition(x, y)", "def backward_character():\r\n set_point(point().offset(-1))", "def SetCurrentPosition(self,pos):\n\n if self.Reverse: pos*=-1\n self.Bus.Transaction(chr(self.Address)+chr(0x40)+struct.pack('@l',pos))", "def stop(self):\n self.change_x = 0\n self.change_y = 0", "def erase(self, x, y):\n self.console.draw_char(x, y, ' ', bg=None)", "def move_dial(self, pos):\n return self.move(pos, dial=True)", "def up():\n global y, canvas # y é modificado\n canvas.create_line(x, y, x, y - 10)\n y -= 10", "def stop(self):\n self.move(None)", "def finishStroke(self, pos):\n windowX = pos[0] - self.window.rect[0]\n windowY = pos[1] - self.window.rect[1]\n endPos = (windowX, windowY)\n if self.strokePos:\n self.strokes.append( (self.strokePos, endPos, self.color) )\n self.setDirty()\n self.strokePos = endPos", "def _on_key_press(self, event):\n # Remove the pending vertex if entering the 'move_vertex' or\n # 'move_all' mode\n if (not self._selection_completed\n and ('move_vertex' in self._state or\n 'move_all' in self._state)):\n self._xys.pop()\n self._draw_polygon()", "def moveCursor(self):\n\n\t\tself._before = self.rect.center\n\t\tself.rect.center = self._pos", "def joystick_move(self, 
emphasis=1):\n step = int(20*emphasis)\n self.display.ship.move_vertical(step=step)", "def up(self):\r\n self.brush_on = False", "def goto(self, x, y):\n # note that the snake can get outside of the canvas!\n if(self._gridmode):\n self._x = round(x)\n self._y = round(y)\n else:\n self._x = round(x, 2)\n self._y = round(y, 2)\n \n self._appendCurrentState()", "def move_to(xy):\n (x,y) = xy\n win32api.SetCursorPos((x,y))", "def move_turtle(self):\n self.forward(self.move_speed)", "def unindent(self):\n self.x_pos -= 10", "def up():\n turtleTmp.penup()", "def down():\n global y, canvas # y é modificado\n canvas.create_line(x, y, x, y + 10)\n y += 10", "def up(self):\n self.move(0, 1)", "def backToMiddlePos():\n\tprogMode(True) # Active le couple de servos\n\taxDriver.goToPosition(axDriver.BROADCASTID, 0x1FF) # Renvoie a la position 0x1FF", "def moveBy(self, x, y):\n\t\tself.moveTo(self.x + x, self.y + y)", "def move_back(self):\r\n self.center_x, self.center_y = self.save_pos", "def change_pos(self, direction):\n if direction == Direction.UP:\n self._y_pos -= 1\n elif direction == Direction.DOWN:\n self._y_pos += 1\n elif direction == Direction.LEFT:\n self._x_pos -= 1\n elif direction == Direction.RIGHT:\n self._x_pos += 1\n self._coordinates = self.coordinates()", "def move_character(self, old_y, old_x, y_pos, x_pos):\n self.map[old_y][old_x] = ' '\n self.map[y_pos][x_pos] = 'G'", "def erase(self):\r\n self.in_arrow = None\r\n self.out_arrow = None", "def down(self):\r\n self.brush_on = True", "def reset_movement(self):\n self.direction = [0, 0]", "def toggle(self, y, x):\n\tif x<0 or self.X<=x or y<0 or self.Y<=y:\n\t raise ValueError, \"Coordinates out of range %i,%i\"% (y,x)\n\tif self.state.has_key( (x,y) ): \n\t del self.state[x,y]\n\t self.scr.addch(y+1, x+1, ' ')\n\telse:\n\t self.state[x,y]=1\n\t self.scr.addch(y+1, x+1, self.char)\n\tself.scr.refresh()", "def setPos(self,pos):\n self.Xpos,self.Ypos=pos", "def reset_position(self):\n self.rect.left, self.rect.top = self.start_pos", "def draw_x(game_board: dict, board_position: str, writing_cursor: 'Turtle') -> None:\n writing_cursor.goto(game_board[board_position][0][0], game_board[board_position][0][1])\n writing_cursor.pendown()\n writing_cursor.right(45)\n writing_cursor.forward(15)\n writing_cursor.backward(30)\n writing_cursor.forward(15)\n writing_cursor.left(90)\n writing_cursor.forward(15)\n writing_cursor.backward(30)\n writing_cursor.right(45)\n writing_cursor.penup()\n writing_cursor.goto(0, 0)", "def paddle_reset_position(self, mouse):\n if (0 + self.paddle.width / 2) <= mouse.x <= (self.window.width - self.paddle.width / 2):\n self.paddle_x = mouse.x - self.paddle.width / 2\n self.window.add(self.paddle, self.paddle_x, self.paddle_y)", "def move_down(self):\n self.move_measurement(1)", "def move_down(self, step: int = 1) -> None:\n if self.cursor_pos.x < self.height - 1:\n self.cursor_pos = Point(self.cursor_pos.x+step, self.cursor_pos.y)\n else:\n self.cursor_pos = Point(0, self.cursor_pos.y)", "def restorepos(self):\n self.out.write(self.csi + \"u\")", "def _move(self, pos):\n self._set_block(self._pos, _AIR)\n self._set_block(self._pos + _Vec3(0, 1, 0), _AIR)\n self._set_block(pos, self._BOT_BLOCK)\n self._set_block(pos + _Vec3(0, 1, 0), self._BOT_BLOCK)\n self._pos = pos", "def move_to_position2(self):", "def move_to_position1(self):", "def move(x,y):\r\n pass", "def reset_position(self):\n self.set_position(copy.deepcopy(self.ab_pos))", "def unmakeMove(self, move):", "def move_to(self, x, y):\n 
self._impl.move_to(x, y)", "def set_position(self, position):\n self.gripper_io.set_signal_value(\"position_m\", position)", "def AeroMove(self, pos):\r\n\r\n pass", "def make_move(self, move):\n self.board[int(move) - 1] = self.nplayer", "def forward(self, step):\r\n x = self.pos_x + math.cos(math.radians(self.rotation)) * step\r\n y = self.pos_y + math.sin(math.radians(self.rotation)) * step\r\n prev_brush_state = self.brush_on\r\n self.brush_on = True\r\n self.move(x, y)\r\n self.brush_on = prev_brush_state", "def set_pos(self, x):\n self._pos = x", "def move(self, num_tower, position):\n #Keep track of the location of the disk\n self.peg = num_tower\n self.position_on_peg = position\n \n #Change coordenates in graphical representation of the disk\n #Calling base clase move method\n super().move( Disk.get_center_position( num_tower, position) )", "def handle_motion(self, x, y):\n if self.pressed_flag:\n self.last_point = (x, y)\n\n # trigger canvas to redraw itself\n self.redraw()", "def set_position(self, position):\n self.set_current_position(position)" ]
[ "0.66753656", "0.64267194", "0.6406833", "0.635348", "0.6280323", "0.62788904", "0.6258426", "0.6257431", "0.6155344", "0.6149044", "0.6132005", "0.6049534", "0.60103166", "0.59875387", "0.5973302", "0.5959287", "0.5955407", "0.5939342", "0.5917403", "0.589879", "0.5859435", "0.5832315", "0.58004266", "0.5770623", "0.5743472", "0.5737395", "0.5731642", "0.57223505", "0.57117873", "0.57117873", "0.57064795", "0.56981504", "0.5696003", "0.56872404", "0.56752956", "0.56420213", "0.5638324", "0.56306034", "0.56225073", "0.5596795", "0.55731446", "0.55646175", "0.5564334", "0.5537058", "0.55357736", "0.55220735", "0.5519887", "0.55145884", "0.5513949", "0.55123115", "0.54932564", "0.5488297", "0.5481029", "0.54746836", "0.54729277", "0.54697067", "0.5464826", "0.5464323", "0.54638565", "0.5461632", "0.5443604", "0.54375404", "0.5433605", "0.54319715", "0.54310703", "0.5424931", "0.5419538", "0.5414958", "0.5412885", "0.5407994", "0.54005855", "0.5400078", "0.53960747", "0.53908813", "0.53908414", "0.5385103", "0.53680843", "0.53636634", "0.5362297", "0.5359024", "0.5358829", "0.5352225", "0.53516686", "0.5350966", "0.5350464", "0.53458685", "0.533532", "0.5332539", "0.5327422", "0.5326969", "0.5325906", "0.5322698", "0.5308048", "0.5304504", "0.53028953", "0.5292209", "0.52909875", "0.5287416", "0.5280096", "0.5269636" ]
0.58909005
20
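A minimal usage sketch for the move record above (same assumed Pen object as before); the point of move() is that it lifts the pen, repositions it, and lowers it again, so no line is drawn and any fill in progress is preserved:

    p = Pen(w)
    p.move(50, 50)    # pen jumps to (50, 50) without leaving a trace
    p.move(-10, 80)   # again: no line is drawn between the two positions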
Draws a line segment (dx,dy) from the current pen position.
def drawLine(self, dx, dy):
    assert (type(dx) in [int, float]), "parameter dx:%s is not a valid number" % repr(dx)
    assert (type(dy) in [int, float]), "parameter dy:%s is not a valid number" % repr(dy)
    x = self._turtle.xcor()
    y = self._turtle.ycor()
    self._turtle.setposition(x + dx, y + dy)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def __draw_line(display, color, ball_pos, dx, dy):\n pygame.draw.line(display, color, ball_pos, (ball_pos[0] + dx, ball_pos[1] + dy), 2)", "def draw_line():\n global y1, y2\n canvas.create_line(x1, y1, x2, y2, width=2, fill=color)\n y1 -= 10\n y2 += 10", "def draw_line(self, x):\n self.PDF.setStrokeColor(black01)\n self.PDF.setLineWidth(1)\n self.PDF.line(75, x, 550, x)\n self.PDF.setStrokeColor(\"black\")", "def draw(self):\n # s1 = ShowPoint(self.cnv, self.p1.xpt, self.p1.ypt)\n # s2 = ShowPoint(self.cnv, self.p2.xpt, self.p2.ypt)\n # s1.draw()\n # # s2.draw()\n self.cnv.create_line(self.p1.xpt, self.p1.ypt, self.p2.xpt, self.p2.ypt)", "def draw(x,y,x1,y1,d,color=1):\n d.add(dxf.line((x,y),(x1,y1),color=color, layer='LINES',thickness=0.01))", "def draw_line(x1, y1, x2, y2):\r\n #global _canvas\r\n #global _current_line_thickness\r\n #global _current_color\r\n if _canvas == None:\r\n raise RuntimeError(\"Canvas is not open yet.\")\r\n else:\r\n path = Path(Point(x1, y1), Point(x2, y2))\r\n path.setBorderWidth(_current_line_thickness)\r\n path.setBorderColor(_current_color)\r\n _canvas.add(path)", "def draw_line(color, start_pos, end_pos, width=1):\n pygame.draw.line(screen, color, start_pos, end_pos, width)", "def drawLine(self,start,stop):\n startX = int(self.vert[start][0]*self.scale + self.size/2)\n startY = int(self.vert[start][1]*self.scale + self.size/2)\n endX = int(self.vert[stop][0]*self.scale + self.size/2)\n endY = int(self.vert[stop][1]*self.scale + self.size/2)\n \n self.canvas.create_line(startX,startY,endX,endY,fill='white')", "def draw_line(self, x0, y0, x1, y1, color=Color['white']):\n pygame.draw.line(self.display, color, (x0, y0), (x1, y1))", "def dline(x, y):\n glClear(GL_COLOR_BUFFER_BIT)\n glColor3f(0.0, 0.0, 1.0)\n glPointSize(10.0)\n glBegin(GL_POINTS)\n while (x <= y):\n glVertex2f(x, x)\n x += 0.05\n glEnd()\n glFlush()", "def drawSlope(self):\n length = sqrt(1 + self.slope**2) # Length of the line segment over 1 x-unit\n xOffset = (segmentLength / length) / 2 # Figures out how many times the length of the 1 unit length fits into the desired length\n # then divides by 2 becuase half is on the left and half on the right of the center\n\n\n # Left end point\n xLeft = self.x - xOffset\n yLeft = (self.slope * (xLeft - self.x)) + self.y\n\n # Right end point\n xRight = self.x + xOffset\n yRight = (self.slope * (xRight - self.x)) + self.y\n\n\n # Converts the left and right end points from cartesian coordinates to screen coordinates\n left = cartesianToScreen(xLeft , yLeft)\n right = cartesianToScreen(xRight, yRight)\n\n\n pygame.draw.aaline(display, self.color, left, right, 1) # DRAWS THE LINE AHHHHHHHHHHHHHHHHHH :P", "def _defLine(self):\n self._dline=GPath(points = [0,100,GAME_WIDTH,100], linewidth = 1.5,\n linecolor = 'cyan')", "def startLineDrawing(self, startPos):\n self.line = LineNodePath(render2d, thickness=2, colorVec=(0.8,0.8,0.8,1))\n self.line.moveTo(startPos)\n t = taskMgr.add(self.drawLineTask, \"drawLineTask\")\n t.startPos = startPos", "def DrawLine(*args, **kwargs):\n return _gdi_.PseudoDC_DrawLine(*args, **kwargs)", "def draw_path(self):\r\n if len(self.path) > 1:\r\n for i in range(1, len(self.path)):\r\n pg.draw.line(self.screen, (0, 150, 0),\r\n self.path[i - 1], self.path[i], 1)\r\n elif len(self.path) == 1:\r\n pg.draw.circle(self.screen, (0, 150, 0),\r\n (int(self.path[0].x), int(self.path[0].y)), 1)", "def DrawLinePoint(*args, **kwargs):\n return _gdi_.PseudoDC_DrawLinePoint(*args, **kwargs)", "def draw_line(self, pt0, pt1, color):\n steep 
= False\n if abs(pt0[0]-pt1[0]) < abs(pt0[1]-pt1[1]):\n pt0[0], pt0[1] = pt0[1], pt0[0]\n pt1[0], pt1[1] = pt1[1], pt1[0]\n steep = True\n\n if pt0[0] > pt1[0]:\n pt0[0], pt1[0] = pt1[0], pt0[0]\n pt0[1], pt1[1] = pt1[1], pt0[1]\n\n if pt0[1] > pt1[1]:\n dy = pt0[1] - pt1[1]\n inc_y = -1\n else:\n dy = pt1[1] - pt0[1]\n inc_y = 1\n\n dx = pt1[0] - pt0[0]\n d = 2 * dy - dx\n incr_e = 2 * dy\n incr_ne = 2 * (dy - dx)\n x = pt0[0]\n y = pt0[1]\n\n if not steep:\n self.buffer.set_pixel((x, y), color)\n while x < pt1[0]:\n if d <= 0:\n d = d + incr_e\n x = x + 1\n else:\n d = d + incr_ne\n x = x + 1\n y = y + inc_y\n self.buffer.set_pixel((x, y), color)\n else:\n self.buffer.set_pixel((y, x), color)\n while x < pt1[0]:\n if d <= 0:\n d = d + incr_e\n x = x + 1\n else:\n d = d + incr_ne\n x = x + 1\n y = y + inc_y\n self.buffer.set_pixel((y, x), color)", "def wdraw_line(self, wx0, wy0, wx1, wy1, color, arrow):\r\n dx0, dy0 = self.w_to_d(wx0, wy0)\r\n dx1, dy1 = self.w_to_d(wx1, wy1)\r\n self.canvas.create_line(dx0, dy0, dx1, dy1, fill=color, arrow=arrow)", "def draw_line(self, gray=0, nextline=0):\n\n self.fontsize = 4\n if nextline:\n self.nextline()\n else:\n self.linespace(8)\n self.resetx()\n c = self.canvas\n c.setStrokeGray(gray)\n c.setLineWidth(1)\n #self.y = self.y + self.linespacing + (self.fontsize/2)\n c.line(self.x, self.y, self.width - self.x, self.y)\n self.y = self.y + (self.linespacing)", "def __draw_path(\n self, x_path, y_path, opt_line, opt_marker,\n opt_colour, thickness=0.05):\n # Get colour\n colour = self.__get_colour_from_string(opt_colour)\n\n # For every point in the list, draw a line to the next one\n # (excluding last point)\n for point in range(0, len(x_path)):\n # Get point 1\n x1 = x_path[point]\n y1 = y_path[point]\n p1 = vector(x1, y1, 0)\n\n # If at end / only coordinate - draw a marker\n if point == len(x_path) - 1:\n create_marker(self.scene, x1, y1, opt_marker, colour)\n return\n\n # Get point 2\n x2 = x_path[point + 1]\n y2 = y_path[point + 1]\n p2 = vector(x2, y2, 0)\n\n if opt_line == '':\n # Only one marker to avoid double-ups\n create_marker(self.scene, x1, y1, opt_marker, colour)\n elif opt_line == '-':\n create_line(\n p1, p2, self.scene, colour=colour, thickness=thickness)\n # Only one marker to avoid double-ups\n create_marker(self.scene, x1, y1, opt_marker, colour)\n elif opt_line == '--':\n create_segmented_line(\n p1, p2, self.scene, 0.3, colour=colour,\n thickness=thickness)\n # Only one marker to avoid double-ups\n create_marker(self.scene, x1, y1, opt_marker, colour)\n elif opt_line == ':':\n create_segmented_line(\n p1, p2, self.scene, 0.05, colour=colour,\n thickness=thickness)\n # Only one marker to avoid double-ups\n create_marker(self.scene, x1, y1, opt_marker, colour)\n elif opt_line == '-.':\n raise NotImplementedError(\"Other line types not implemented\")\n else:\n raise ValueError(\"Invalid line type given\")", "def draw_lines(self):\n # draw x lines\n y = self.step_y\n while y <= self.height:\n x = 0\n while x <= self.width:\n self.canvas.create_line(x, y, x+3.5, y)\n self.canvas.update()\n x += 3.5\n y += self.step_y\n \n # draw y lines\n x = self.step_x\n while x <= self.width:\n y = 0\n while y <= self.height:\n self.canvas.create_line(x, y, x, y+3.5)\n self.canvas.update()\n y += 3.5\n x += self.step_x\n \n self.is_operating = False", "def draw_line(self, DISP, side:str, indizes:tuple, pink = False):\r\n offset = 1 #< Just to draw the line nicely\r\n pos = (indizes[0] - 1) * self.grid_size, indizes[1] * self.grid_size\r\n # Check 
if it's a pink line\r\n if pink:\r\n start_pos = pos[0], pos[1] + self.grid_size // 2\r\n end_pos = pos[0] + self.grid_size, pos[1] + self.grid_size // 2\r\n # Check if the line should be vertically. u for up\r\n elif side == 'u':\r\n start_pos = pos[0] + self.width - offset + self.grid_size // 2, pos[1] + self.grid_size // 2\r\n end_pos = pos[0] + self.grid_size + offset + self.grid_size // 2 - self.width, pos[1] + self.grid_size // 2\r\n # Check if the line should be horizontally. l for left\r\n elif side == 'l':\r\n start_pos = pos[0] + self.grid_size // 2, pos[1] + self.width - offset + self.grid_size // 2\r\n end_pos = pos[0] + self.grid_size // 2, pos[1] - self.width + self.grid_size + offset + self.grid_size // 2\r\n if not pink:\r\n pg.draw.line(DISP, Colors.colors['BLACK'], start_pos,end_pos, self.width + 2 * offset) \r\n else:\r\n pg.draw.line(DISP, Colors.colors['PINK'], start_pos,end_pos, self.width + 2 * offset)", "def draw_line(self, start_p, end_p, color, thickness: float):\n line_seg = LineSegs(\"interface\")\n line_seg.setColor(*color)\n line_seg.moveTo(start_p[0] * self.w_scale, 0, start_p[1] * self.h_scale)\n line_seg.drawTo(end_p[0] * self.w_scale, 0, end_p[1] * self.h_scale)\n line_seg.setThickness(thickness)\n line_np = self.aspect2d.attachNewNode(line_seg.create(False))\n return line_np", "def drawPath(self):\r\n bgl.glColor4f(0.8,0.8,0.9,0.01)\r\n bgl.glLineWidth(0.01)\r\n\r\n bgl.glBegin(bgl.GL_LINES)\r\n bgl.glVertex3f(self.p1[0],self.p1[1],self.p1[2])\r\n bgl.glVertex3f(self.p2[0],self.p2[1],self.p2[2])\r\n bgl.glEnd()\r\n\r\n bgl.glNormal3f(0.0,0.0,1.0)\r\n bgl.glShadeModel(bgl.GL_SMOOTH);", "def draw_line_segment(\n x1: float, y1: float, x2: float, y2: float, color: C3F\n ) -> None:\n pyglet.graphics.draw(\n 2,\n pyglet.gl.GL_LINE_STRIP,\n (GeoDrawer._VERTEX_MODE, [x1, y1, x2, y2]),\n (GeoDrawer._COLOR_MODE, color * 2),\n )", "def line(self, x, y):\n self.call('line', x, y)", "def DrawLinePoint(*args, **kwargs):\n return _gdi_.DC_DrawLinePoint(*args, **kwargs)", "def DrawDottedLine(self, dc, point, length, vertical):\r\n\r\n for i in xrange(0, length, 2):\r\n dc.DrawPoint(point.x, point.y)\r\n if vertical:\r\n point.y += 2\r\n else:\r\n point.x += 2", "def DrawLine(*args, **kwargs):\n return _gdi_.DC_DrawLine(*args, **kwargs)", "def draw_point(self, p):\n length = 3\n self.set_line_width(0.1)\n self.set_source_rgba(0, 0, 1, 1)\n self.move_to(p.x + length, p.y)\n self.line_to(p.x - length, p.y)\n self.stroke()\n self.move_to(p.x, p.y + length)\n self.line_to(p.x, p.y - length)\n self.stroke()", "def add_line(self, x0, y0, x1, y1, style=None):\n style = self.__prepare_style(style, 'o')\n if x0 > x1:\n # swap A and B\n x1, x0 = x0, x1\n y1, y0 = y0, y1\n # get delta x, y\n dx = x1 - x0\n dy = y1 - y0\n # if a length of line is zero just add point\n if dx == 0 and dy == 0:\n if self.check_coord_in_range(x0, y0):\n self.canvas[y0][x0] = style\n return\n # when dx >= dy use fill by x-axis, and use fill by y-axis otherwise\n if abs(dx) >= abs(dy):\n for x in range(x0, x1 + 1):\n y = y0 if dx == 0 else y0 + int(round((x - x0) * dy / float((dx))))\n if self.check_coord_in_range(x, y):\n self.canvas[y][x] = style\n else:\n if y0 < y1:\n for y in range(y0, y1 + 1):\n x = x0 if dy == 0 else x0 + int(round((y - y0) * dx / float((dy))))\n if self.check_coord_in_range(x, y):\n self.canvas[y][x] = style\n else:\n for y in range(y1, y0 + 1):\n x = x0 if dy == 0 else x1 + int(round((y - y1) * dx / float((dy))))\n if self.check_coord_in_range(x, y):\n self.canvas[y][x] = style", 
"def drawSeg(self, seg, sfill=SFILL):\n x, y = seg.getStartPoint()\n X, Y = seg.getEndPoint()\n go = self.can.create_line(x, y, X, Y, width=3, fill=sfill)\n seg.addGraphicObject(go)", "def draw_line(self, x1, y1, x2, y2, color):\n painter = QPainter()\n painter.begin(self.lbFFmpeg.pixmap())\n painter.setPen(QColor(color))\n painter.drawLine(x1, y1, x2, y2)\n painter.end()\n self.lbFFmpeg.update()", "def _draw_line(event, x, y, flags, params):\n global img, source_img\n global p1, p2\n if event == cv2.EVENT_LBUTTONDOWN:\n img = source_img.copy()\n p1 = (x, y)\n elif event == cv2.EVENT_LBUTTONUP:\n p2 = (x, y)\n img = source_img.copy()\n text = 'position: %d' % p2[0]\n cv2.putText(img, text, (100, 100),\n cv2.FONT_HERSHEY_SIMPLEX, 3, DrawingShapeUtils.COLOR, \n DrawingShapeUtils.LINE_THICKNESS)\n cv2.line(img, (x, y+100), (x, y-100), DrawingShapeUtils.COLOR,\n DrawingShapeUtils.LINE_THICKNESS)", "def draw(self, draw_line):\n #draw_rect(self.color, (self.position, (self.size, self.size)))\n line_start = (int(self.position[0]), int(self.position[1] - self.size/2))\n line_end = (int(line_start[0] + self.size), line_start[1])\n draw_line(self.color, line_start, line_end, self.size)\n\n gun_start = (int(self.position[0] + self.size/2), line_start[1])\n gun_end = (int(gun_start[0] + math.cos(self.angle) * self.barrel), int(gun_start[1] - math.sin(self.angle) * self.barrel))\n draw_line(self.color, gun_start, gun_end, 5)", "def line(self, drawer, canvas):\n start_width = random.randint(\n self._width / 8, self._width / 4)\n start_height = random.randint(\n self._height / 4, self._height * 3 / 4)\n stop_width = random.randint(\n self._width * 3 / 4, self._width * 7 / 8)\n stop_height = random.randint(\n self._height / 4, self._height * 3 / 4)\n drawer.line(\n (start_width,\n start_height,\n stop_width,\n stop_height),\n fill=random.randint(128, 155),\n width=3\n )", "def draw_shape_line(self, line, xform, colour):\n pts = [xform.chain(p) for p in (line.p1, line.p2)]\n self.canvas.line([(p.x, p.y) for p in pts], fill=colour)", "def _draw_current_pos(self, painter):\n\t\tif self.current_pos is None:\n\t\t\treturn\n\t\tpx = self.map_stamp_to_x(self.current_pos)\n\t\tpw, ph = self._current_pos_pointer_size\n\n\t\t# Line\n\t\tpainter.setPen(QtGui.QPen(self._current_pos_color))\n\t\tpainter.setBrush(QtGui.QBrush(self._current_pos_color))\n\t\tpainter.drawLine(px, self._history_top - 1, px, self._history_bottom + 2)\n\n\t\t# Upper triangle\n\t\tpy = self._history_top - ph\n\t\tpainter.drawPolygon(\n\t\t QtGui.QPolygonF(\n\t\t [QtCore.QPointF(px, py + ph),\n\t\t QtCore.QPointF(px + pw, py),\n\t\t QtCore.QPointF(px - pw, py)]\n\t\t )\n\t\t)\n\n\t\t# Lower triangle\n\t\tpy = self._history_bottom + 1\n\t\tpainter.drawPolygon(\n\t\t QtGui.QPolygonF(\n\t\t [QtCore.QPointF(px, py),\n\t\t QtCore.QPointF(px + pw, py + ph),\n\t\t QtCore.QPointF(px - pw, py + ph)]\n\t\t )\n\t\t)\n\n\t\tpainter.setBrush(self._default_brush)\n\t\tpainter.setPen(self._default_pen)", "def draw_s(self):\r\n pen.down()\r\n pen.forward(40)\r\n pen.left(90)\r\n pen.forward(20)\r\n pen.left(90)\r\n pen.forward(40)\r\n pen.right(90)\r\n pen.forward(20)\r\n pen.right(90)\r\n pen.forward(40)\r\n pen.up()\r\n pen.back(40)\r\n pen.right(90)\r\n pen.forward(40)\r\n pen.left(90)\r\n pen.forward(50)", "def line(self, x0, y0, x1, y1, char): # noqa: C901, PLR0912\n # pylint: disable=too-many-arguments, too-many-branches\n if x0 > x1:\n x1, x0 = x0, x1\n y1, y0 = y0, y1\n\n dx = x1 - x0\n dy = y1 - y0\n\n if dx == 0 and dy == 0:\n self.point(x0, y0, 
char)\n elif abs(dx) >= abs(dy):\n for x in range(x0, x1 + 1):\n if dx == 0:\n y = y0\n else:\n y = y0 + int(round((x - x0) * dy / float(dx)))\n self.point(x, y, char)\n elif y0 < y1:\n for y in range(y0, y1 + 1):\n if dy == 0:\n x = x0\n else:\n x = x0 + int(round((y - y0) * dx / float(dy)))\n self.point(x, y, char)\n else:\n for y in range(y1, y0 + 1):\n if dy == 0:\n x = x0\n else:\n x = x1 + int(round((y - y1) * dx / float(dy)))\n self.point(x, y, char)", "def drawPoints(self, qp):\n\n# pen = self.pen\n\n\n size = self.size()\n self.yOffset = [size.height()*0.2 + size.height()*0.618/self.NUM_CHANNEL * y for y in xrange(self.NUM_CHANNEL) ]\n\n for ix in xrange(self.NUM_CHANNEL):\n self.pen.setStyle(Qt.SolidLine)\n self.pen.setWidth(2)\n self.pen.setBrush(self.PEN_COLOR[ix])\n self.pen.setCapStyle(Qt.RoundCap)\n self.pen.setJoinStyle(Qt.RoundJoin)\n qp.setPen(self.pen)\n\n qp.drawLine(self.x - 2, self.yOffset[ix] - \\\n self.data_1[ix] * self.DISPLAY_SCALING[ix],\\\n self.x , self.yOffset[ix] - \\\n self.data[ix] * self.DISPLAY_SCALING[ix])", "def StrokeLine(*args, **kwargs):\n return _gdi_.GraphicsContext_StrokeLine(*args, **kwargs)", "def draw_line(self, frame, rect):\n print(\"x0, y0, x1, y1\", self.x0, self.y0, self.x1, self.y1)\n print(\"cross_x, cross_y\", self.cross_x, self.cross_y)\n left, top, right, bottom = rect\n # 枠内では線を表示しないようにしてやる\n if top<self.y1<bottom and left<self.x1<right:\n return\n # フレームと線の交点\n if (self.x1 >= right or self.x1 <= left or self.y1 <= top or self.y1 >= bottom) and self.cross_x == 0:\n self.cross_x = self.x1\n self.cross_y = self.y1\n return\n draw = ImageDraw.Draw(frame)\n draw.line((self.cross_x, self.cross_y, self.x1, self.y1), fill=(255, 255, 255), width=3)", "def draw_line(self, point1, point2, line_width, line_color):\n line_color = check_color(line_color)\n STline.line(self.canvas, (point1, point2), line_width, line_color)", "def __drawSegment(self, p1, p2, color):\n pygame.draw.aaline(self.screen, color, p1, p2)", "def draw_line(self, coords, smooth=False, **options):\n # NOTE: Outline does not work because uses paths instead of normal line method.\n # TODO: Add volume param, containing a list of linewidths same length as line\n # or as a function that calculates the width at each node\n # Result is a flow line with varying thickness at each node\n # Have to calculate left/right xy at each node, and use symbol curveto()\n # Easy and really cool...DO IT!\n options = self._check_options(options)\n \n if not hasattr(coords[0], \"__iter__\"):\n coords = _grouper(coords, 2)\n else: coords = (point for point in coords)\n \n # get drawing tools from options\n args = []\n if options[\"fillcolor\"]:\n pen = aggdraw.Pen(options[\"fillcolor\"], options[\"fillsize\"])\n args.append(pen)\n\n if smooth:\n\n # Note: Creation of the aggdraw.Symbol object here can be\n # very slow for long lines; Path is much faster but due\n # to a bug it does not correctly render curves, hence the use\n # of Symbol\n \n pathstring = \"\"\n \n # begin\n coords = _pairwise(coords)\n (startx,starty),(endx,endy) = next(coords)\n pathstring += \" M%s,%s\" %(startx, starty)\n \n # draw straight line to first line midpoint\n midx,midy = (endx + startx) / 2.0, (endy + starty) / 2.0\n pathstring += \" L%s,%s\" %(midx, midy)\n oldmidx,oldmidy = midx,midy\n \n # for each line\n for line in coords:\n # curve from midpoint of first to midpoint of second\n (startx,starty),(endx,endy) = line\n midx,midy = (endx + startx) / 2.0, (endy + starty) / 2.0\n pathstring += \" Q%s,%s,%s,%s\" 
%(startx, starty, midx, midy)\n oldmidx,oldmidy = midx,midy\n \n # draw straight line to endpoint of last line\n pathstring += \" L%s,%s\" %(endx, endy)\n\n # make into symbol object\n symbol = aggdraw.Symbol(pathstring)\n\n # draw the constructed symbol\n self.drawer.symbol((0,0), symbol, *args)\n\n else:\n\n path = aggdraw.Path()\n \n # begin\n startx,starty = next(coords)\n path.moveto(startx, starty)\n \n # connect to each successive point\n for nextx,nexty in coords:\n path.lineto(nextx, nexty)\n\n # draw the constructed path\n self.drawer.path((0,0), path, *args)", "def draw_n(self):\r\n pen.down()\r\n pen.left(90)\r\n pen.forward(40)\r\n pen.right(135)\r\n pen.forward(1.414*40)\r\n pen.left(135)\r\n pen.forward(40)\r\n pen.up()\r\n pen.back(40)\r\n pen.right(90)\r\n pen.back(40)\r\n pen.forward(50)", "def draw(self, base, level):\n\n a = base.a\n b = base.b\n\n if level > 0:\n delta = base.b - base.a\n px = a.x + delta.x / 3\n py = a.y + delta.y / 3\n rx = a.x + 2 * delta.x / 3\n ry = a.y + 2 * delta.y / 3\n p = Point(px, py)\n r = Point(rx, ry)\n q = Point(rx, ry)\n q.rotate_deg(60, p)\n self.draw(Line(a,p), level-1)\n self.draw(Line(p,q), level-1)\n self.draw(Line(q,r), level-1)\n self.draw(Line(r,b), level-1)\n else:\n self.container.window.create_line(a.x, a.y, b.x, b.y)", "def _lineTo(self, pt):\n x, y = pt\n # duplicate point\n if x == self._lastX and y == self._lastY:\n return\n # vertical line\n elif x == self._lastX:\n cmd = \"V\"\n pts = str(y)\n # horizontal line\n elif y == self._lastY:\n cmd = \"H\"\n pts = str(x)\n # previous was a moveto\n elif self._lastCommand == \"M\":\n cmd = None\n pts = \" \" + pointToString(pt)\n # basic\n else:\n cmd = \"L\"\n pts = pointToString(pt)\n # write the string\n t = \"\"\n if cmd:\n t += cmd\n self._lastCommand = cmd\n t += pts\n self._commands.append(t)\n # store for future reference\n self._lastX, self._lastY = pt", "def draw_step(ax, line, sizes):\n x1, y1, x2, y2 = line\n\n # Clear & Resize\n ax.cla()\n size = np.sum(sizes) + 1\n g.axis([-size, size, -size, size])\n g.autoscale(False)\n\n # Plot step\n ax.plot([0, x1, x2], [0, y1, y2], lw=2, c='k')\n ax.add_patch(Circle((0, 0), 0.05, fc='k', zorder=10))\n ax.add_patch(Circle((x1, y1), 0.08, fc='b', ec='b', zorder=10))\n ax.add_patch(Circle((x2, y2), 0.08, fc='r', ec='r', zorder=10))", "def line(self, points, ls=\"--\", draw=\"black\", lw=None, options=None, kwoptions=None):\n\n draw = norm_colour(draw)\n self.use_colour(draw)\n\n if kwoptions is None:\n kwoptions = {}\n kwopts = {'draw': draw, **kwoptions}\n if lw:\n kwopts['line width'] = lw\n\n self._commands.append(rf\"\\draw{wrap(fmt_options(options,kwopts))} \" +\n f\" {ls} \".join(map(fmt_point, points))+\";\")", "def line(canvas, points, line_width, line_color):\n \n # duplicate first point in case only one point was given\n points = points[0], points\n canvas.create_line(points, width = int(line_width), fill = line_color)", "def draw_line(image, pt1, pt2, color):\n\n\t# TODO -> Add your line drawing code here\n\n\t# Sample code to draw white dots at pt1 and pt2 positions\n\tset_pixel(image, pt1, color)\n\tset_pixel(image, pt2, color)", "def draw_line(self, p1, p2, color, thickness=2):\n p1 = self._format_point(p1)\n p2 = self._format_point(p2)\n opencv.line(self.img, p1.tuple(), p2.tuple(), color.bgra(), thickness=thickness)", "def draw_line(self, color, p1: Point, p2: Point, width):\n _p1 = self.T.itrans(p1)\n _p2 = self.T.itrans(p2)\n pg.draw.line(self.screen, color, _p1(), _p2(), 2)", "def _draw_line_text(self):\n 
self._line_text.set_text(self.model.get_current_line())", "def draw(self):\n if len(self.__points) >= 2:\n self._total_length = 0\n for i in range(len(self.__points) - 1):\n p1 = self.__points[i]\n p2 = self.__points[i + 1]\n coords = self.__line_segment(p1, p2)\n if not coords is None:\n pyglet.graphics.draw_indexed(4, pyglet.gl.GL_TRIANGLES,\n [0, 1, 2, 1, 2, 3],\n ('v2i', coords),\n ('c4b', self.color * 4)\n )\n coords = self.__line_cap(p2)\n pyglet.graphics.draw_indexed(4, pyglet.gl.GL_TRIANGLES,\n [0, 1, 2, 0, 2, 3],\n ('v2i', coords),\n ('c4b', self.color * 4)\n )", "def plot(self, x, y, color=\"black\"):\n self.__checkOpen()\n xs,ys = self.toScreen(x,y)\n #self.create_line(xs,ys,xs+1,ys, fill=color)\n _tkExec(self.create_line,xs,ys,xs+1,ys,fill=color,tag=\"line\")\n self.__autoflush()", "def _draw_line(plot, hori, vert, color, text):\n plot.plot(hori, vert, '-o'+color)\n plot.text(hori[-1]-3, vert[-1]+2, text, color=color)", "def DrawArrow(self, dc):\r\n\r\n rect = self.GetClientRect()\r\n point = wx.Point()\r\n\r\n point.x = (rect.GetLeft() + rect.GetRight()) / 2\r\n point.y = (rect.GetTop() + rect.GetBottom()) / 2\r\n rx, ry = wx.Size(), wx.Size()\r\n \r\n if self._direction == wx.TOP:\r\n rx = wx.Size(1, 0)\r\n ry = wx.Size(0, 1)\r\n\r\n elif self._direction == wx.LEFT:\r\n rx = wx.Size(0, -1)\r\n ry = wx.Size(1, 0)\r\n\r\n elif self._direction == wx.RIGHT:\r\n rx = wx.Size(0, 1)\r\n ry = wx.Size(-1, 0)\r\n\r\n elif self._direction == wx.BOTTOM:\r\n rx = wx.Size(-1, 0)\r\n ry = wx.Size(0, -1) \r\n\r\n point.x += ry.x*3\r\n point.y += ry.y*3\r\n\r\n dc.SetPen(wx.Pen(colourIconArrow))\r\n\r\n for i in xrange(4):\r\n pt1 = wx.Point(point.x - rx.x*i, point.y - rx.y*i)\r\n pt2 = wx.Point(point.x + rx.x*(i+1), point.y + rx.y*(i+1))\r\n dc.DrawLinePoint(pt1, pt2)\r\n point.x += ry.x\r\n point.y += ry.y", "def Draw_Node( self, node, xstart, ystart):\r\n xdist = node.data.length * cb.xtick\r\n handle = self.canvas_one.create_line( xstart, ystart, xstart+xdist, ystart,width = 3, fill=self.branch_color )\r\n #Attach a handle to a node and place in the handle_list of all LineSegments\r\n ls = LineSegment( handle, node )\r\n self.handle_list.append(ls)\r\n return ystart", "def handle_draw( self, brush ):\n # draw background\n brush.color = self.background_color\n width, height = self.size\n brush.move_to( 0, 0 )\n brush.path_to( width, 0 )\n brush.path_to( width, height )\n brush.path_to( 0, height )\n brush.close_path()\n brush.fill_path()\n brush.clear_path()\n \n # draw all lines in lines list\n brush.color = self.line_color\n brush.size = self.line_thickness\n for (x0, y0), (x1, y1) in self.lines:\n brush.move_to( x0, y0 ) # move to beginning of line\n brush.path_to( x1, y1 ) # make path to end of line\n brush.stroke_path() # stroke line with current color and thickness\n brush.clear_path() # clear line path we just drew\n\n # if we are currently drawing a line draw rubber band\n if (self.first_point is not None) and (self.last_point is not None):\n brush.color = self.rubber_line_color\n brush.size = self.rubber_line_thickness\n brush.move_to( *self.first_point )\n brush.path_to( *self.last_point )\n brush.stroke_path()\n brush.clear_path()", "def draw_line():\n\n # Small Size Line\n glLineWidth(0.1)\n glColor3f(0.5, 1.0, 0.9)\n wid = 0\n while wid <= width:\n length = 0\n while length <= height:\n glBegin(GL_LINES)\n glVertex3f(0.0, length, 0.0)\n glVertex3f(wid, length, 0)\n glEnd()\n glBegin(GL_LINES)\n glVertex3f(length, 0, 0.0)\n glVertex3f(length, wid, 0)\n glEnd()\n length += 10\n 
wid += 50\n # Medium Size Line\n glLineWidth(2.0)\n wid = 0\n while wid <= width:\n length = 0\n while length <= height:\n glBegin(GL_LINES)\n glVertex3f(0.0, length, 0.0)\n glVertex3f(wid, length, 0)\n glEnd()\n length += 50\n glBegin(GL_LINES)\n glVertex3f(length, 0, 0.0)\n glVertex3f(length, wid, 0)\n glEnd()\n wid += 50\n # Main Line\n # ordinat\n glLineWidth(1.5)\n glColor3f(0.5, 0.4, 0.8)\n glBegin(GL_LINES)\n glVertex3f(height / 2, 0, 0.0)\n glVertex3f(height / 2, width, 0)\n glEnd()\n # absis\n glBegin(GL_LINES)\n glVertex3f(0, width / 2, 0.0)\n glVertex3f(height, width / 2, 0)\n glEnd()", "def graphicsDraw(self, win, center):\n\t\tlastPoint = None\n\t\tfor p in self.points:\n\t\t\tthisPoint = Point(p[0] + center.x, p[1] + center.y)\n\t\t\tif lastPoint is not None:\n\t\t\t\tline = Line(lastPoint, thisPoint)\n\t\t\t\tline.draw(win)\n\t\t\tlastPoint = thisPoint", "def draw_polyline(*points):\r\n global _canvas\r\n if _canvas == None:\r\n raise RuntimeError(\"Canvas is not open yet.\")\r\n else:\r\n #print(points)\r\n #print(len(points))\r\n newpoints = []\r\n for x in range(0, len(points), 2):\r\n #print(x)\r\n pt = Point(points[x], points[x+1])\r\n newpoints += [ pt ]\r\n #print(newpoints)\r\n path = Path(*newpoints)\r\n path.setBorderWidth(_current_line_thickness)\r\n path.setBorderColor(_current_color)\r\n _canvas.add(path)", "def draw(self, x, y, dx, dy, color):\n\n draw = ImageDraw.Draw(self.image)\n\n draw.rectangle([(x,y),(dx,dy)], color, outline=None)", "def create_line(self, x1, y1, x2, y2, style=None, parent=None):\n attrs = {'d': 'M %5f %5f L %5f %5f' % (x1, y1, x2, y2)}\n return self.create_path(attrs, style, parent)", "def draw_line(self,\n p,\n q,\n line_width=0.8,\n line_cap=cairo.LINE_CAP_ROUND,\n procrastinate=1000):\n if procrastinate == 0:\n self.set_line_width(line_width)\n self.set_line_cap(line_cap)\n self.move_to(*p)\n self.line_to(*q)\n self.stroke()\n else:\n rgb = self.rgb\n self.postponed.setdefault(procrastinate, [])\n self.postponed[procrastinate].append(\n (self.draw_line, (p, q, line_width, line_cap), rgb)\n )", "def draw_lines(self):\n for x_cord in range(0, Dimension.SCREEN_WIDTH.value, Dimension.SQUARE_WIDTH.value):\n pg.draw.line(self.window, Colors.BLACK.value, (x_cord, 0), (x_cord, Dimension.SCREEN_HEIGHT.value))\n\n for y_cord in range(0, Dimension.SCREEN_HEIGHT.value, Dimension.SQUARE_HEIGHT.value):\n pg.draw.line(self.window, Colors.BLACK.value, (0, y_cord), (Dimension.SCREEN_WIDTH.value, y_cord))\n\n pg.display.update()", "def drawSegment(self, point):\n import Part\n if self.planetrack and self.node:\n self.planetrack.set(self.node[-1])\n if len(self.node) == 1:\n _msg(translate(\"draft\", \"Pick next point\"))\n elif len(self.node) == 2:\n last = self.node[len(self.node) - 2]\n newseg = Part.LineSegment(last, point).toShape()\n self.obj.Shape = newseg\n self.obj.ViewObject.Visibility = True\n _msg(translate(\"draft\", \"Pick next point\"))\n else:\n currentshape = self.obj.Shape.copy()\n last = self.node[len(self.node) - 2]\n if not DraftVecUtils.equals(last, point):\n newseg = Part.LineSegment(last, point).toShape()\n newshape = currentshape.fuse(newseg)\n self.obj.Shape = newshape\n _msg(translate(\"draft\", \"Pick next point\"))", "def drawSegment(self, point):\n import Part\n if self.planetrack and self.node:\n self.planetrack.set(self.node[-1])\n if len(self.node) == 1:\n _msg(translate(\"draft\", \"Pick next point\"))\n elif len(self.node) == 2:\n last = self.node[len(self.node) - 2]\n newseg = Part.LineSegment(last, point).toShape()\n 
self.obj.Shape = newseg\n self.obj.ViewObject.Visibility = True\n _msg(translate(\"draft\", \"Pick next point\"))\n else:\n currentshape = self.obj.Shape.copy()\n last = self.node[len(self.node) - 2]\n if not DraftVecUtils.equals(last, point):\n newseg = Part.LineSegment(last, point).toShape()\n newshape = currentshape.fuse(newseg)\n self.obj.Shape = newshape\n _msg(translate(\"draft\", \"Pick next point\"))", "def line(self, start, end, color=(255, 255, 255), width=1):\n start = self._transform(start)\n end = self._transform(end)\n\n pygame.draw.line(self.screen, color, start, end, width)", "def line(self, clear_screen=True, x1=10, y1=10, x2=50, y2=50, line_color='black', width=1):\n\n if clear_screen:\n self.clear()\n\n return self.draw.line((x1, y1, x2, y2), fill=line_color, width=width)", "def _draw_line(self, event):\n if not self.obstacle_creation_mode:\n return\n\n if self.previous_coordinates is None:\n self.previous_coordinates = event.x, event.y\n self.new_obstacle.append([event.x, event.y])\n return\n\n x1, y1 = event.x, event.y\n\n if self._is_closing_shape(x1, y1, self.new_obstacle):\n x1, y1 = self.new_obstacle[0]\n else:\n self.new_obstacle.append([x1, y1])\n\n x0, y0 = self.previous_coordinates\n self.canvas.create_line(x0, y0, x1, y1, **self.LINE_OPTIONS)\n self.previous_coordinates = x1, y1", "def draw_path(self, path, color):\n\n half_width = self.cell_width/2\n half_height = self.cell_height/2\n\n # List of coordinates corresponding to the center of each\n # cell in the path, in a form acceptable to cvs.create_line.\n coord_list = []\n for cell in path:\n coord_list.append((self.cell_width*cell[1] + half_width,\n self.height - (self.cell_height*cell[0] + half_height)))\n \n self.cvs.create_line(coord_list, width=4, fill=color)\n self.draw()", "def draw_svg_line(points_list, parent, style):\n line_attribs = {'style': simplestyle.formatStyle(style),\n inkex.addNS('label', 'inkscape'): 'line',\n 'd': to_path_string(points_list, False)}\n\n inkex.etree.SubElement(parent, inkex.addNS('path', 'svg'), line_attribs)", "def OnDraw(self):\r\n self.SetCurrent()\r\n\r\n glClear(GL_COLOR_BUFFER_BIT)\r\n\r\n glBegin(GL_LINES)\r\n\r\n glColor3f(1.0, 1.0, 1.0)\r\n \r\n \r\n #Just in case these aren't set back to their starting place yet...\r\n self.currentpoint = self.startingpoint\r\n self.currentheading = 0 \r\n \r\n for element in self.finalstring:\r\n if element == '+':\r\n self.currentheading += self.angle\r\n elif element == '-':\r\n self.currentheading -= self.angle\r\n elif element == 'F':\r\n glVertex2i(self.currentpoint[0], self.currentpoint[1])\r\n self.currentpoint = self.NextPoint(self.currentpoint, self.length, self.currentheading)\r\n glVertex2i(self.currentpoint[0], self.currentpoint[1])\r\n elif element == '[':\r\n self.stack.append([self.currentpoint[0], self.currentpoint[1], self.currentheading])\r\n elif element == ']':\r\n popped = self.stack.pop()\r\n self.currentheading = popped.pop()\r\n self.currentpoint = popped\r\n \r\n \r\n glEnd()\r\n self.currentpoint = self.startingpoint\r\n self.currentheading = 0\r\n \r\n \r\n self.SwapBuffers() \r\n\r\n return", "def line():\n tt.left(90)\n tt.down()\n tt.forward(50)\n tt.up()\n tt.right(90)\n tt.forward(10)\n tt.right(90)\n tt.forward(50)\n tt.left(90)", "def draw_line(self, x0, y0, x1, y1, color=None, colorFunc=None, aa=False):\n if aa:\n self._draw_wu_line(x0, y0, x1, y1, color, colorFunc)\n else:\n self._draw_bresenham_line(x0, y0, x1, y1, color, colorFunc)", "def drawPath(self, path=[]):\n subpath = 
NSBezierPath.alloc().init()\n subpath.moveToPoint_(path[0][0])\n for p in path[1:]:\n if len(p) == 3:\n # curve\n A, B, C = p\n subpath.curveToPoint_controlPoint1_controlPoint2_(C, A, B)\n else:\n subpath.lineToPoint_(p[0])\n\n subpath.closePath()\n NSColor.colorWithCalibratedRed_green_blue_alpha_(\n 0, 0, 1, self.alpha\n ).set()\n subpath.stroke()", "def draw_line(mat, pt1, pt2, color=(0, 0, 255), thickness=1):\n cv2.line(mat, pt1, pt2, color, thickness=thickness)", "def down():\n global y, canvas # y é modificado\n canvas.create_line(x, y, x, y + 10)\n y += 10", "def paint_line(canvas, x0, y0, x1, y1, color):\n num_points = max(abs(x0-x1), abs(y0-y1))\n #create the points at every pixel between each coordinate\n xs = np.linspace(x0, x1, num_points)\n ys = np.linspace(y0, y1, num_points)\n\n #do splatter\n for i in range(len(xs) - 1):\n canvas[int(math.ceil(xs[i]))][int(math.ceil(ys[i]))] = color\n canvas[int(math.floor(xs[i]))][int(math.floor(ys[i]))] = color", "def DrawCurrent(self):\r\n \r\n x1, y1 = self._currentX, 0\r\n x1, y1 = self.ClientToScreen((x1, y1))\r\n x2 = self._currentX-1\r\n if wx.Platform == \"__WXMSW__\":\r\n x2 += 1 # but why ????\r\n\r\n y2 = 0\r\n dummy, y2 = self._owner.GetClientSize()\r\n x2, y2 = self._owner.ClientToScreen((x2, y2))\r\n\r\n dc = wx.ScreenDC()\r\n dc.SetLogicalFunction(wx.INVERT)\r\n dc.SetPen(wx.Pen(wx.BLACK, 2, wx.SOLID))\r\n dc.SetBrush(wx.TRANSPARENT_BRUSH)\r\n\r\n self.AdjustDC(dc)\r\n dc.DrawLine (x1, y1, x2, y2)\r\n dc.SetLogicalFunction(wx.COPY)", "def _createline(self):\n return self.cv.create_line(0, 0, 0, 0, fill=\"\", width=2,\n capstyle = TK.ROUND)", "def drawLine(x0,y0,x1,y1,ucoords=1):\n if ucoords:\n dislin.rline(x0,y0,x1,y1)\n else:\n dislin.line(x0,y0,x1,y1)", "def draw():", "def line(self, x0, y0, x1, y1, color):\n steep = abs(y1 - y0) > abs(x1 - x0)\n if steep:\n x0, y0 = y0, x0\n x1, y1 = y1, x1\n if x0 > x1:\n x0, x1 = x1, x0\n y0, y1 = y1, y0\n dx = x1 - x0\n dy = abs(y1 - y0)\n err = dx // 2\n ystep = 1 if y0 < y1 else -1\n while x0 <= x1:\n if steep:\n self.pixel(y0, x0, color)\n else:\n self.pixel(x0, y0, color)\n err -= dy\n if err < 0:\n y0 += ystep\n err += dx\n x0 += 1", "def drawLine(img, start, end, color = (0,0,255), thickness = 3):\n\tcv2.line(img, start, end, color, thickness)", "def draw_trace(self, trace):\n pts = [xform.chain(p) for p in (trace.p1, trace.p2)]\n self.canvas.line([(p.x, p.y) for p in pts], fill=colour)", "def draw(self, scene):\n scene.add(svg.Line(start=[self.position[0, 0], self.position[0, 1]], end=[self.position[1, 0], self.position[1, 1]], thickness=3))\n scene.add(svg.Line(start=[self.position[1, 0], self.position[1, 1]], end=[self.position[2, 0], self.position[2, 1]], thickness=3))\n\n scene.add(svg.Circle(center=self.position[0, :], radius=3, color='gray'))\n scene.add(svg.Circle(center=self.position[1, :], radius=3, color='gray'))\n\n # draw trajectory\n X = list(self.end_point_traj)\n for i in range(1, len(self.end_point_traj)):\n x1 = X[i-1]\n x2 = X[i]\n scene.add(svg.Line(start=x1, end=x2, thickness=1, color='red'))\n\n scene.add(svg.Circle(center=self.position[2, :], radius=3, color='blue'))", "def draw_lines(self, coords, **options):\n path = aggdraw.Path()\n\n def traverse_ring(coords):\n # begin\n coords = grouper(coords, 2)\n startx,starty = next(coords)\n path.moveto(startx, starty)\n \n # connect to each successive point\n for nextx,nexty in coords:\n path.lineto(nextx, nexty)\n \n # get drawing tools from options\n args = []\n if options[\"outlinecolor\"]:\n pen = 
aggdraw.Pen(options[\"outlinecolor\"], options[\"outlinewidth\"])\n args.append(pen)\n if options[\"fillcolor\"]:\n brush = aggdraw.Brush(options[\"fillcolor\"])\n args.append(brush)\n\n # draw the constructed path\n self.drawer.path((0,0), path, *args)", "def draw_polyline(self, points, line_width, line_color):\n line_color = check_color(line_color)\n STline.line(self.canvas, points, line_width, line_color)", "def draw_lines(self, x, y,\n north=False, south=False, east=False, west=False,\n color=\"black\"):\n upper_left = (y * self.scale, x * self.scale)\n upper_right = (upper_left[0] + self.scale, upper_left[1])\n lower_left = (upper_left[0], upper_left[1] + self.scale)\n lower_right = (upper_left[0] + self.scale, upper_left[1] + self.scale)\n\n if north:\n self.canvas.create_line(*upper_left, *upper_right, fill=color)\n\n if south:\n self.canvas.create_line(*lower_left, *lower_right, fill=color)\n\n if east:\n self.canvas.create_line(*upper_right, *lower_right, fill=color)\n\n if west:\n self.canvas.create_line(*upper_left, *lower_left, fill=color)", "def line_layer(self):\n screen_origin = self.ids.mapview.get_window_xy_from(lat1, lon1, self.ids.mapview.zoom)\n screen_destination = self.ids.mapview.get_window_xy_from(lat2, lon2, self.ids.mapview.zoom)\n point_list = [screen_origin[0], screen_origin[1], screen_destination[0], screen_destination[1]]\n\n with self.ids.line.canvas:\n self.ids.line.canvas.clear()\n\n Color(0, 0, 0, .6)\n Line(points=point_list, width=3, joint=\"bevel\")", "def draw_a(self):\r\n pen.down()\r\n pen.left(90)\r\n pen.forward(40)\r\n pen.right(90)\r\n pen.forward(40)\r\n pen.right(90)\r\n pen.forward(40)\r\n pen.up()\r\n pen.back(20)\r\n pen.right(90)\r\n pen.down()\r\n pen.forward(40)\r\n pen.up()\r\n pen.left(90)\r\n pen.forward(20)\r\n pen.left(90)\r\n pen.forward(50)", "def set_current_tool_to_draw_line_by_dragging(self, line_id=None):\n\n self.variables.current_shape_id = line_id\n self.show_shape(line_id)\n self.variables.active_tool = TOOLS.DRAW_LINE_BY_DRAGGING\n self.variables.current_tool = TOOLS.DRAW_LINE_BY_DRAGGING", "def draw_line(self, point1, point2, line_color, line_width=2):\n if point1 is not None and point2 is not None:\n self.draw.line([point1, point2], fill=line_color, width=line_width)", "def plot_line(self,x_0,y_0,x_1,y_1,col=\"black\",line_width=1,line_type=\"solid\"):\n self._fig.add_shape(\n go.layout.Shape(\n type=\"line\",\n x0=x_0,\n y0=y_0,\n x1=x_1,\n y1=y_1,\n line=dict(\n color=col,\n width=line_width,\n dash=line_type\n )\n )\n )", "def line_to(self, point: Onion[Tuple[float, float], Point2D, Point3D, Point]):\n start_point = self.last_point\n end_point = _point_2d(point)\n self._segments.append(Line3D.create(start_point, end_point))" ]
[ "0.74554735", "0.68954694", "0.6881306", "0.68413955", "0.68105817", "0.67588437", "0.6756782", "0.67204547", "0.6719156", "0.67131805", "0.66987574", "0.6690832", "0.6682115", "0.6653802", "0.66391045", "0.6631516", "0.6622917", "0.6610148", "0.65972066", "0.65874225", "0.65817493", "0.6572892", "0.65692616", "0.64773583", "0.6474336", "0.64631367", "0.6453293", "0.64431775", "0.64224786", "0.6415529", "0.6404759", "0.6398178", "0.6365423", "0.63072115", "0.62920576", "0.6284871", "0.6222283", "0.6220751", "0.6213327", "0.61803627", "0.617772", "0.61686534", "0.61407006", "0.6139056", "0.61193615", "0.61095226", "0.61037236", "0.6094679", "0.6094032", "0.6081325", "0.60804564", "0.60615915", "0.6059584", "0.60585934", "0.60584414", "0.60556257", "0.60530716", "0.6046487", "0.60457766", "0.60417503", "0.60141486", "0.6001325", "0.5986757", "0.59778714", "0.5977766", "0.5961665", "0.5941685", "0.59397936", "0.5928925", "0.59285045", "0.59285045", "0.59205353", "0.5916214", "0.59105235", "0.59035164", "0.59006333", "0.5888879", "0.58704823", "0.5868455", "0.586699", "0.58643323", "0.5857376", "0.58534163", "0.5845739", "0.58413076", "0.5828355", "0.58240664", "0.5822037", "0.5819585", "0.58073217", "0.5807006", "0.58068895", "0.57886297", "0.57730454", "0.577031", "0.5769129", "0.5757054", "0.57563365", "0.57559514", "0.57512665" ]
0.7574059
0
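The two bare values above close the preceding row: what appears to be a relevance score for the row's positive document (0.7574059) and its rank among retrieved candidates (0). A minimal sketch of inspecting one such row with the Hugging Face `datasets` library follows; the dataset path is a placeholder, and the column names for the trailing score and rank fields are assumptions, not confirmed by this page.

```python
# Hedged sketch: load and inspect one row of this (query, document, negatives)
# retrieval dump. "user/pen-drawing-retrieval" is a placeholder dataset id, and
# "document_score" / "document_rank" are assumed column names.
from datasets import load_dataset

ds = load_dataset("user/pen-drawing-retrieval", split="train")  # hypothetical id
row = ds[0]
print(row["query"])            # docstring-style query text
print(row["document"][:80])    # positive code snippet, stored with escaped \n
print(len(row["negatives"]))   # mined hard-negative snippets for the same query
print(row["document_score"], row["document_rank"])  # assumed field names
```

Each row below follows the same shape: a docstring-style query, the positive snippet, a metadata dict, the mined negatives, their scores, and the trailing score and rank values.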
Draws a line from the current pen position to (x,y)
def drawTo(self, x, y): assert (type(x) in [int, float]), "parameter x:%s is not a valid number" % `x` assert (type(y) in [int, float]), "parameter y:%s is not a valid number" % `y` self._turtle.setposition(x, y)
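The positive snippet above is Python 2 code: the assert messages wrap x and y in backticks, Python 2's repr shorthand, which no longer parses in Python 3. A self-contained Python 3 port follows; the enclosing turtle-backed Pen class is an assumption, reconstructed only far enough to make the method runnable, since the full class is not shown on this page.

```python
# Python 3 port of the drawTo positive above. The minimal Pen wrapper around
# turtle.Turtle is assumed; only the method body comes from the row itself.
import turtle

class Pen:
    def __init__(self):
        self._turtle = turtle.Turtle()

    def drawTo(self, x, y):
        """Draw a line from the current pen position to (x, y)."""
        assert type(x) in [int, float], "parameter x:%s is not a valid number" % repr(x)
        assert type(y) in [int, float], "parameter y:%s is not a valid number" % repr(y)
        self._turtle.setposition(x, y)

pen = Pen()
pen.drawTo(50, 50)  # draws from the turtle's start position to (50, 50)
```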
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def draw_line():\n global y1, y2\n canvas.create_line(x1, y1, x2, y2, width=2, fill=color)\n y1 -= 10\n y2 += 10", "def line(self, x, y):\n self.call('line', x, y)", "def draw_line(self, x):\n self.PDF.setStrokeColor(black01)\n self.PDF.setLineWidth(1)\n self.PDF.line(75, x, 550, x)\n self.PDF.setStrokeColor(\"black\")", "def draw_line(self, x0, y0, x1, y1, color=Color['white']):\n pygame.draw.line(self.display, color, (x0, y0), (x1, y1))", "def drawLine(self, dx, dy):\n assert (type(dx) in [int, float]), \"parameter x:%s is not a valid number\" % `dx`\n assert (type(dy) in [int, float]), \"parameter y:%s is not a valid number\" % `dy`\n x = self._turtle.xcor()\n y = self._turtle.ycor()\n self._turtle.setposition(x+dx, y+dy)", "def dline(x, y):\n glClear(GL_COLOR_BUFFER_BIT)\n glColor3f(0.0, 0.0, 1.0)\n glPointSize(10.0)\n glBegin(GL_POINTS)\n while (x <= y):\n glVertex2f(x, x)\n x += 0.05\n glEnd()\n glFlush()", "def draw_line(x1, y1, x2, y2):\r\n #global _canvas\r\n #global _current_line_thickness\r\n #global _current_color\r\n if _canvas == None:\r\n raise RuntimeError(\"Canvas is not open yet.\")\r\n else:\r\n path = Path(Point(x1, y1), Point(x2, y2))\r\n path.setBorderWidth(_current_line_thickness)\r\n path.setBorderColor(_current_color)\r\n _canvas.add(path)", "def __draw_line(display, color, ball_pos, dx, dy):\n pygame.draw.line(display, color, ball_pos, (ball_pos[0] + dx, ball_pos[1] + dy), 2)", "def draw_line(color, start_pos, end_pos, width=1):\n pygame.draw.line(screen, color, start_pos, end_pos, width)", "def draw_line(self, x1, y1, x2, y2, color):\n painter = QPainter()\n painter.begin(self.lbFFmpeg.pixmap())\n painter.setPen(QColor(color))\n painter.drawLine(x1, y1, x2, y2)\n painter.end()\n self.lbFFmpeg.update()", "def DrawLinePoint(*args, **kwargs):\n return _gdi_.PseudoDC_DrawLinePoint(*args, **kwargs)", "def draw_line(self, gray=0, nextline=0):\n\n self.fontsize = 4\n if nextline:\n self.nextline()\n else:\n self.linespace(8)\n self.resetx()\n c = self.canvas\n c.setStrokeGray(gray)\n c.setLineWidth(1)\n #self.y = self.y + self.linespacing + (self.fontsize/2)\n c.line(self.x, self.y, self.width - self.x, self.y)\n self.y = self.y + (self.linespacing)", "def plot(self, x, y, color=\"black\"):\n self.__checkOpen()\n xs,ys = self.toScreen(x,y)\n #self.create_line(xs,ys,xs+1,ys, fill=color)\n _tkExec(self.create_line,xs,ys,xs+1,ys,fill=color,tag=\"line\")\n self.__autoflush()", "def draw_point(self, p):\n length = 3\n self.set_line_width(0.1)\n self.set_source_rgba(0, 0, 1, 1)\n self.move_to(p.x + length, p.y)\n self.line_to(p.x - length, p.y)\n self.stroke()\n self.move_to(p.x, p.y + length)\n self.line_to(p.x, p.y - length)\n self.stroke()", "def draw_shape_line(self, line, xform, colour):\n pts = [xform.chain(p) for p in (line.p1, line.p2)]\n self.canvas.line([(p.x, p.y) for p in pts], fill=colour)", "def wdraw_line(self, wx0, wy0, wx1, wy1, color, arrow):\r\n dx0, dy0 = self.w_to_d(wx0, wy0)\r\n dx1, dy1 = self.w_to_d(wx1, wy1)\r\n self.canvas.create_line(dx0, dy0, dx1, dy1, fill=color, arrow=arrow)", "def DrawLinePoint(*args, **kwargs):\n return _gdi_.DC_DrawLinePoint(*args, **kwargs)", "def _draw_line(event, x, y, flags, params):\n global img, source_img\n global p1, p2\n if event == cv2.EVENT_LBUTTONDOWN:\n img = source_img.copy()\n p1 = (x, y)\n elif event == cv2.EVENT_LBUTTONUP:\n p2 = (x, y)\n img = source_img.copy()\n text = 'position: %d' % p2[0]\n cv2.putText(img, text, (100, 100),\n cv2.FONT_HERSHEY_SIMPLEX, 3, DrawingShapeUtils.COLOR, \n 
DrawingShapeUtils.LINE_THICKNESS)\n cv2.line(img, (x, y+100), (x, y-100), DrawingShapeUtils.COLOR,\n DrawingShapeUtils.LINE_THICKNESS)", "def draw_line(self, DISP, side:str, indizes:tuple, pink = False):\r\n offset = 1 #< Just to draw the line nicely\r\n pos = (indizes[0] - 1) * self.grid_size, indizes[1] * self.grid_size\r\n # Check if it's a pink line\r\n if pink:\r\n start_pos = pos[0], pos[1] + self.grid_size // 2\r\n end_pos = pos[0] + self.grid_size, pos[1] + self.grid_size // 2\r\n # Check if the line should be vertically. u for up\r\n elif side == 'u':\r\n start_pos = pos[0] + self.width - offset + self.grid_size // 2, pos[1] + self.grid_size // 2\r\n end_pos = pos[0] + self.grid_size + offset + self.grid_size // 2 - self.width, pos[1] + self.grid_size // 2\r\n # Check if the line should be horizontally. l for left\r\n elif side == 'l':\r\n start_pos = pos[0] + self.grid_size // 2, pos[1] + self.width - offset + self.grid_size // 2\r\n end_pos = pos[0] + self.grid_size // 2, pos[1] - self.width + self.grid_size + offset + self.grid_size // 2\r\n if not pink:\r\n pg.draw.line(DISP, Colors.colors['BLACK'], start_pos,end_pos, self.width + 2 * offset) \r\n else:\r\n pg.draw.line(DISP, Colors.colors['PINK'], start_pos,end_pos, self.width + 2 * offset)", "def startLineDrawing(self, startPos):\n self.line = LineNodePath(render2d, thickness=2, colorVec=(0.8,0.8,0.8,1))\n self.line.moveTo(startPos)\n t = taskMgr.add(self.drawLineTask, \"drawLineTask\")\n t.startPos = startPos", "def line_to(self, x, y):\n self._impl.line_to(x, y)", "def drawLine(self,start,stop):\n startX = int(self.vert[start][0]*self.scale + self.size/2)\n startY = int(self.vert[start][1]*self.scale + self.size/2)\n endX = int(self.vert[stop][0]*self.scale + self.size/2)\n endY = int(self.vert[stop][1]*self.scale + self.size/2)\n \n self.canvas.create_line(startX,startY,endX,endY,fill='white')", "def draw(x,y,x1,y1,d,color=1):\n d.add(dxf.line((x,y),(x1,y1),color=color, layer='LINES',thickness=0.01))", "def line(self, clear_screen=True, x1=10, y1=10, x2=50, y2=50, line_color='black', width=1):\n\n if clear_screen:\n self.clear()\n\n return self.draw.line((x1, y1, x2, y2), fill=line_color, width=width)", "def DrawLine(*args, **kwargs):\n return _gdi_.PseudoDC_DrawLine(*args, **kwargs)", "def line(canvas, points, line_width, line_color):\n \n # duplicate first point in case only one point was given\n points = points[0], points\n canvas.create_line(points, width = int(line_width), fill = line_color)", "def draw_line(self, point1, point2, line_width, line_color):\n line_color = check_color(line_color)\n STline.line(self.canvas, (point1, point2), line_width, line_color)", "def draw(self):\n # s1 = ShowPoint(self.cnv, self.p1.xpt, self.p1.ypt)\n # s2 = ShowPoint(self.cnv, self.p2.xpt, self.p2.ypt)\n # s1.draw()\n # # s2.draw()\n self.cnv.create_line(self.p1.xpt, self.p1.ypt, self.p2.xpt, self.p2.ypt)", "def _draw_current_pos(self, painter):\n\t\tif self.current_pos is None:\n\t\t\treturn\n\t\tpx = self.map_stamp_to_x(self.current_pos)\n\t\tpw, ph = self._current_pos_pointer_size\n\n\t\t# Line\n\t\tpainter.setPen(QtGui.QPen(self._current_pos_color))\n\t\tpainter.setBrush(QtGui.QBrush(self._current_pos_color))\n\t\tpainter.drawLine(px, self._history_top - 1, px, self._history_bottom + 2)\n\n\t\t# Upper triangle\n\t\tpy = self._history_top - ph\n\t\tpainter.drawPolygon(\n\t\t QtGui.QPolygonF(\n\t\t [QtCore.QPointF(px, py + ph),\n\t\t QtCore.QPointF(px + pw, py),\n\t\t QtCore.QPointF(px - pw, py)]\n\t\t )\n\t\t)\n\n\t\t# Lower 
triangle\n\t\tpy = self._history_bottom + 1\n\t\tpainter.drawPolygon(\n\t\t QtGui.QPolygonF(\n\t\t [QtCore.QPointF(px, py),\n\t\t QtCore.QPointF(px + pw, py + ph),\n\t\t QtCore.QPointF(px - pw, py + ph)]\n\t\t )\n\t\t)\n\n\t\tpainter.setBrush(self._default_brush)\n\t\tpainter.setPen(self._default_pen)", "def draw_lines(self):\n # draw x lines\n y = self.step_y\n while y <= self.height:\n x = 0\n while x <= self.width:\n self.canvas.create_line(x, y, x+3.5, y)\n self.canvas.update()\n x += 3.5\n y += self.step_y\n \n # draw y lines\n x = self.step_x\n while x <= self.width:\n y = 0\n while y <= self.height:\n self.canvas.create_line(x, y, x, y+3.5)\n self.canvas.update()\n y += 3.5\n x += self.step_x\n \n self.is_operating = False", "def paint_line(canvas, x0, y0, x1, y1, color):\n num_points = max(abs(x0-x1), abs(y0-y1))\n #create the points at every pixel between each coordinate\n xs = np.linspace(x0, x1, num_points)\n ys = np.linspace(y0, y1, num_points)\n\n #do splatter\n for i in range(len(xs) - 1):\n canvas[int(math.ceil(xs[i]))][int(math.ceil(ys[i]))] = color\n canvas[int(math.floor(xs[i]))][int(math.floor(ys[i]))] = color", "def draw_line(self, pt0, pt1, color):\n steep = False\n if abs(pt0[0]-pt1[0]) < abs(pt0[1]-pt1[1]):\n pt0[0], pt0[1] = pt0[1], pt0[0]\n pt1[0], pt1[1] = pt1[1], pt1[0]\n steep = True\n\n if pt0[0] > pt1[0]:\n pt0[0], pt1[0] = pt1[0], pt0[0]\n pt0[1], pt1[1] = pt1[1], pt0[1]\n\n if pt0[1] > pt1[1]:\n dy = pt0[1] - pt1[1]\n inc_y = -1\n else:\n dy = pt1[1] - pt0[1]\n inc_y = 1\n\n dx = pt1[0] - pt0[0]\n d = 2 * dy - dx\n incr_e = 2 * dy\n incr_ne = 2 * (dy - dx)\n x = pt0[0]\n y = pt0[1]\n\n if not steep:\n self.buffer.set_pixel((x, y), color)\n while x < pt1[0]:\n if d <= 0:\n d = d + incr_e\n x = x + 1\n else:\n d = d + incr_ne\n x = x + 1\n y = y + inc_y\n self.buffer.set_pixel((x, y), color)\n else:\n self.buffer.set_pixel((y, x), color)\n while x < pt1[0]:\n if d <= 0:\n d = d + incr_e\n x = x + 1\n else:\n d = d + incr_ne\n x = x + 1\n y = y + inc_y\n self.buffer.set_pixel((y, x), color)", "def draw_line(self, start_p, end_p, color, thickness: float):\n line_seg = LineSegs(\"interface\")\n line_seg.setColor(*color)\n line_seg.moveTo(start_p[0] * self.w_scale, 0, start_p[1] * self.h_scale)\n line_seg.drawTo(end_p[0] * self.w_scale, 0, end_p[1] * self.h_scale)\n line_seg.setThickness(thickness)\n line_np = self.aspect2d.attachNewNode(line_seg.create(False))\n return line_np", "def StrokeLine(*args, **kwargs):\n return _gdi_.GraphicsContext_StrokeLine(*args, **kwargs)", "def DrawLine(*args, **kwargs):\n return _gdi_.DC_DrawLine(*args, **kwargs)", "def _draw_line(plot, hori, vert, color, text):\n plot.plot(hori, vert, '-o'+color)\n plot.text(hori[-1]-3, vert[-1]+2, text, color=color)", "def drawPath(self):\r\n bgl.glColor4f(0.8,0.8,0.9,0.01)\r\n bgl.glLineWidth(0.01)\r\n\r\n bgl.glBegin(bgl.GL_LINES)\r\n bgl.glVertex3f(self.p1[0],self.p1[1],self.p1[2])\r\n bgl.glVertex3f(self.p2[0],self.p2[1],self.p2[2])\r\n bgl.glEnd()\r\n\r\n bgl.glNormal3f(0.0,0.0,1.0)\r\n bgl.glShadeModel(bgl.GL_SMOOTH);", "def draw_line(self, color, p1: Point, p2: Point, width):\n _p1 = self.T.itrans(p1)\n _p2 = self.T.itrans(p2)\n pg.draw.line(self.screen, color, _p1(), _p2(), 2)", "def draw_line(self,\n p,\n q,\n line_width=0.8,\n line_cap=cairo.LINE_CAP_ROUND,\n procrastinate=1000):\n if procrastinate == 0:\n self.set_line_width(line_width)\n self.set_line_cap(line_cap)\n self.move_to(*p)\n self.line_to(*q)\n self.stroke()\n else:\n rgb = self.rgb\n 
self.postponed.setdefault(procrastinate, [])\n self.postponed[procrastinate].append(\n (self.draw_line, (p, q, line_width, line_cap), rgb)\n )", "def add_line(self, x0, y0, x1, y1, style=None):\n style = self.__prepare_style(style, 'o')\n if x0 > x1:\n # swap A and B\n x1, x0 = x0, x1\n y1, y0 = y0, y1\n # get delta x, y\n dx = x1 - x0\n dy = y1 - y0\n # if a length of line is zero just add point\n if dx == 0 and dy == 0:\n if self.check_coord_in_range(x0, y0):\n self.canvas[y0][x0] = style\n return\n # when dx >= dy use fill by x-axis, and use fill by y-axis otherwise\n if abs(dx) >= abs(dy):\n for x in range(x0, x1 + 1):\n y = y0 if dx == 0 else y0 + int(round((x - x0) * dy / float((dx))))\n if self.check_coord_in_range(x, y):\n self.canvas[y][x] = style\n else:\n if y0 < y1:\n for y in range(y0, y1 + 1):\n x = x0 if dy == 0 else x0 + int(round((y - y0) * dx / float((dy))))\n if self.check_coord_in_range(x, y):\n self.canvas[y][x] = style\n else:\n for y in range(y1, y0 + 1):\n x = x0 if dy == 0 else x1 + int(round((y - y1) * dx / float((dy))))\n if self.check_coord_in_range(x, y):\n self.canvas[y][x] = style", "def _defLine(self):\n self._dline=GPath(points = [0,100,GAME_WIDTH,100], linewidth = 1.5,\n linecolor = 'cyan')", "def drawPoints(self, qp):\n\n# pen = self.pen\n\n\n size = self.size()\n self.yOffset = [size.height()*0.2 + size.height()*0.618/self.NUM_CHANNEL * y for y in xrange(self.NUM_CHANNEL) ]\n\n for ix in xrange(self.NUM_CHANNEL):\n self.pen.setStyle(Qt.SolidLine)\n self.pen.setWidth(2)\n self.pen.setBrush(self.PEN_COLOR[ix])\n self.pen.setCapStyle(Qt.RoundCap)\n self.pen.setJoinStyle(Qt.RoundJoin)\n qp.setPen(self.pen)\n\n qp.drawLine(self.x - 2, self.yOffset[ix] - \\\n self.data_1[ix] * self.DISPLAY_SCALING[ix],\\\n self.x , self.yOffset[ix] - \\\n self.data[ix] * self.DISPLAY_SCALING[ix])", "def draw_line(self, p1, p2, color, thickness=2):\n p1 = self._format_point(p1)\n p2 = self._format_point(p2)\n opencv.line(self.img, p1.tuple(), p2.tuple(), color.bgra(), thickness=thickness)", "def __draw_path(\n self, x_path, y_path, opt_line, opt_marker,\n opt_colour, thickness=0.05):\n # Get colour\n colour = self.__get_colour_from_string(opt_colour)\n\n # For every point in the list, draw a line to the next one\n # (excluding last point)\n for point in range(0, len(x_path)):\n # Get point 1\n x1 = x_path[point]\n y1 = y_path[point]\n p1 = vector(x1, y1, 0)\n\n # If at end / only coordinate - draw a marker\n if point == len(x_path) - 1:\n create_marker(self.scene, x1, y1, opt_marker, colour)\n return\n\n # Get point 2\n x2 = x_path[point + 1]\n y2 = y_path[point + 1]\n p2 = vector(x2, y2, 0)\n\n if opt_line == '':\n # Only one marker to avoid double-ups\n create_marker(self.scene, x1, y1, opt_marker, colour)\n elif opt_line == '-':\n create_line(\n p1, p2, self.scene, colour=colour, thickness=thickness)\n # Only one marker to avoid double-ups\n create_marker(self.scene, x1, y1, opt_marker, colour)\n elif opt_line == '--':\n create_segmented_line(\n p1, p2, self.scene, 0.3, colour=colour,\n thickness=thickness)\n # Only one marker to avoid double-ups\n create_marker(self.scene, x1, y1, opt_marker, colour)\n elif opt_line == ':':\n create_segmented_line(\n p1, p2, self.scene, 0.05, colour=colour,\n thickness=thickness)\n # Only one marker to avoid double-ups\n create_marker(self.scene, x1, y1, opt_marker, colour)\n elif opt_line == '-.':\n raise NotImplementedError(\"Other line types not implemented\")\n else:\n raise ValueError(\"Invalid line type given\")", "def 
draw_line(image, pt1, pt2, color):\n\n\t# TODO -> Add your line drawing code here\n\n\t# Sample code to draw white dots at pt1 and pt2 positions\n\tset_pixel(image, pt1, color)\n\tset_pixel(image, pt2, color)", "def _draw_line_text(self):\n self._line_text.set_text(self.model.get_current_line())", "def DrawCurrent(self):\r\n \r\n x1, y1 = self._currentX, 0\r\n x1, y1 = self.ClientToScreen((x1, y1))\r\n x2 = self._currentX-1\r\n if wx.Platform == \"__WXMSW__\":\r\n x2 += 1 # but why ????\r\n\r\n y2 = 0\r\n dummy, y2 = self._owner.GetClientSize()\r\n x2, y2 = self._owner.ClientToScreen((x2, y2))\r\n\r\n dc = wx.ScreenDC()\r\n dc.SetLogicalFunction(wx.INVERT)\r\n dc.SetPen(wx.Pen(wx.BLACK, 2, wx.SOLID))\r\n dc.SetBrush(wx.TRANSPARENT_BRUSH)\r\n\r\n self.AdjustDC(dc)\r\n dc.DrawLine (x1, y1, x2, y2)\r\n dc.SetLogicalFunction(wx.COPY)", "def draw_line(self, frame, rect):\n print(\"x0, y0, x1, y1\", self.x0, self.y0, self.x1, self.y1)\n print(\"cross_x, cross_y\", self.cross_x, self.cross_y)\n left, top, right, bottom = rect\n # 枠内では線を表示しないようにしてやる\n if top<self.y1<bottom and left<self.x1<right:\n return\n # フレームと線の交点\n if (self.x1 >= right or self.x1 <= left or self.y1 <= top or self.y1 >= bottom) and self.cross_x == 0:\n self.cross_x = self.x1\n self.cross_y = self.y1\n return\n draw = ImageDraw.Draw(frame)\n draw.line((self.cross_x, self.cross_y, self.x1, self.y1), fill=(255, 255, 255), width=3)", "def draw_path(self):\r\n if len(self.path) > 1:\r\n for i in range(1, len(self.path)):\r\n pg.draw.line(self.screen, (0, 150, 0),\r\n self.path[i - 1], self.path[i], 1)\r\n elif len(self.path) == 1:\r\n pg.draw.circle(self.screen, (0, 150, 0),\r\n (int(self.path[0].x), int(self.path[0].y)), 1)", "def draw_line(self, point1, point2, line_color, line_width=2):\n if point1 is not None and point2 is not None:\n self.draw.line([point1, point2], fill=line_color, width=line_width)", "def _lineTo(self, pt):\n x, y = pt\n # duplicate point\n if x == self._lastX and y == self._lastY:\n return\n # vertical line\n elif x == self._lastX:\n cmd = \"V\"\n pts = str(y)\n # horizontal line\n elif y == self._lastY:\n cmd = \"H\"\n pts = str(x)\n # previous was a moveto\n elif self._lastCommand == \"M\":\n cmd = None\n pts = \" \" + pointToString(pt)\n # basic\n else:\n cmd = \"L\"\n pts = pointToString(pt)\n # write the string\n t = \"\"\n if cmd:\n t += cmd\n self._lastCommand = cmd\n t += pts\n self._commands.append(t)\n # store for future reference\n self._lastX, self._lastY = pt", "def draw_line(mat, pt1, pt2, color=(0, 0, 255), thickness=1):\n cv2.line(mat, pt1, pt2, color, thickness=thickness)", "def draw_point(self, x, y, color):\n RADIUS = 6\n painter = QPainter()\n painter.begin(self.lbFFmpeg.pixmap())\n painter.setPen(QColor(color))\n painter.drawEllipse(QPoint(x, y), RADIUS, RADIUS)\n # cross inside circle\n painter.drawLine(x - RADIUS, y, x + RADIUS, y)\n painter.drawLine(x, y - RADIUS, x, y + RADIUS)\n painter.end()\n self.lbFFmpeg.update()", "def drawLine(x0,y0,x1,y1,ucoords=1):\n if ucoords:\n dislin.rline(x0,y0,x1,y1)\n else:\n dislin.line(x0,y0,x1,y1)", "def line(self, x0, y0, x1, y1, char): # noqa: C901, PLR0912\n # pylint: disable=too-many-arguments, too-many-branches\n if x0 > x1:\n x1, x0 = x0, x1\n y1, y0 = y0, y1\n\n dx = x1 - x0\n dy = y1 - y0\n\n if dx == 0 and dy == 0:\n self.point(x0, y0, char)\n elif abs(dx) >= abs(dy):\n for x in range(x0, x1 + 1):\n if dx == 0:\n y = y0\n else:\n y = y0 + int(round((x - x0) * dy / float(dx)))\n self.point(x, y, char)\n elif y0 < y1:\n for y in range(y0, 
y1 + 1):\n if dy == 0:\n x = x0\n else:\n x = x0 + int(round((y - y0) * dx / float(dy)))\n self.point(x, y, char)\n else:\n for y in range(y1, y0 + 1):\n if dy == 0:\n x = x0\n else:\n x = x1 + int(round((y - y1) * dx / float(dy)))\n self.point(x, y, char)", "def line(self, start, end, color=(255, 255, 255), width=1):\n start = self._transform(start)\n end = self._transform(end)\n\n pygame.draw.line(self.screen, color, start, end, width)", "def line(self, x0, y0, x1, y1, color):\n steep = abs(y1 - y0) > abs(x1 - x0)\n if steep:\n x0, y0 = y0, x0\n x1, y1 = y1, x1\n if x0 > x1:\n x0, x1 = x1, x0\n y0, y1 = y1, y0\n dx = x1 - x0\n dy = abs(y1 - y0)\n err = dx // 2\n ystep = 1 if y0 < y1 else -1\n while x0 <= x1:\n if steep:\n self.pixel(y0, x0, color)\n else:\n self.pixel(x0, y0, color)\n err -= dy\n if err < 0:\n y0 += ystep\n err += dx\n x0 += 1", "def draw_line(self, x0, y0, x1, y1, color=None, colorFunc=None, aa=False):\n if aa:\n self._draw_wu_line(x0, y0, x1, y1, color, colorFunc)\n else:\n self._draw_bresenham_line(x0, y0, x1, y1, color, colorFunc)", "def move(self, x, y):\r\n if self.brush_on:\r\n for lx, ly in line(self.pos_x, self.pos_y, x, y):\r\n self.set(lx, ly)\r\n\r\n self.pos_x = x\r\n self.pos_y = y", "def line_layer(self):\n screen_origin = self.ids.mapview.get_window_xy_from(lat1, lon1, self.ids.mapview.zoom)\n screen_destination = self.ids.mapview.get_window_xy_from(lat2, lon2, self.ids.mapview.zoom)\n point_list = [screen_origin[0], screen_origin[1], screen_destination[0], screen_destination[1]]\n\n with self.ids.line.canvas:\n self.ids.line.canvas.clear()\n\n Color(0, 0, 0, .6)\n Line(points=point_list, width=3, joint=\"bevel\")", "def plotPixel(self, x, y, color=\"black\"):\n self.__checkOpen()\n #self.create_line(x,y,x+1,y, fill=color)\n _tkExec(self.create_line, x,y,x+1,y, fill=color,tag=\"line\")\n self.__autoflush()", "def line():\n tt.left(90)\n tt.down()\n tt.forward(50)\n tt.up()\n tt.right(90)\n tt.forward(10)\n tt.right(90)\n tt.forward(50)\n tt.left(90)", "def draw(self, draw_line):\n #draw_rect(self.color, (self.position, (self.size, self.size)))\n line_start = (int(self.position[0]), int(self.position[1] - self.size/2))\n line_end = (int(line_start[0] + self.size), line_start[1])\n draw_line(self.color, line_start, line_end, self.size)\n\n gun_start = (int(self.position[0] + self.size/2), line_start[1])\n gun_end = (int(gun_start[0] + math.cos(self.angle) * self.barrel), int(gun_start[1] - math.sin(self.angle) * self.barrel))\n draw_line(self.color, gun_start, gun_end, 5)", "def draw_line():\n\n # Small Size Line\n glLineWidth(0.1)\n glColor3f(0.5, 1.0, 0.9)\n wid = 0\n while wid <= width:\n length = 0\n while length <= height:\n glBegin(GL_LINES)\n glVertex3f(0.0, length, 0.0)\n glVertex3f(wid, length, 0)\n glEnd()\n glBegin(GL_LINES)\n glVertex3f(length, 0, 0.0)\n glVertex3f(length, wid, 0)\n glEnd()\n length += 10\n wid += 50\n # Medium Size Line\n glLineWidth(2.0)\n wid = 0\n while wid <= width:\n length = 0\n while length <= height:\n glBegin(GL_LINES)\n glVertex3f(0.0, length, 0.0)\n glVertex3f(wid, length, 0)\n glEnd()\n length += 50\n glBegin(GL_LINES)\n glVertex3f(length, 0, 0.0)\n glVertex3f(length, wid, 0)\n glEnd()\n wid += 50\n # Main Line\n # ordinat\n glLineWidth(1.5)\n glColor3f(0.5, 0.4, 0.8)\n glBegin(GL_LINES)\n glVertex3f(height / 2, 0, 0.0)\n glVertex3f(height / 2, width, 0)\n glEnd()\n # absis\n glBegin(GL_LINES)\n glVertex3f(0, width / 2, 0.0)\n glVertex3f(height, width / 2, 0)\n glEnd()", "def draw_line_segment(\n x1: float, y1: 
float, x2: float, y2: float, color: C3F\n ) -> None:\n pyglet.graphics.draw(\n 2,\n pyglet.gl.GL_LINE_STRIP,\n (GeoDrawer._VERTEX_MODE, [x1, y1, x2, y2]),\n (GeoDrawer._COLOR_MODE, color * 2),\n )", "def AddLineToPoint(*args):\n return _gdi_.GraphicsPath_AddLineToPoint(*args)", "def line(self, points, ls=\"--\", draw=\"black\", lw=None, options=None, kwoptions=None):\n\n draw = norm_colour(draw)\n self.use_colour(draw)\n\n if kwoptions is None:\n kwoptions = {}\n kwopts = {'draw': draw, **kwoptions}\n if lw:\n kwopts['line width'] = lw\n\n self._commands.append(rf\"\\draw{wrap(fmt_options(options,kwopts))} \" +\n f\" {ls} \".join(map(fmt_point, points))+\";\")", "def draw_polyline(self, points, line_width, line_color):\n line_color = check_color(line_color)\n STline.line(self.canvas, points, line_width, line_color)", "def _draw_line(self, event):\n if not self.obstacle_creation_mode:\n return\n\n if self.previous_coordinates is None:\n self.previous_coordinates = event.x, event.y\n self.new_obstacle.append([event.x, event.y])\n return\n\n x1, y1 = event.x, event.y\n\n if self._is_closing_shape(x1, y1, self.new_obstacle):\n x1, y1 = self.new_obstacle[0]\n else:\n self.new_obstacle.append([x1, y1])\n\n x0, y0 = self.previous_coordinates\n self.canvas.create_line(x0, y0, x1, y1, **self.LINE_OPTIONS)\n self.previous_coordinates = x1, y1", "def draw_trace(self, trace):\n pts = [xform.chain(p) for p in (trace.p1, trace.p2)]\n self.canvas.line([(p.x, p.y) for p in pts], fill=colour)", "def up():\n global y, canvas # y é modificado\n canvas.create_line(x, y, x, y - 10)\n y -= 10", "def next_line():\r\n set_point(point().next_line())", "def hLine(x_min, x_max, y):\n glClear(GL_COLOR_BUFFER_BIT)\n glColor3f(1.0, 0.0, 0.0)\n glPointSize(10.0) # Set the point with a specific radius\n glBegin(GL_POINTS) # Begin plotting point\n x = x_min\n while (x <= x_max):\n glVertex2f(x, y)\n x += 0.05\n glEnd()\n glFlush()", "def draw_polyline(*points):\r\n global _canvas\r\n if _canvas == None:\r\n raise RuntimeError(\"Canvas is not open yet.\")\r\n else:\r\n #print(points)\r\n #print(len(points))\r\n newpoints = []\r\n for x in range(0, len(points), 2):\r\n #print(x)\r\n pt = Point(points[x], points[x+1])\r\n newpoints += [ pt ]\r\n #print(newpoints)\r\n path = Path(*newpoints)\r\n path.setBorderWidth(_current_line_thickness)\r\n path.setBorderColor(_current_color)\r\n _canvas.add(path)", "def vline(y_min, y_max, x):\n glClear(GL_COLOR_BUFFER_BIT)\n glColor3f(0.0, 1.0, 0.0)\n glPointSize(10.0)\n glBegin(GL_POINTS)\n y = y_min\n while (y <= y_max):\n glVertex2f(x, y)\n y += 0.05\n glEnd()\n glFlush()", "def down():\n global y, canvas # y é modificado\n canvas.create_line(x, y, x, y + 10)\n y += 10", "def set_current_tool_to_draw_line_by_clicking(self, line_id=None):\n\n self.variables.current_shape_id = line_id\n self.show_shape(line_id)\n self.variables.active_tool = TOOLS.DRAW_LINE_BY_CLICKING\n self.variables.current_tool = TOOLS.DRAW_LINE_BY_CLICKING", "def line(\n self, x: Hashable | None = None, y: Hashable | None = None, **kwargs\n ) -> PlotAccessor:\n return self(kind=\"line\", x=x, y=y, **kwargs)", "def lineTo(self, pt: Tuple[float, float]) -> None:\n raise NotImplementedError", "def line(self, drawer, canvas):\n start_width = random.randint(\n self._width / 8, self._width / 4)\n start_height = random.randint(\n self._height / 4, self._height * 3 / 4)\n stop_width = random.randint(\n self._width * 3 / 4, self._width * 7 / 8)\n stop_height = random.randint(\n self._height / 4, self._height * 3 / 4)\n 
drawer.line(\n (start_width,\n start_height,\n stop_width,\n stop_height),\n fill=random.randint(128, 155),\n width=3\n )", "def drawSlope(self):\n length = sqrt(1 + self.slope**2) # Length of the line segment over 1 x-unit\n xOffset = (segmentLength / length) / 2 # Figures out how many times the length of the 1 unit length fits into the desired length\n # then divides by 2 becuase half is on the left and half on the right of the center\n\n\n # Left end point\n xLeft = self.x - xOffset\n yLeft = (self.slope * (xLeft - self.x)) + self.y\n\n # Right end point\n xRight = self.x + xOffset\n yRight = (self.slope * (xRight - self.x)) + self.y\n\n\n # Converts the left and right end points from cartesian coordinates to screen coordinates\n left = cartesianToScreen(xLeft , yLeft)\n right = cartesianToScreen(xRight, yRight)\n\n\n pygame.draw.aaline(display, self.color, left, right, 1) # DRAWS THE LINE AHHHHHHHHHHHHHHHHHH :P", "def drawVertLine(self, x, y1, y2, val):\r\n for y in range(y2 - y1):\r\n self.setPixel(x, y1 + y, val)", "def draw(self, base, level):\n\n a = base.a\n b = base.b\n\n if level > 0:\n delta = base.b - base.a\n px = a.x + delta.x / 3\n py = a.y + delta.y / 3\n rx = a.x + 2 * delta.x / 3\n ry = a.y + 2 * delta.y / 3\n p = Point(px, py)\n r = Point(rx, ry)\n q = Point(rx, ry)\n q.rotate_deg(60, p)\n self.draw(Line(a,p), level-1)\n self.draw(Line(p,q), level-1)\n self.draw(Line(q,r), level-1)\n self.draw(Line(r,b), level-1)\n else:\n self.container.window.create_line(a.x, a.y, b.x, b.y)", "def draw_lines(self):\n for x_cord in range(0, Dimension.SCREEN_WIDTH.value, Dimension.SQUARE_WIDTH.value):\n pg.draw.line(self.window, Colors.BLACK.value, (x_cord, 0), (x_cord, Dimension.SCREEN_HEIGHT.value))\n\n for y_cord in range(0, Dimension.SCREEN_HEIGHT.value, Dimension.SQUARE_HEIGHT.value):\n pg.draw.line(self.window, Colors.BLACK.value, (0, y_cord), (Dimension.SCREEN_WIDTH.value, y_cord))\n\n pg.display.update()", "def draw_lines(self, x, y,\n north=False, south=False, east=False, west=False,\n color=\"black\"):\n upper_left = (y * self.scale, x * self.scale)\n upper_right = (upper_left[0] + self.scale, upper_left[1])\n lower_left = (upper_left[0], upper_left[1] + self.scale)\n lower_right = (upper_left[0] + self.scale, upper_left[1] + self.scale)\n\n if north:\n self.canvas.create_line(*upper_left, *upper_right, fill=color)\n\n if south:\n self.canvas.create_line(*lower_left, *lower_right, fill=color)\n\n if east:\n self.canvas.create_line(*upper_right, *lower_right, fill=color)\n\n if west:\n self.canvas.create_line(*upper_left, *lower_left, fill=color)", "def plot_line(self,x_0,y_0,x_1,y_1,col=\"black\",line_width=1,line_type=\"solid\"):\n self._fig.add_shape(\n go.layout.Shape(\n type=\"line\",\n x0=x_0,\n y0=y_0,\n x1=x_1,\n y1=y_1,\n line=dict(\n color=col,\n width=line_width,\n dash=line_type\n )\n )\n )", "def create_line(self, x1, y1, x2, y2, style=None, parent=None):\n attrs = {'d': 'M %5f %5f L %5f %5f' % (x1, y1, x2, y2)}\n return self.create_path(attrs, style, parent)", "def set_mode_line():\n global DRAW_MODE, CURRENT_LABEL, SHAPE_SIZE\n global mouse_pos, line_start_pos\n\n if DRAW_MODE==\"line\":\n # draw the line on the mask\n cv.line(source_msk, line_start_pos, mouse_pos, CURRENT_LABEL, thickness=SHAPE_SIZE)\n\n line_start_pos = mouse_pos\n DRAW_MODE=\"line\"", "def drawLine(img, start, end, color = (0,0,255), thickness = 3):\n\tcv2.line(img, start, end, color, thickness)", "def update_line(self):\n self._draw_line_text()\n self._draw_status()\n 
self._line_listbox.set_focus(self.model.l_index)", "def set_current_tool_to_draw_line_by_dragging(self, line_id=None):\n\n self.variables.current_shape_id = line_id\n self.show_shape(line_id)\n self.variables.active_tool = TOOLS.DRAW_LINE_BY_DRAGGING\n self.variables.current_tool = TOOLS.DRAW_LINE_BY_DRAGGING", "def _plot_line(self, image, x1, y1, x2, y2, width, color):\n\n draw = ImageDraw.Draw(image, \"RGBA\")\n draw.line([x1, y1, x2, y2], fill=color, width=width)\n del draw\n\n return image", "def DrawHLine(pintX, pintY):\n for i in range(0, pintX):\n display.pixel(i, pintY, 1)", "def draw(self):\n if len(self.__points) >= 2:\n self._total_length = 0\n for i in range(len(self.__points) - 1):\n p1 = self.__points[i]\n p2 = self.__points[i + 1]\n coords = self.__line_segment(p1, p2)\n if not coords is None:\n pyglet.graphics.draw_indexed(4, pyglet.gl.GL_TRIANGLES,\n [0, 1, 2, 1, 2, 3],\n ('v2i', coords),\n ('c4b', self.color * 4)\n )\n coords = self.__line_cap(p2)\n pyglet.graphics.draw_indexed(4, pyglet.gl.GL_TRIANGLES,\n [0, 1, 2, 0, 2, 3],\n ('v2i', coords),\n ('c4b', self.color * 4)\n )", "def pygDraw(self, **kwargs):\n # --> following code from nodebox.context\n #fill, stroke, strokewidth, strokestyle = color_mixin(**kwargs)\n #if stroke is not None and strokewidth > 0:\n # glColor4f(stroke[0], stroke[1], stroke[2], stroke[3] * _alpha)\n # glLineWidth(strokewidth)\n # glLineDash(strokestyle)\n \n x0,y0 = float(self.point_0.x), float(self.point_0.y) # first point\n x1,y1 = float(self.point_1.x), float(self.point_1.y) # second point\n \n glBegin(GL_LINE_LOOP)\n glVertex2f(x0, y0)\n glVertex2f(x1, y1)\n glEnd()", "def drawHorizLine(self, y, x1, x2, val):\r\n for x in range(x2 - x1):\r\n self.setPixel(x1 + x, y, val)", "def _createline(self):\n return self.cv.create_line(0, 0, 0, 0, fill=\"\", width=2,\n capstyle = TK.ROUND)", "def begining_of_line():\r\n set_point(point().begining_of_line())", "def graphicsDraw(self, win, center):\n\t\tlastPoint = None\n\t\tfor p in self.points:\n\t\t\tthisPoint = Point(p[0] + center.x, p[1] + center.y)\n\t\t\tif lastPoint is not None:\n\t\t\t\tline = Line(lastPoint, thisPoint)\n\t\t\t\tline.draw(win)\n\t\t\tlastPoint = thisPoint", "def line(self, xi: int, yi: int, xf: int, yf: int, color: int):\n if xi == xf:\n self.vline(xi, yi, yf - yi, color)\n elif yi == yf:\n self.hline(xi, yi, xf - xi, color)\n else:\n m = (yf - yi) / (xf - xi)\n\n for x in range(xi, xf + 1):\n y = int(m * (x - xi) + yi)\n self.pixel(x, y, color)" ]
[ "0.7670841", "0.75497407", "0.7464488", "0.7421213", "0.7273697", "0.72548485", "0.7247189", "0.72389364", "0.7186178", "0.7064833", "0.7045982", "0.70430654", "0.7028041", "0.6984681", "0.69775695", "0.69517475", "0.6920274", "0.6900129", "0.68952066", "0.68559307", "0.68300205", "0.6821213", "0.68169993", "0.67988443", "0.67856073", "0.67673016", "0.6748545", "0.6737692", "0.67123455", "0.66853106", "0.66658014", "0.6655162", "0.66421723", "0.6641844", "0.6631204", "0.6606667", "0.6597738", "0.65967566", "0.65746933", "0.6572041", "0.6569999", "0.65627694", "0.65473706", "0.6522896", "0.6522425", "0.6492143", "0.6469257", "0.6436036", "0.64214575", "0.6415949", "0.63999707", "0.63839996", "0.6381151", "0.6376175", "0.63750017", "0.63652444", "0.6361976", "0.6335485", "0.6333745", "0.6329606", "0.6329284", "0.63269985", "0.6323502", "0.6320413", "0.6318831", "0.63117", "0.63107693", "0.6307459", "0.63066846", "0.62979233", "0.6296786", "0.6296595", "0.6287209", "0.628284", "0.62446016", "0.62414974", "0.62277967", "0.62249255", "0.6211589", "0.6202042", "0.6197626", "0.6194554", "0.61892796", "0.61839825", "0.61743", "0.61656314", "0.6160985", "0.61592114", "0.6142753", "0.6139319", "0.612613", "0.6120541", "0.61152947", "0.6104513", "0.60942703", "0.60941964", "0.6092476", "0.6082015", "0.60809785", "0.6071635" ]
0.6691237
29
Draw a circle of radius r centered on the pen.
def drawCircle(self, r): assert (type(r) in [int, float]), "parameter r:%s is not a valid number" % `r` x = self._turtle.xcor() y = self._turtle.ycor() # Move the pen into position fstate = self._turtle.pendown() if fstate: self._turtle.penup() self._turtle.setposition(x, y-r) if fstate: self._turtle.pendown() # Draw the circle and fill if necessary self._turtle.circle(r) self.flush() self._turtle.forward(0) # Return the pen to the position if fstate: self._turtle.penup() self._turtle.setposition(x, y) if fstate: self._turtle.pendown()
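Two details in the positive above are worth flagging. It is Python 2 (the assert message uses backtick repr shorthand), and `fstate = self._turtle.pendown()` can never capture the pen state, since the standard turtle module's pendown() returns None; the author most likely meant isdown(). A Python 3 sketch with that correction, dropping the class-level flush() and the forward(0) nudge because their purpose is not shown on this page:

```python
# Hedged Python 3 sketch of the drawCircle positive above. Assumptions:
# isdown() replaces the original pendown() state query, and the flush() /
# forward(0) bookkeeping is omitted since its definition is not shown here.
import turtle

class Pen:
    def __init__(self):
        self._turtle = turtle.Turtle()

    def drawCircle(self, r):
        """Draw a circle of radius r centered on the pen."""
        assert type(r) in [int, float], "parameter r:%s is not a valid number" % repr(r)
        x, y = self._turtle.xcor(), self._turtle.ycor()
        was_down = self._turtle.isdown()
        if was_down:                      # move to the circle's bottom without drawing
            self._turtle.penup()
        self._turtle.setposition(x, y - r)
        if was_down:
            self._turtle.pendown()
        # with the pen at (x, y - r) and the default heading, circle(r)
        # traces a circle centered on the original position (x, y)
        self._turtle.circle(r)
        if was_down:                      # return to the center without drawing
            self._turtle.penup()
        self._turtle.setposition(x, y)
        if was_down:
            self._turtle.pendown()
```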
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def drawCircle(x, y, r):\n pen1.up()\n pen1.goto(x,y)\n pen1.down()\n pen1.circle(r)", "def circle(draw, centrex, centrey, radius, color=\"#AAAAAAFF\") -> None:\n # convert cartesian centre to pixel centre\n cx, cy = pixelcoord(centrex, centrey)\n # top left and bottom right coordinates\n rect = [(cx-radius, cy-radius), (cx+radius, cy+radius)]\n # draw\n draw.arc(rect, 0, 360, color)", "def draw_circle(c):\n turtle.circle(c.radius)", "def draw_circle(c):\n turtle.circle(c.radius)", "def draw_circle(self, x0, y0, r, color=None):\n f = 1 - r\n ddF_x = 1\n ddF_y = -2 * r\n x = 0\n y = r\n\n self.set(x0, y0 + r, color)\n self.set(x0, y0 - r, color)\n self.set(x0 + r, y0, color)\n self.set(x0 - r, y0, color)\n\n while x < y:\n if f >= 0:\n y -= 1\n ddF_y += 2\n f += ddF_y\n x += 1\n ddF_x += 2\n f += ddF_x\n\n self.set(x0 + x, y0 + y, color)\n self.set(x0 - x, y0 + y, color)\n self.set(x0 + x, y0 - y, color)\n self.set(x0 - x, y0 - y, color)\n self.set(x0 + y, y0 + x, color)\n self.set(x0 - y, y0 + x, color)\n self.set(x0 + y, y0 - x, color)\n self.set(x0 - y, y0 - x, color)", "def __drawCircle(self, center, radius, color, drawwidth=1):\n radius *= self.viewZoom\n if radius < 1: radius = 1\n else: radius = int(radius)\n\n pygame.draw.circle(self.screen, color, center, radius, drawwidth)", "def draw_circle(self, x, y, radius, color=Color['white']):\n pygame.draw.circle(self.display, color, (x, y), radius)", "def circle(self, center, radius, color=(255, 255, 255), width=0):\n center = self._transform(center)\n pygame.draw.circle(self.screen, color, center, radius, width)", "def draw_circle(self, color, center, radius, width):\n _c = self.T.itrans(center)\n pg.draw.circle(self.screen, color, _c(), radius, width)", "def DrawCircle(self, center, radius, color, drawwidth=1):\r\n radius *= self.zoom\r\n if radius < 1:\r\n radius = 1\r\n else: radius = int(radius)\r\n\r\n pygame.draw.circle(self.surface, color.bytes, center, radius, drawwidth)", "def circle(self, x, y, r, solid = False):\n px = 0\n py = r\n d = 1 - 2 * r\n err = 0\n while py >= 0:\n if solid:\n for i in range(x - px, x + px + 1):\n self.pixel(i, y + py, 1)\n self.pixel(i, y - py, 1)\n else:\n self.pixel(x + px, y + py, 1)\n self.pixel(x + px, y - py, 1)\n self.pixel(x - px, y + py, 1)\n self.pixel(x - px, y - py, 1)\n err = 2 * (d + py) - 1\n if d < 0 and err <= 0:\n px += 1\n d += 2 *px + 1\n else:\n err = 2 * (d - px) - 1\n if d > 0 and err > 0:\n py -= 1\n d += 1 - 2 * py\n else:\n px += 1\n d += 2 * (px - py)\n py -= 1", "def drawCircle(r):\r\n # create a turtle-painter instance using turtle library\r\n painter = turtle.Turtle()\r\n\r\n # turtle properties (we want the turtle to look nicer)\r\n painter.shape(\"turtle\") # setting painter shape to turtle\r\n painter.shapesize(3,3,1) # making turtle-painter 3 times bigger\r\n painter.color(\"limegreen\") # setting painting color to limegreen\r\n\r\n # move the turtle-painter to ready position\r\n painter.pu() # we just move without drawing anything\r\n x0 = coordX(r, 0) # compute initial coordinate x0\r\n y0 = coordY(r, 0) # compute initial coordinate y0\r\n\r\n painter.goto(x0,y0) # move the turtle to the ready position\r\n \r\n # tell the turtle to put pencil down on the paper\r\n painter.pd()\r\n\r\n # draw a circle\r\n for theta in range(0, 361, 1):\r\n x = coordX(r, theta, useradians = False)\r\n y = coordY(r, theta, useradians = False)\r\n\r\n painter.goto(x,y)\r\n\r\n # tell the turtle to put pencil up from the paper\r\n painter.pu()\r\n # hide the painter after he finished 
to draw\r\n painter.ht()\r\n print(\"Draw a circle of r = \", r )", "def DrawSolidCircle(self, center, radius, axis, color):\r\n radius *= self.zoom\r\n if radius < 1:\r\n radius = 1\r\n else: radius = int(radius)\r\n\r\n pygame.draw.circle(self.surface, (color/2).bytes+[127],\r\n center, radius, 0)\r\n pygame.draw.circle(self.surface, color.bytes, center, radius, 1)\r\n pygame.draw.aaline(self.surface, (255, 0, 0), center,\r\n (center[0] - radius*axis[0], center[1] +\r\n radius*axis[1]))", "def _circle(i, r=.05):\n\treturn Circle((i, 0), r, fill=True, color='black')", "def plot_circle(r,**kw):\n try:\n fmt = kw.pop('fmt')\n except:\n fmt='k'\n try:\n label = kw.pop('label')\n except:\n label = None\n x = num.arange(-r,r+0.01,0.01)\n y = num.sqrt(num.fabs(r**2. - x**2.))\n pyplot.plot(x,y,fmt,**kw)\n pyplot.plot(x,-y,fmt,label=label,**kw)", "def draw_circle(color, position, radius, width=0):\n #print('(color={}, position={}, radius={}, width={})')\n pygame.draw.circle(screen, color, position, radius, width)", "def draw_circle(self, color, position, radius, width = 0, anchor= 'topleft'):\n color = spyral.color._determine(color)\n offset = self._calculate_offset(anchor)\n pygame.draw.circle(self._surf, color, position + offset, radius, width)", "def draw_circle(centerx, centery, radius):\r\n global _canvas\r\n global _current_color\r\n if _canvas == None:\r\n raise RuntimeError(\"Canvas is not open yet.\")\r\n else:\r\n circle = Circle()\r\n circle.move(centerx, centery)\r\n circle.setRadius(radius)\r\n _set_not_filled(circle)\r\n _canvas.add(circle)", "def drawCircle(t, x, y, radius):\r\n t.up()\r\n t.goto(x + radius, y)\r\n t.setheading(90)\r\n t.down()\r\n for count in range(120):\r\n t.left(3)\r\n t.forward(2.0 * math.pi * radius / 120.0)", "def create_circle(self, x, y, r, **kwargs):\n return self.create_oval(*self.circ_to_oval(x, y, r), **kwargs)", "def draw_circle_filled(center_x, center_y, radius, color):\n width = radius\n height = radius\n draw_ellipse_filled(center_x, center_y, width, height, color)", "def circle(radius, center, dim):\n kern = np.zeros(shape=(radius*2,radius*2))\n kern[draw.circle(r=radius, c=radius, radius=radius)] = 1\n return kern", "def draw_circle(self, center, radius, line_width, line_color, fill_color=\"\"):\n line_color, fill_color = check_color(line_color), check_color(fill_color)\n SToval.oval(self.canvas, center, radius, line_width, line_color, fill_color)", "def draw_circle(t, circle):\n t.pu()\n t.goto(circle.center.x, circle.center.y)\n t.pd()\n polygon.circle(t, circle.radius)", "def circle(self, center, rad):\n self.gc.show_circles(center[0], center[1], rad, facecolor='none', edgecolor=self.color, linewidth=0.5)", "def circle(self, pos, radius, draw=None, fill=\"black\", lw=0, options=None, kwoptions=None):\n\n fill = norm_colour(fill)\n self.use_colour(fill)\n\n draw = norm_colour(draw)\n if draw is None:\n draw = fill\n self.use_colour(draw)\n\n self._commands.append(rf\"\\filldraw[line width={lw},\"\n rf\"{fmt_options(options, kwoptions, draw=draw, fill=fill)}] \"\n rf\" {fmt_point(pos)} circle ({radius});\")", "def circle(self, center_x, center_y, radius, color):\n x = radius - 1\n y = 0\n d_x = 1\n d_y = 1\n err = d_x - (radius << 1)\n while x >= y:\n self.pixel(center_x + x, center_y + y, color)\n self.pixel(center_x + y, center_y + x, color)\n self.pixel(center_x - y, center_y + x, color)\n self.pixel(center_x - x, center_y + y, color)\n self.pixel(center_x - x, center_y - y, color)\n self.pixel(center_x - y, center_y - x, color)\n 
self.pixel(center_x + y, center_y - x, color)\n self.pixel(center_x + x, center_y - y, color)\n if err <= 0:\n y += 1\n err += d_y\n d_y += 2\n if err > 0:\n x -= 1\n d_x += 2\n err += d_x - (radius << 1)", "def circle(self, x, y, r, cls=None, style=None):\n x, y, r = self._meta.units(x, y, r)\n cls_str = 'class=\"%s\" ' % cls if cls else ''\n style_str = 'style=\"%s\" ' % self._meta.make_style(style) if style else ''\n self.elements.append(\"\"\"\n <circle cx=\"%s\" cy=\"%s\" r=\"%s\" %s%s/>\n \"\"\".strip() % (\n x, y, r, cls_str, style_str\n ))\n return self", "def circle(self, x, y, r, cls=None, style=None):\n x, y, r = self._meta.units(x, y, r)\n cls_str = 'class=\"%s\" ' % cls if cls else ''\n style_str = 'style=\"%s\" ' % self._meta.make_style(style) if style else ''\n self.elements.append(\"\"\"\n <circle cx=\"%s\" cy=\"%s\" r=\"%s\" %s%s/>\n \"\"\".strip() % (\n x, y, r, cls_str, style_str\n ))\n return self", "def draw_circle_outline(center_x, center_y, radius, color, border_width=1):\n width = radius\n height = radius\n draw_ellipse_outline(center_x, center_y, width, height,\n color, border_width)", "def circle(t, r):\n circumference = math.pi * 2 * r\n n = 60\n length = circumference / n\n polygon(t, length, n)", "def circle(self, p, radius, **kwargs):\n cx, cy = self._sky2img(p)\n self._draw.ellipse([cx-radius, cy-radius, cx+radius, cy+radius], **kwargs)", "def dot(self, pos, r=0.2, **options):\n underride(options, fill='white', outline='orange')\n return self.circle(pos, r, **options)", "def draw_circle(self, color, position, radius, width=0, anchor='topleft'):\n offset = self._calculate_offset(anchor)\n pygame.draw.circle(self._surf, color, (position + offset).floor(),\n radius, width)\n self._version += 1\n spyral.util.scale_surface.clear(self._surf)\n return self", "def circle(self, clear_screen=True, x=50, y=50, radius=40, fill_color='black', outline_color='black'):\n\n if clear_screen:\n self.clear()\n\n x1 = x - radius\n y1 = y - radius\n x2 = x + radius\n y2 = y + radius\n\n return self.draw.ellipse((x1, y1, x2, y2), fill=fill_color, outline=outline_color)", "def DrawCircle(*args, **kwargs):\n return _gdi_.DC_DrawCircle(*args, **kwargs)", "def plot_circle(self, radius, c=color, ax=None, label=True, fontsize=12, **kwargs):\n if ax is None:\n ax = plt.gca()\n circle = Circle(self.coords, radius, fill=None, ec=c, **kwargs)\n ax.add_artist(circle)\n if label and self.i is not None:\n plt.text(\n *(np.array(self.coords) - [0, 1.5 * radius]),\n self.i,\n c=c,\n ha=\"center\",\n va=\"top\",\n fontsize=fontsize,\n )", "def DrawCircle(*args, **kwargs):\n return _gdi_.PseudoDC_DrawCircle(*args, **kwargs)", "def circle(self):\n return circle(self.N, self.o, self.r)", "def circleCirc(radius):\n radius = float(radius)\n return 2*math.pi*radius", "def draw_neuron(self, center, radius, color):\r\n self.pen.up()\r\n self.pen.color(color)\r\n self.pen.goto(center)\r\n\r\n self.pen.setheading(0)\r\n self.pen.forward(radius)\r\n self.pen.setheading(90)\r\n\r\n # draw circle\r\n self.pen.begin_fill()\r\n self.pen.pendown()\r\n self.pen.circle(radius)\r\n self.pen.end_fill()\r\n\r\n self.pen.color('black')\r\n self.pen.up()\r\n self.pen.goto(center)\r\n self.pen.setheading(0)", "def draw_circle(self, circle, color, thickness=2):\n center = self._format_point(circle.center())\n opencv.circle(self.img, center.tuple(), int(circle.radius()), color.bgra(), thickness=thickness)", "def draw_filled_circle(centerx, centery, radius):\r\n global _canvas\r\n global _current_color\r\n if _canvas == 
None:\r\n raise RuntimeError(\"Canvas is not open yet.\")\r\n else:\r\n circle = Circle()\r\n circle.move(centerx, centery)\r\n circle.setRadius(radius)\r\n _set_filled(circle)\r\n _canvas.add(circle)", "def plot(self, radius=15, **kwargs):\n self.plot_circle(radius, **kwargs)", "def make_circle(x, y, r):\n\tnew_circle = Circle()\n\tnew_circle.x = x\n\tnew_circle.y = y\n\tnew_circle.r = r\n\treturn new_circle", "def drawCircle(x,y,radius,ucoords=1):\n if ucoords:\n dislin.rlcirc(x,y,radius)\n else:\n dislin.circle(x,y,radius)", "def circle(radius, extent=360):\n turtleTmp.circle(radius, extent)", "def fillcircle(draw, centrex, centrey, radius, color=\"#AAAAAAFF\") -> None:\n # convert cartesian centre to pixel centre\n cx, cy = pixelcoord(centrex, centrey)\n # top left and bottom right coordinates, must never reverse\n rect = [(cx-radius, cy-radius), (cx+radius, cy+radius)]\n # draw, same color for outline and fill\n draw.ellipse(rect, color, color)", "def circle(r=0):\n\tteta = 2*pi*random()\n\tx = (r+1)*cos(teta) + L//2\n\ty = (r+1)*sin(teta) + L//2\n\t\n\ti = int(x) + 1\n\tj = int(y) + 1\n\tprint(r)\n\treturn i,j", "def draw_circle_filled(self, x0, y0, r, color=None):\n self._draw_fast_vline(x0, y0 - r, 2 * r + 1, color)\n self._draw_circle_filled_helper(x0, y0, r, 3, 0, color)", "def wdraw_circle(self, wx, wy, dradius, fill, outline):\r\n dx, dy = self.w_to_d(wx, wy)\r\n self.canvas.create_oval(dx - dradius, dy - dradius, dx + dradius, dy + dradius, fill=fill, outline=outline)", "def draw_full_circle(x, y, radius):\n iterations = int(2 * radius * pi)\n s = sin(2 * pi / iterations)\n c = cos(2 * pi / iterations)\n\n dx, dy = radius, 0.\n\n glBegin(GL_TRIANGLE_FAN)\n glVertex2f(x, y)\n for _ in range(iterations + 1):\n glVertex2f(x + dx, y + dy)\n dx, dy = (dx * c + dy * s), (dy * c - dx * s)\n glEnd()", "def circle(self, radius, extent=None, steps=None):\n super().circle(radius, extent, steps)", "def draw_circle(mat, center, radius, color=(0, 0, 255), thickness=1):\n cv2.circle(mat, center, radius, color, thickness=thickness)", "def fill_circle(self, x0, y0, r, color):\n f = 1 - r\n dx = 1\n dy = -r - r\n x = 0\n y = r\n self.vline(x0, y0 - r, 2 * r + 1, color)\n while x < y:\n if f >= 0:\n y -= 1\n dy += 2\n f += dy\n x += 1\n dx += 2\n f += dx\n self.vline(x0 + x, y0 - y, 2 * y + 1, color)\n self.vline(x0 - x, y0 - y, 2 * y + 1, color)\n self.vline(x0 - y, y0 - x, 2 * x + 1, color)\n self.vline(x0 + y, y0 - x, 2 * x + 1, color)", "def _generate_circle(self, center, radius):\n assert len(center) in [2, 3], 'Center of circle must have 2 or 3 elements'\n assert radius > 0, 'Radius must be greater than zero'\n return Point(*center).buffer(radius)", "def draw(self):\n radius = self.width / 2\n center_x = self.x + radius\n center_y = self.y + radius\n arcade.draw_circle_filled(center_x, center_y, radius, self.fill.color)\n arcade.draw_circle_outline(\n center_x, center_y, radius, self.pen.color, 3)", "def circle(center, radius, *args, **kwargs):\n return patch.Circle(center, radius, *args, **kwargs)", "def draw(self):\r\n arcade.draw_circle_filled(self.center.x, self.center.y, self.radius, TARGET_COLOR)", "def showCircle(self, window, color=None, radius=None, fill=None, conversion=None):\n if not color: color = self.color\n if not radius: radius = self.radius\n if not fill: fill = self.fill\n if not conversion: conversion = self.conversion\n window.draw.circle(window.screen, color, [self.x, self.y], radius, fill, conversion)", "def circle(cls, radius, position, open_circle=False):\n\n 
nb_points = 2*np.pi*radius/1\n points1 = radius*np.transpose(np.concatenate(([np.cos(2*np.pi*np.arange(0,nb_points+1)/nb_points)],[np.sin(2*np.pi*np.arange(0,nb_points+1)/nb_points)]),axis=0))\n \n for y in range(points1.shape[0]):\n points1[y,:]=points1[y,:]+position\n \n circle_obj = cls()\n circle_obj.coord = [points1]\n circle_obj.open = open_circle\n return circle_obj", "def draw(self, draw_circle):\n draw_circle(self.color, (int(self.position[0]), int(self.position[1])), self.size)", "def circle(radius = 10, angle_resolution = 2.5, layer = 0):\n D = Device(name = 'circle')\n t = np.linspace(0, 360, int(np.ceil(360/angle_resolution) + 1)) * pi/180\n xpts = (radius*cos(t)).tolist()\n ypts = (radius*sin(t)).tolist()\n D.add_polygon(points = (xpts, ypts), layer = layer)\n return D", "def get_nice_circle(x, y, radius, color=\"lightsteelblue\", facecolor=\"green\", alpha=.6, ax=None ):\n e = pl.Circle([x, y], radius)\n if ax is None:\n ax = pl.gca()\n ax.add_artist(e)\n e.set_clip_box(ax.bbox)\n e.set_edgecolor( color )\n e.set_linewidth(3)\n e.set_facecolor( facecolor ) # \"none\" not None\n e.set_alpha( alpha )\n return e", "def DrawCirclePoint(*args, **kwargs):\n return _gdi_.PseudoDC_DrawCirclePoint(*args, **kwargs)", "def DrawCirclePoint(*args, **kwargs):\n return _gdi_.DC_DrawCirclePoint(*args, **kwargs)", "def drawCircle( image, center, radius):\n fig = plt.figure()\n ax = fig.add_subplot(111)\n # optionaly first draw image\n if image is not None:\n if np.size(image) == np.size(image, 0) * np.size(image, 1):\n ax.imshow( image, cmap = 'gray' ) # grayscale\n #ax.imshow( image ) # grayscale\n else:\n ax.imshow( image ) # rgb\n \n angles = np.linspace( 0, 2*np.pi, 100, endpoint=False )\n x = center[0] + radius*np.cos( angles )\n y = center[1] + radius*np.sin( angles )\n \n ax.plot( x, y, 'r.' 
)\n # ax.set_xlabel('x')\n # ax.set_ylabel('y')\n # ax.set_title('Simple XY point plot')\n plt.show()", "def circle(self, xo: int, yo: int, radius: int, color: int, fill=False):\n for x in range(xo - radius, xo + radius + 1):\n square = sqrt(radius ** 2 - (x - xo) ** 2)\n y = yo + square\n self.pixel(x, floor(y), color)\n y = yo - square\n self.pixel(x, floor(y), color)\n for y in range(yo - radius, yo + radius + 1):\n square = sqrt(radius ** 2 - (y - yo) ** 2)\n x = xo + square\n self.pixel(floor(x), y, color)\n x = xo - square\n self.pixel(floor(x), y, color)\n if fill:\n if radius > 1:\n self.circle(xo, yo, radius - 1, color, True)\n else:\n self.circle(xo, yo, radius - 1, color, False)", "def draw(self):\n pygame.draw.circle(screen, self.color, (int(self.x), int(self.y)),\n self.radius)", "def generate_circle(R,center,N=100,t0=0.0,t1=2.0*np.pi):\r\n theta = np.linspace(t0,t0+t1,N)\r\n y = R*np.sin(theta) + center[1]\r\n x = R*np.cos(theta) + center[0]\r\n return x,y", "def circle(self,image,radius,i,j,c_x,c_y):\r\n major_axis=radius\r\n minor_axis=radius\r\n self.ellipse(image,major_axis,minor_axis,i,j,c_x,c_y)", "def circle(x: float, y: float, r: float, quantization: float):\n\n n = math.ceil(2 * math.pi * r / quantization)\n angle = np.array(list(range(n)) + [0]) / n * 2 * math.pi\n return LineCollection([r * (np.cos(angle) + 1j * np.sin(angle)) + complex(x, y)])", "def plot_circle(self):\n if self.lastmouse is not None:\n pygame.gfxdraw.circle(self.screen,\n self.lastmouse[0], self.lastmouse[1],\n int(self.drawsize), (255, 0, 255))", "def drawCircle(img, center, radius = 3, color = (0,0,255), fill = -1):\n\tcv2.circle(img, center, radius, color, fill)", "def filled_circle(shape, radius, center=None):\n\tr2 = radius*radius\n\tif center is None:\n\t\t### set to center of array\n\t\tcenter = (shape[0]-1)/2.0,(shape[1]-1)/2.0\n\tdef func(i0, i1):\n\t\tii0 = i0 - center[0]\n\t\tii1 = i1 - center[1]\n\t\trr2 = ii0**2 + ii1**2\n\t\tc = numpy.where(rr2<r2, 0.0, 1.0)\n\t\treturn c\n\treturn numpy.fromfunction(func, shape)", "def draw_circle(self, particle, color='black', fill=False, label=False):\n\n circle = plt.Circle(self.positions[particle],\n self.diameters[particle]/2, color=color, fill=fill,\n zorder=0) # circle artist representing particle\n self.ax.add_artist(circle)\n\n for dim in range(2):\n if (np.abs(self.positions[particle][dim]) >\n self.box_size/2 - self.diameters[particle]/2):\n newPosition = self.positions[particle].copy()\n newPosition[dim] -= (np.sign(self.positions[particle][dim])\n *self.box_size)\n circle = plt.Circle(newPosition,\n self.diameters[particle]/2, color=color, fill=fill,\n zorder=0) # circle artist representing particle\n self.ax.add_artist(circle)\n\n if label:\n self.ax.annotate(\n \"%i\" % particle, xy=self.positions[particle], ha=\"center\")", "def circle_area(r):\n if r < 0:\n raise ValueError(\"Radius cannot be negative\")\n\n return pi*(r**2)", "def circle(self, radius, extent=360):\n temp = self.bearing\n self.b_change = 0;\n tempSpeed = self.speedVar\n self.speedVar = 1\n\n for i in range(0, (extent//2)):\n n = math.fabs(math.radians(self.b_change) * radius)\n if(radius >= 0):\n self.forward(n)\n self.left(2)\n else:\n self.forward(n)\n self.right(2)\n if(radius >= 0):\n self.bearing = (temp + extent)\n else:\n self.bearing = (temp - extent)\n self.speedVar = tempSpeed", "def plot_circle(radius=1.0, centre=[0,0], height=0, *args, **kwargs):\n\n deg = np.linspace(0, 360, 361)\n rad = np.deg2rad(deg)\n x = centre[0] + 
np.sqrt((radius**2-height**2))*np.cos(rad)\n y = centre[1] + np.sqrt((radius**2-height**2))*np.sin(rad)\n plt.plot(x, y, *args, **kwargs)", "def circle(draw, bbox, thickness=4, loops=2, fill=(255,0,0)):\n offset = 0\n x1, y1, x2, y2 = bbox\n w, h = x2 - x1, y2 - y1\n x_c, y_c = x1 + w/2, y1 + h/2\n rot = noise(0.6)\n a, b = w, h\n for loop in range(loops):\n for r in np.arange(0, 2*pi + random.random(), 1/(max(w, h))):\n offset += noise()\n for i in range(thickness):\n x, y = ellipse_pt(r, x_c, y_c, a+i+offset, b+i+offset, rot)\n draw.point((x,y), fill=fill)\n a, b = a + 1, b + 1", "def create_circle(self, cx, cy, radius, style=None, parent=None):\n if parent is None:\n parent = self.current_parent\n if parent is not None:\n attrs = {'r': str(radius), 'cx': str(cx), 'cy': str(cy)}\n if style:\n attrs['style'] = style\n return etree.SubElement(parent, svgns('circle'), attrs)", "def createCircle(self, x, y, radius):\n # TODO (#2398) fix this to be top left coordinates, width, height\n return QtCore.QRectF(\n int(x - radius), int(y - radius), int(radius * 2), int(radius * 2)\n )", "def AddCircle(self,centerPnt,radius):\n\t\tcircle=self.Space.AddCircle(centerPnt,radius)\n\t\treturn circle", "def __drawPoint(self, p, size, color):\n self.__drawCircle(p, size / self.viewZoom, color, drawwidth=0)", "def get_circle_coords(center, r):\n circle = [[r, 180* phi/3.14159265] for phi in range(0, 180, 5)]\n circle = [pol2cart(p[0], p[1]) + (center[0], center[1]) for p in circle]\n return circle", "def DrawPoint(self, p, size, color):\r\n self.DrawCircle(p, size/self.zoom, color, drawwidth=0)", "def draw_ball(self):\n circle(screen, self.color, (self.x, self.y), self.r)", "def plotPoint(img, point, radius = 3, color = (0, 0, 255)):\n\tdrawCircle(img, point, radius = radius, color=color)", "def area_circle(r):\n return (r ** 2) * math.pi", "def draw_arc(self, color, position, size, start_angle, end_angle, border_width = 0, anchor = 'topleft'):\n color = spyral.color._determine(color)\n offset = self._calculate_offset(anchor, size)\n pygame.draw.arc(self._surf, color, (position + offset, size), start_angle, end_angle, border_width)", "def circle(x, r, a, b, x_lim):\n y = (b + np.sqrt(maximum(\n r ** 2 - ((x - a) ** 2) * (x >= x_lim[0]) * (x <= x_lim[1]))\n )) * (x >= x_lim[0]) * (x <= x_lim[1])\n return y", "def area_of_circle(r):\n a = r**2 * math.pi\n return a", "def area_of_circle(r):\n a = r**2 * math.pi\n return a", "def circle(center, perp_vect, radius, element_number=10):\n # tl = [0, 0.2, 0.4, 0.6, 0.8]\n tl = np.linspace(0, 1, element_number)\n\n # vector form center to edge of circle\n # u is a unit vector from the centre of the circle to any point on the\n # circumference\n\n # normalized perpendicular vector\n n = perp_vect / np.linalg.norm(perp_vect)\n\n # normalized vector from the centre to point on the circumference\n u = perpendicular_vector(n)\n u /= np.linalg.norm(u)\n\n pts = []\n\n for t in tl:\n # u = np.array([0, 1, 0])\n # n = np.array([1, 0, 0])\n pt = (\n radius * np.cos(t * 2 * np.pi) * u\n + radius * np.sin(t * 2 * np.pi) * np.cross(u, n)\n + center\n )\n\n pt = pt.tolist()\n pts.append(pt)\n\n return pts", "def wdraw_wcircle(self, wx, wy, wradius, fill, outline):\r\n x0, y0 = self.w_to_d(wx - wradius, wy - wradius)\r\n x1, y1 = self.w_to_d(wx + wradius, wy + wradius)\r\n self.canvas.create_oval(x0, y0, x1, y1, fill=fill, outline=outline)", "def plot(self, radius=None, **kwargs):\n self.plot_ellipse(radius, **kwargs)", "def addCircle(self, radius=5.0, value=1.0, cx=None, 
cy=None):\n self.fimage = None\n # Create a circle at the center.\n if cx == None:\n cx = self.nx/2.0\n if cy == None:\n cy = self.ny/2.0\n tmp = (self.xx - cx)**2 + (self.yy - cy)**2\n circle = numpy.where(tmp<=radius**2, value, 0)\n self.image += circle\n return", "def plot_circles(R,center,N=15):\r\n Ri = np.linspace(0,R,N+1)[1:]\r\n color = ['r--','b--','m--','y--']\r\n for i in range(N):\r\n xi, yi = generate_circle(Ri[i],center)\r\n j = i % 4\r\n plt.plot(xi,yi,color[j],alpha=0.3)", "def add_circle(self, r_center, c_center, radius, color=BLUE, image=np.full((640, 480, 3), BLACK)):\n circle = np.fromfunction(lambda r, c, _: (r - r_center) ** 2 + (c - c_center) ** 2 <= radius ** 2, image.shape)\n return np.where(circle, color, image)", "def draw(self):\n arcade.draw_circle_filled(self.position_x, self.position_y, self.radius,self.player_color)" ]
[ "0.8421582", "0.78745735", "0.78179467", "0.78179467", "0.7747963", "0.7729969", "0.77170885", "0.76944184", "0.7692904", "0.7636455", "0.7619516", "0.7604409", "0.74545527", "0.7423694", "0.7403186", "0.7349896", "0.73346525", "0.732114", "0.7312092", "0.7307844", "0.729076", "0.7290352", "0.72844625", "0.7284075", "0.72728354", "0.7221165", "0.7216204", "0.7195231", "0.7195231", "0.7181659", "0.71521765", "0.71244246", "0.708436", "0.7078405", "0.7039284", "0.7035548", "0.702965", "0.7004061", "0.69909376", "0.6983138", "0.6960886", "0.6945871", "0.69409466", "0.6940926", "0.6932741", "0.69273525", "0.6921371", "0.69125366", "0.6872142", "0.6870323", "0.68648875", "0.6821605", "0.6815134", "0.6724997", "0.66993546", "0.66902596", "0.6680127", "0.6672582", "0.6667268", "0.6644943", "0.66429377", "0.6625822", "0.66076297", "0.6601977", "0.6587318", "0.65522164", "0.6550786", "0.6544724", "0.650177", "0.64980006", "0.6491681", "0.647883", "0.6466757", "0.6456242", "0.64379865", "0.6421471", "0.64185774", "0.6396344", "0.639634", "0.63222754", "0.6307072", "0.62949824", "0.62877464", "0.6272781", "0.62681216", "0.6262214", "0.6253395", "0.6239744", "0.6234695", "0.62302965", "0.62281024", "0.62101775", "0.62101775", "0.61946416", "0.61906487", "0.6184565", "0.61745375", "0.61710846", "0.61308295", "0.61285996" ]
0.8723447
0
Deletes the pen's drawings from the window. This method does not move the pen or alter its attributes.
def clear(self):
    self._turtle.clear()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _clear_drawing(self) -> None:\n self.vertices.clear()\n self.edges.clear()\n self.subplot.clear()\n self.selected_element = None\n self.pressed_elements.clear()", "def __del__(self):\n self._screen._removePen(self)\n del self._turtle", "def _removePen(self,pen):\n if pen in self._pencils:\n self._pencils.remove(pen)", "def remove_drawing_rect(self):\n self.drawing_rect = QPolygonF()\n if self.connecting_rect:\n self.connecting_rect.setVisible(False)\n self.connecting_rect = None\n self.first_draw = True", "def _clear_drawing(self) -> None:\n self.vertices.clear()\n self.edges.clear()\n self.subplot.clear()\n self.subplot2.clear()", "def remove_drawing_poly(self):\n\n self.drawing_poly = QPolygonF()\n self.drawing_points_coords = []\n\n for p in self.drawing_points:\n p.setVisible(False)\n\n for line in self.connecting_line_list:\n line.setVisible(False)\n if self.connecting_line:\n self.connecting_line.setVisible(False)\n self.connecting_line = None\n self.first_draw = True\n if self.set_tooltip:\n self.set_tooltip(\"\")", "def penup(self):\n if not self._drawing:\n return\n self.pen(pendown=False)", "def delwin(self):\n\t\tfor c in self.components:\n\t\t\tc.delwin()\n\t\tself.win = None", "def undraw(self):\n \n if not self.canvas: return\n if not self.canvas.isClosed():\n #self.canvas.delete(self.id)\n _tkExec(self.canvas.delete, self.id)\n if self.canvas.autoflush:\n #_root.update()\n _tkCall(_root.update)\n pass\n self.canvas = None\n self.id = None", "def clear_press(self):\n\n for win in self.window.additional_windows:\n win.del_win()\n\n pos = self.window.physics_canvas.physics_objects\n self.window.physics_canvas.physics_objects = []\n\n for obj in pos:\n self.window.physics_canvas.canvas.delete(obj.canvas_id)\n\n for force in self.window.physics_canvas.interacting_forces:\n force.remove()\n\n for particle in self.window.physics_canvas.particles:\n self.window.physics_canvas.canvas.delete(particle.canvas_id)", "def clear_selected_shapes(self):\n self.shapes_to_draw = []", "def clear_visualization(self) -> None:\n if self._drawing_handle is not None:\n sim.simAddDrawingObjectItem(self._drawing_handle, None)", "def on_draw(event):\n # First, we clear the window in white\n # (it is necessary to do that at every frame)\n gloo.set_clear_color((1.0, 1.0, 1.0, 1.0))\n gloo.clear()\n program.draw(\"line_strip\")", "def draw(self, *args, **kwargs):\n self.window.clear()\n self.batch.draw()", "def clear_drawn_objects(self, view_manager):\n view = view_manager.get_view()\n for item in self._drawnObjects:\n view.removeItem(item)\n # clear the list:\n self._drawnObjects = []", "def clear(self):\n self._plt.clear()\n self._layer_items = {}", "def updateDraw(self):\r\n self.delConns()\r\n self.delTags()\r\n self.drawConns()\r\n self.drawTags()", "def clear_scene(self, event):\n self.shapes = []\n self.redraw()", "def clearCanvas():\n global c, coordinates\n c.delete(\"all\")\n drawMusicLines()\n coordinates.clear()", "def _clear(self):\n self._fillitem = self._fillpath = None\n for item in self.items:\n self.screen._delete(item)\n self.currentLineItem = self.screen._createline()\n self.currentLine = []\n if self._drawing:\n self.currentLine.append(self._position)\n self.items = [self.currentLineItem]\n self.clearstamps()", "def clear_canvas():\n self.parent_class.canvas.delete(\"all\")", "def do_paint(self):\r\n curses.curs_set(0)\r\n if self.win:\r\n self.paint()\r\n self.done_paint()", "def remove(self) -> None:\n self.map.remove_brush(self)", "def __drawAndErase(self, boxToDraw, 
boxToErase=None):\n dc = wx.ClientDC(self.drawingSurface)\n dc.BeginDrawing()\n dc.SetPen(wx.Pen(wx.WHITE, 1, wx.DOT))\n dc.SetBrush(wx.TRANSPARENT_BRUSH)\n dc.SetLogicalFunction(wx.XOR)\n if boxToErase:\n r = wx.Rect(*boxToErase)\n dc.DrawRectangleRect(r)\n\n r = wx.Rect(*boxToDraw)\n dc.DrawRectangleRect(r)\n dc.EndDrawing()", "def _clear(self, event):\n if self.ignore(event) or self._changed_canvas():\n return\n self._background = self.canvas.copy_from_bbox(self.ax.bbox)\n self.ax.draw_artist(self._buttons)\n if hasattr(self, \"_circles\"):\n for circle in self._circles:\n self.ax.draw_artist(circle)", "def delete_current_shape(self):\n print(\"deleting shape!\")\n self.shapes.remove(self.current_shape)\n self.current_shape = None\n self.changed()", "def erase_plot(self, line_position=0):\n self.axplot.lines.pop(line_position).remove\n self.fig.canvas.draw()\n return", "def cleanup(self):\r\n\r\n # Remove strip from window.\r", "def delete_selection(self):\n if self.selected_point_index is not None:\n del self.current_shape[self.selected_point_index]\n self.selected_point_index = None\n self.changed()", "def removePick(self):\n self.pnt = None\n vtkRenWin.delMarker(self.renWin)", "def OnRemoveAutomation(self, event, automation):\n\n self.app.RemoveAutomation(automation)\n for child in self.GetChildren():\n child.Destroy()\n\n self.Draw()", "def reset(self):\n TNavigator.reset(self)\n TPen._reset(self)\n self._clear()\n self._drawturtle()\n self._update()", "def remove_stroke(settings):\r\n if settings.fillstyle == 'border':\r\n settings.fillstyle = 'none'\r\n elif settings.fillstyle == 'fill+border':\r\n settings.fg = settings.bg\r\n settings.fillstyle = 'fill'", "def clear_strip(self):\r\n wlogger.log_info(\"Clear Strip\")\r\n for led in range(self.num_led):\r\n self.set_pixel(led, 0, 0, 0)\r\n self.show()", "def pen(self, pen=None, **pendict):\n _pd = {\"shown\" : self._shown,\n \"pendown\" : self._drawing,\n \"pencolor\" : self._pencolor,\n \"fillcolor\" : self._fillcolor,\n \"pensize\" : self._pensize,\n \"speed\" : self._speed,\n \"resizemode\" : self._resizemode,\n \"stretchfactor\" : self._stretchfactor,\n \"outline\" : self._outlinewidth,\n \"tilt\" : self._tilt\n }\n\n if not (pen or pendict):\n return _pd\n\n if isinstance(pen, dict):\n p = pen\n else:\n p = {}\n p.update(pendict)\n\n _p_buf = {}\n for key in p:\n _p_buf[key] = _pd[key]\n\n if self.undobuffer:\n self.undobuffer.push((\"pen\", _p_buf))\n\n newLine = False\n if \"pendown\" in p:\n if self._drawing != p[\"pendown\"]:\n newLine = True\n if \"pencolor\" in p:\n if isinstance(p[\"pencolor\"], tuple):\n p[\"pencolor\"] = self._colorstr((p[\"pencolor\"],))\n if self._pencolor != p[\"pencolor\"]:\n newLine = True\n if \"pensize\" in p:\n if self._pensize != p[\"pensize\"]:\n newLine = True\n if newLine:\n self._newLine()\n if \"pendown\" in p:\n self._drawing = p[\"pendown\"]\n if \"pencolor\" in p:\n self._pencolor = p[\"pencolor\"]\n if \"pensize\" in p:\n self._pensize = p[\"pensize\"]\n if \"fillcolor\" in p:\n if isinstance(p[\"fillcolor\"], tuple):\n p[\"fillcolor\"] = self._colorstr((p[\"fillcolor\"],))\n self._fillcolor = p[\"fillcolor\"]\n if \"speed\" in p:\n self._speed = p[\"speed\"]\n if \"resizemode\" in p:\n self._resizemode = p[\"resizemode\"]\n if \"stretchfactor\" in p:\n sf = p[\"stretchfactor\"]\n if isinstance(sf, (int, float)):\n sf = (sf, sf)\n self._stretchfactor = sf\n # if \"shearfactor\" in p:\n # self._shearfactor = p[\"shearfactor\"]\n if \"outline\" in p:\n self._outlinewidth = 
p[\"outline\"]\n if \"shown\" in p:\n self._shown = p[\"shown\"]\n if \"tilt\" in p:\n self._tilt = p[\"tilt\"]\n \n self._update()", "def clear(self):\n self.canvas = [[self.style] * self.cols for _ in range(self.lines)]", "def _clear(self, event):\n if self.ignore(event) or self._changed_canvas():\n return\n self._background = self.canvas.copy_from_bbox(self.ax.bbox)\n self.ax.draw_artist(self._checks)\n if hasattr(self, '_lines'):\n for l1, l2 in self._lines:\n self.ax.draw_artist(l1)\n self.ax.draw_artist(l2)", "def remove_canvas(self,):\r\n # reset plot view beofre change\r\n self.canvas.toolbar.home()\r\n # remove widgets from canvas_vlayout\r\n self.canvas_vlayout.removeWidget(self.toolbar)\r\n self.toolbar.close()\r\n self.canvas_vlayout.removeWidget(self.canvas)\r\n self.canvas.close()", "def erase(self):\n output = Output(self.stdout)\n\n output.cursor_backward(self._cursor_pos.x)\n output.cursor_up(self._cursor_pos.y)\n output.erase_down()\n output.reset_attributes()\n output.flush()\n\n self.reset()", "def clear(self):\n self.animation.stop()\n self.draw(0, 0, 0, 0, 0)", "def stopLineDrawing(self):\n taskMgr.remove(\"drawLineTask\")\n if self.line is not None:\n self.line.reset()\n self.line = None", "def remove_from_drawn(section: str, index: int):\r\n del drawn[section][index]", "def erase(self):\r\n self.in_arrow = None\r\n self.out_arrow = None", "def erase(self):\n self.view.erase_status('00_git_gutter')", "def clear(self):\n for i in range(len(self.canvas)):\n self.canvas[i] = 0", "def draw(self, renderer):\n renderer.drawRect(pyui.colors.black, self.windowRect)\n renderer.drawText( \"Strokes: %d\" % len(self.strokes), (650,50), pyui.colors.white)\n for start, end, color in self.strokes:\n renderer.drawLine(start[0], start[1], end[0], end[1], color)", "def clear(self, event):\r\n self.selectedRegion = None\r\n self.paint()", "def invalidate_canvas(self):\n\n if self.window:\n x, y, w, h = self.get_allocation()\n self.window.invalidate_rect((0,0,w,h), False)\n self.cr = self.window.cairo_create()\n self.cr.update_layout(self.pg)", "def clear(self):\n self._delayvalue = _CFG[\"delay\"]\n self._colormode = _CFG[\"colormode\"]\n self._delete(\"all\")\n self._bgpic = self._createimage(\"\")\n self._bgpicname = \"nopic\"\n self._tracing = 1\n self._updatecounter = 0\n self._turtles = []\n self.bgcolor(\"white\")\n for btn in 1, 2, 3:\n self.onclick(None, btn)\n self.onkeypress(None)\n for key in self._keys[:]:\n self.onkey(None, key)\n self.onkeypress(None, key)\n Myturtle._pen = None", "def clearScreen():\n dislin.erase()", "def delete(self):\n subprocess.run([\"axicli\", \"--mode\", \"manual\", \"-M\", \"enable_xy\"])\n subprocess.run([\"axicli\", \"--mode\", \"manual\", \"-M\", \"raise_pen\"])\n subprocess.run([\"axicli\", \"--mode\", \"manual\", \"-M\", \"disable_xy\"])\n\n return self.get()", "def clear_main(self):\n\n if self.terminate:\n return\n\n self.windows['MAIN'].erase()\n self.windows['MAIN'].border(' ', ' ',\n curses.ACS_HLINE, curses.ACS_HLINE,\n curses.ACS_HLINE, curses.ACS_HLINE,\n curses.ACS_HLINE, curses.ACS_HLINE)", "def clear(self):\n lines = self._lines\n image, bkg_image = self.image, self._image\n for line in lines: line.clear(image, bkg_image) #prej bkg_img\n self._cursor = 0", "def clear(self):\n self._frame.clear()\n self._turtles = []\n self._gpens = []", "def clear(self):\n for key in self.__columns:\n self.__widths[key] = 0\n self.__data = []\n self.__selectedRow = -1\n self.__formatString = \"\"\n self._window.clear()\n self.drawBorder()", "def 
wipe(self, segments):\n self.firstCoords = None\n self.moveLead(MIDDLE, MIDDLE)\n for seg in self.segs:\n self.can.delete(seg.getGraphicObject())\n seg.rmGraphicObject()\n self.segs = segments\n self.redrawSegs()", "def clear(self):\n self.pointscontroller.pop(self.currentlyadded)", "def pendown(self):\n if self._drawing:\n return\n self.pen(pendown=True)", "def on_draw(self):\n self.clear()\n self.manager.draw()", "def erase(self):\n\tself.state={}\n\tself.display(update_board=0)", "def __del__(self):\n self.clear()\n self._screen._removeTurtle(self)\n del self._turtle", "def draw(self):\n self.strip.show()", "def on_delete():\r\n del win.box[-1] # delete last line\r\n #del win.box[0:-1] # delete all lines \r", "def _destroy(self):\n root = self._root\n turtle.Turtle._pen = None\n turtle.Turtle._screen = None\n self._root = None\n self._canvas = None\n turtle.TurtleScreen._RUNNING = True\n root.destroy()", "def __editDelete(self):\n if QApplication.focusWidget() == e5App().getObject(\"Shell\"):\n e5App().getObject(\"Shell\").clear()\n else:\n self.activeWindow().clear()", "def DeleteWindow(self):\r\n\r\n if self._wnd:\r\n self._wnd.Destroy()\r\n self._wnd = None", "def removeScene(self):\n del self.scene, self.imgPixmapItem", "def on_draw(self):\n self.clear()\n self.gamestatemanager.peek().on_draw(self.get_size())", "def onChartRemoveSeries(self):\n self.chart().removeAllSeries()\n self.series = {}\n self.yaxis = {}\n self.pen = {}\n self.ymin = {}\n self.ymax = {}", "def _clear_window(self):\n self.buf[:] = []", "def mouseDubbleClicked(self,mouseEvent):\n\t\tself.canvas.nodeDelete(self)", "def undoChanges(self):\n Objects.undoChanges(self)\n self.draw()", "def delete_drawing(self, drawing_id=None):\n self.get_drawings()\n\n _drawing = self.get_drawing(drawing_id=drawing_id)\n if not _drawing:\n raise ValueError(\"drawing not found\")\n\n _url = (\n f\"{self.connector.base_url}/projects/{self.project_id}/drawings/\"\n f\"{_drawing['drawing_id']}\"\n )\n\n self.connector.http_call(\"delete\", _url)\n\n self.get_drawings()", "def on_draw():\n window.clear()\n world.draw()", "def __del__(self):\n try:\n self._frame._destroy()\n except:\n pass\n self._turtles = []\n self._pencils = []\n del self._frame", "def paint(self):\r\n self.canvas.delete(tkinter.ALL)\r\n self.visit(self.tree.root)", "def erase(self, x, y):\n self.console.draw_char(x, y, ' ', bg=None)", "def OnEraseBackground(self, event):\r\n \r\n if wx.Platform == \"__WXMAC__\":\r\n event.Skip()", "def clear(self):\n black = neo.Color(0,0,0)\n self.set_all(black)\n self.draw()", "def pointer_clear_focus(self) -> None:\n\n return lib.wlr_seat_pointer_clear_focus(self._ptr)", "def clear(self):\n self._x_prev = None\n self._y_prev = None", "def setDrawing(self):\n self.graph_drawing=[]", "def delConns(self):\r\n for line in self.connLines:\r\n self.canvasCirkt.delete(line)\r\n self.canvasCirkt.update()", "def clear_screen(self):\r\n lst_grid = self.root.grid_slaves()\r\n for widget in lst_grid:\r\n widget.destroy()\r\n lst_pack = self.root.pack_slaves()\r\n for widget in lst_pack:\r\n widget.destroy()", "def delete_ball(self):\r\n self.movement = \"\"\r\n self.canvas.delete(self.ball)", "def del_points(self):\r\n del self._points", "def clear(self):\n pygame.draw.rect(self.screen,BLACK,(0,0,WINDOWWIDTH,\n WINDOWHEIGHT))\n pygame.display.update()", "def destroy (self, *attrs):\n for attr in ('rect', 'draw_fn') + attrs:\n try:\n delattr(self, attr)\n except AttributeError:\n pass", "def bye(self):\n self._frame._destroy()\n self._turtles 
= []\n self._gpens = []\n del self._frame", "def discard(self) -> None:\n\n self.plot.close()", "def _on_key_press(self, event):\n # Remove the pending vertex if entering the 'move_vertex' or\n # 'move_all' mode\n if (not self._selection_completed\n and ('move_vertex' in self._state or\n 'move_all' in self._state)):\n self._xys.pop()\n self._draw_polygon()", "def EndDraw(self):\r\n\r\n pass", "def clear(self):\n self.clear_markers()\n self.l_marker.remove()\n self.l_line.remove()\n self.r_marker.remove()\n self.r_line.remove()", "def subtract(self):\n self.parent.copyCurrentWinState(self.pltw)\n self.pltw.blklst[self.blkno][self.ypos] = self.data[1] - self.data[2]\n self.pltw.updatePlot()\n self.pltw.dirty = True\n self.pltw.activecurv = self.cpos\n self.parent.updateUI()\n self.hide()", "def _onRemove(self, event):\n index = self.colorlist.GetSelection()\n del self.graphColors[index]\n self._tupleListToStrings()\n if len(self.graphColors) > 0:\n self.colorlist.SetSelection(0)\n self._updateButtons(None)", "def reset_window(self):\n self.sorting = False\n self.sort_list = []\n self.window.delete('all')\n for i in range(100):\n random_height = randint(40,280)\n line_id = self.window.create_line(4*i+50, 20, 4*i+50, random_height)\n self.sort_list.append([random_height, line_id])\n self.window.update()", "def cog_unload(self):\n self._get_sketch_prompt.cancel()", "def delX(self):\n del self.components[0]", "def delX(self):\n del self.components[0]" ]
[ "0.67064834", "0.66925335", "0.6634227", "0.6408168", "0.63029826", "0.62384754", "0.6211156", "0.618159", "0.61493623", "0.61185306", "0.60864323", "0.59476477", "0.5929833", "0.5870426", "0.5830595", "0.5802855", "0.574537", "0.5735789", "0.5705624", "0.56923753", "0.5638945", "0.5637188", "0.5632267", "0.55991113", "0.5595818", "0.55513656", "0.5512727", "0.55115587", "0.55036193", "0.548847", "0.54802316", "0.5452932", "0.5437548", "0.54285014", "0.5424866", "0.541609", "0.5414299", "0.53985626", "0.53918815", "0.5391154", "0.53899235", "0.5388347", "0.53851277", "0.5376225", "0.5372281", "0.53651786", "0.5361072", "0.53536254", "0.53422976", "0.53408766", "0.5332249", "0.53314114", "0.5331235", "0.5306754", "0.52921534", "0.52853304", "0.52834946", "0.5281549", "0.52517635", "0.52466184", "0.5243629", "0.5233662", "0.5226164", "0.5216753", "0.5216128", "0.521466", "0.52128315", "0.52112615", "0.5192969", "0.51923174", "0.51849574", "0.5180572", "0.51802975", "0.5176419", "0.5171204", "0.5166149", "0.51644003", "0.5160659", "0.51577795", "0.5157663", "0.5149189", "0.514149", "0.512708", "0.5123001", "0.5121601", "0.51203173", "0.5119461", "0.51167285", "0.5108353", "0.5106301", "0.51058394", "0.5104026", "0.51039356", "0.510282", "0.5099985", "0.5088246", "0.50809145", "0.50740135", "0.50740135" ]
0.5612297
23
Deletes the pen's drawings from the window. This method recenters the pen and resets all attributes to their default values.
def reset(self):
    self._turtle.clear()
    self._turtle.setposition((0,0))
    try:
        self._turtle.shape('pen.gif')
    except:
        self._turtle.shape('classic')
    self._turtle.color('red')
    self.speed = 0

    #pair = self._turtle.color()
    self._pencolor = self._turtle.color()[0]
    self._fillcolor = self._turtle.color()[0]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _clear_drawing(self) -> None:\n self.vertices.clear()\n self.edges.clear()\n self.subplot.clear()\n self.selected_element = None\n self.pressed_elements.clear()", "def __del__(self):\n self._screen._removePen(self)\n del self._turtle", "def _clear_drawing(self) -> None:\n self.vertices.clear()\n self.edges.clear()\n self.subplot.clear()\n self.subplot2.clear()", "def remove_drawing_rect(self):\n self.drawing_rect = QPolygonF()\n if self.connecting_rect:\n self.connecting_rect.setVisible(False)\n self.connecting_rect = None\n self.first_draw = True", "def _removePen(self,pen):\n if pen in self._pencils:\n self._pencils.remove(pen)", "def clear_selected_shapes(self):\n self.shapes_to_draw = []", "def undraw(self):\n \n if not self.canvas: return\n if not self.canvas.isClosed():\n #self.canvas.delete(self.id)\n _tkExec(self.canvas.delete, self.id)\n if self.canvas.autoflush:\n #_root.update()\n _tkCall(_root.update)\n pass\n self.canvas = None\n self.id = None", "def reset(self):\n TNavigator.reset(self)\n TPen._reset(self)\n self._clear()\n self._drawturtle()\n self._update()", "def clear(self):\n self._plt.clear()\n self._layer_items = {}", "def remove_drawing_poly(self):\n\n self.drawing_poly = QPolygonF()\n self.drawing_points_coords = []\n\n for p in self.drawing_points:\n p.setVisible(False)\n\n for line in self.connecting_line_list:\n line.setVisible(False)\n if self.connecting_line:\n self.connecting_line.setVisible(False)\n self.connecting_line = None\n self.first_draw = True\n if self.set_tooltip:\n self.set_tooltip(\"\")", "def clear_visualization(self) -> None:\n if self._drawing_handle is not None:\n sim.simAddDrawingObjectItem(self._drawing_handle, None)", "def penup(self):\n if not self._drawing:\n return\n self.pen(pendown=False)", "def _clear(self):\n self._fillitem = self._fillpath = None\n for item in self.items:\n self.screen._delete(item)\n self.currentLineItem = self.screen._createline()\n self.currentLine = []\n if self._drawing:\n self.currentLine.append(self._position)\n self.items = [self.currentLineItem]\n self.clearstamps()", "def clear_press(self):\n\n for win in self.window.additional_windows:\n win.del_win()\n\n pos = self.window.physics_canvas.physics_objects\n self.window.physics_canvas.physics_objects = []\n\n for obj in pos:\n self.window.physics_canvas.canvas.delete(obj.canvas_id)\n\n for force in self.window.physics_canvas.interacting_forces:\n force.remove()\n\n for particle in self.window.physics_canvas.particles:\n self.window.physics_canvas.canvas.delete(particle.canvas_id)", "def delwin(self):\n\t\tfor c in self.components:\n\t\t\tc.delwin()\n\t\tself.win = None", "def clear(self):\n self._turtle.clear()", "def clear(self):\n self._turtle.clear()", "def updateDraw(self):\r\n self.delConns()\r\n self.delTags()\r\n self.drawConns()\r\n self.drawTags()", "def remove_stroke(settings):\r\n if settings.fillstyle == 'border':\r\n settings.fillstyle = 'none'\r\n elif settings.fillstyle == 'fill+border':\r\n settings.fg = settings.bg\r\n settings.fillstyle = 'fill'", "def on_draw(event):\n # First, we clear the window in white\n # (it is necessary to do that at every frame)\n gloo.set_clear_color((1.0, 1.0, 1.0, 1.0))\n gloo.clear()\n program.draw(\"line_strip\")", "def clearCanvas():\n global c, coordinates\n c.delete(\"all\")\n drawMusicLines()\n coordinates.clear()", "def clear_canvas():\n self.parent_class.canvas.delete(\"all\")", "def clear(self):\n self._delayvalue = _CFG[\"delay\"]\n self._colormode = _CFG[\"colormode\"]\n self._delete(\"all\")\n 
self._bgpic = self._createimage(\"\")\n self._bgpicname = \"nopic\"\n self._tracing = 1\n self._updatecounter = 0\n self._turtles = []\n self.bgcolor(\"white\")\n for btn in 1, 2, 3:\n self.onclick(None, btn)\n self.onkeypress(None)\n for key in self._keys[:]:\n self.onkey(None, key)\n self.onkeypress(None, key)\n Myturtle._pen = None", "def clear_drawn_objects(self, view_manager):\n view = view_manager.get_view()\n for item in self._drawnObjects:\n view.removeItem(item)\n # clear the list:\n self._drawnObjects = []", "def draw(self, *args, **kwargs):\n self.window.clear()\n self.batch.draw()", "def clear_scene(self, event):\n self.shapes = []\n self.redraw()", "def _clear(self, event):\n if self.ignore(event) or self._changed_canvas():\n return\n self._background = self.canvas.copy_from_bbox(self.ax.bbox)\n self.ax.draw_artist(self._buttons)\n if hasattr(self, \"_circles\"):\n for circle in self._circles:\n self.ax.draw_artist(circle)", "def clear(self):\n self.canvas = [[self.style] * self.cols for _ in range(self.lines)]", "def clear(self):\n for i in range(len(self.canvas)):\n self.canvas[i] = 0", "def clear(self):\n for key in self.__columns:\n self.__widths[key] = 0\n self.__data = []\n self.__selectedRow = -1\n self.__formatString = \"\"\n self._window.clear()\n self.drawBorder()", "def removePick(self):\n self.pnt = None\n vtkRenWin.delMarker(self.renWin)", "def erase(self):\r\n self.in_arrow = None\r\n self.out_arrow = None", "def do_paint(self):\r\n curses.curs_set(0)\r\n if self.win:\r\n self.paint()\r\n self.done_paint()", "def clear(self):\n self._frame.clear()\n self._turtles = []\n self._gpens = []", "def clear(self):\n self._x_prev = None\n self._y_prev = None", "def cleanup(self):\r\n\r\n # Remove strip from window.\r", "def clear_strip(self):\r\n wlogger.log_info(\"Clear Strip\")\r\n for led in range(self.num_led):\r\n self.set_pixel(led, 0, 0, 0)\r\n self.show()", "def clear(self):\n lines = self._lines\n image, bkg_image = self.image, self._image\n for line in lines: line.clear(image, bkg_image) #prej bkg_img\n self._cursor = 0", "def clear(self):\n black = neo.Color(0,0,0)\n self.set_all(black)\n self.draw()", "def remove(self) -> None:\n self.map.remove_brush(self)", "def pen(self, pen=None, **pendict):\n _pd = {\"shown\" : self._shown,\n \"pendown\" : self._drawing,\n \"pencolor\" : self._pencolor,\n \"fillcolor\" : self._fillcolor,\n \"pensize\" : self._pensize,\n \"speed\" : self._speed,\n \"resizemode\" : self._resizemode,\n \"stretchfactor\" : self._stretchfactor,\n \"outline\" : self._outlinewidth,\n \"tilt\" : self._tilt\n }\n\n if not (pen or pendict):\n return _pd\n\n if isinstance(pen, dict):\n p = pen\n else:\n p = {}\n p.update(pendict)\n\n _p_buf = {}\n for key in p:\n _p_buf[key] = _pd[key]\n\n if self.undobuffer:\n self.undobuffer.push((\"pen\", _p_buf))\n\n newLine = False\n if \"pendown\" in p:\n if self._drawing != p[\"pendown\"]:\n newLine = True\n if \"pencolor\" in p:\n if isinstance(p[\"pencolor\"], tuple):\n p[\"pencolor\"] = self._colorstr((p[\"pencolor\"],))\n if self._pencolor != p[\"pencolor\"]:\n newLine = True\n if \"pensize\" in p:\n if self._pensize != p[\"pensize\"]:\n newLine = True\n if newLine:\n self._newLine()\n if \"pendown\" in p:\n self._drawing = p[\"pendown\"]\n if \"pencolor\" in p:\n self._pencolor = p[\"pencolor\"]\n if \"pensize\" in p:\n self._pensize = p[\"pensize\"]\n if \"fillcolor\" in p:\n if isinstance(p[\"fillcolor\"], tuple):\n p[\"fillcolor\"] = self._colorstr((p[\"fillcolor\"],))\n self._fillcolor = 
p[\"fillcolor\"]\n if \"speed\" in p:\n self._speed = p[\"speed\"]\n if \"resizemode\" in p:\n self._resizemode = p[\"resizemode\"]\n if \"stretchfactor\" in p:\n sf = p[\"stretchfactor\"]\n if isinstance(sf, (int, float)):\n sf = (sf, sf)\n self._stretchfactor = sf\n # if \"shearfactor\" in p:\n # self._shearfactor = p[\"shearfactor\"]\n if \"outline\" in p:\n self._outlinewidth = p[\"outline\"]\n if \"shown\" in p:\n self._shown = p[\"shown\"]\n if \"tilt\" in p:\n self._tilt = p[\"tilt\"]\n \n self._update()", "def erase(self):\n\tself.state={}\n\tself.display(update_board=0)", "def clear(self):\n self.animation.stop()\n self.draw(0, 0, 0, 0, 0)", "def undoChanges(self):\n Objects.undoChanges(self)\n self.draw()", "def clearScreen():\n dislin.erase()", "def _clear(self, event):\n if self.ignore(event) or self._changed_canvas():\n return\n self._background = self.canvas.copy_from_bbox(self.ax.bbox)\n self.ax.draw_artist(self._checks)\n if hasattr(self, '_lines'):\n for l1, l2 in self._lines:\n self.ax.draw_artist(l1)\n self.ax.draw_artist(l2)", "def erase(self):\n output = Output(self.stdout)\n\n output.cursor_backward(self._cursor_pos.x)\n output.cursor_up(self._cursor_pos.y)\n output.erase_down()\n output.reset_attributes()\n output.flush()\n\n self.reset()", "def clear(self, event):\r\n self.selectedRegion = None\r\n self.paint()", "def clear_main(self):\n\n if self.terminate:\n return\n\n self.windows['MAIN'].erase()\n self.windows['MAIN'].border(' ', ' ',\n curses.ACS_HLINE, curses.ACS_HLINE,\n curses.ACS_HLINE, curses.ACS_HLINE,\n curses.ACS_HLINE, curses.ACS_HLINE)", "def _destroy(self):\n root = self._root\n turtle.Turtle._pen = None\n turtle.Turtle._screen = None\n self._root = None\n self._canvas = None\n turtle.TurtleScreen._RUNNING = True\n root.destroy()", "def erase(self):\n self.view.erase_status('00_git_gutter')", "def _clear_window(self):\n self.buf[:] = []", "def clear(self):\n if self.flag == 0:\n for coord in INDICES:\n self.kill(coord)\n self.chart[coord] = DEAD", "def reset(self):\n for i in range(self.shapeRow):\n for j in range(self.shapeColumn):\n self.buttons[i][j].setText(\" \")", "def delete_current_shape(self):\n print(\"deleting shape!\")\n self.shapes.remove(self.current_shape)\n self.current_shape = None\n self.changed()", "def delete(self):\n subprocess.run([\"axicli\", \"--mode\", \"manual\", \"-M\", \"enable_xy\"])\n subprocess.run([\"axicli\", \"--mode\", \"manual\", \"-M\", \"raise_pen\"])\n subprocess.run([\"axicli\", \"--mode\", \"manual\", \"-M\", \"disable_xy\"])\n\n return self.get()", "def reset(self):\n self.x_pos = 10\n self.y_pos = 10\n self.line_height = 15", "def cog_unload(self):\n self._get_sketch_prompt.cancel()", "def clear(self):\n self._fig = go.Figure()", "def erase_plot(self, line_position=0):\n self.axplot.lines.pop(line_position).remove\n self.fig.canvas.draw()\n return", "def stopLineDrawing(self):\n taskMgr.remove(\"drawLineTask\")\n if self.line is not None:\n self.line.reset()\n self.line = None", "def clear(self):\n self.pointscontroller.pop(self.currentlyadded)", "def on_draw(self):\n self.clear()\n self.manager.draw()", "def onChartRemoveSeries(self):\n self.chart().removeAllSeries()\n self.series = {}\n self.yaxis = {}\n self.pen = {}\n self.ymin = {}\n self.ymax = {}", "def __del__(self):\n self.clear()\n self._screen._removeTurtle(self)\n del self._turtle", "def __drawAndErase(self, boxToDraw, boxToErase=None):\n dc = wx.ClientDC(self.drawingSurface)\n dc.BeginDrawing()\n dc.SetPen(wx.Pen(wx.WHITE, 1, wx.DOT))\n 
dc.SetBrush(wx.TRANSPARENT_BRUSH)\n dc.SetLogicalFunction(wx.XOR)\n if boxToErase:\n r = wx.Rect(*boxToErase)\n dc.DrawRectangleRect(r)\n\n r = wx.Rect(*boxToDraw)\n dc.DrawRectangleRect(r)\n dc.EndDrawing()", "def remove_canvas(self,):\r\n # reset plot view beofre change\r\n self.canvas.toolbar.home()\r\n # remove widgets from canvas_vlayout\r\n self.canvas_vlayout.removeWidget(self.toolbar)\r\n self.toolbar.close()\r\n self.canvas_vlayout.removeWidget(self.canvas)\r\n self.canvas.close()", "def clickClearReferences(self, event):\n self.whiteReference = None\n self.lightBtn.color = '0.85'\n self.darkReference = None\n self.darkBtn.color = '0.85'\n plt.pause(0.3)\n self.axes.autoscale_view()", "def invalidate_canvas(self):\n\n if self.window:\n x, y, w, h = self.get_allocation()\n self.window.invalidate_rect((0,0,w,h), False)\n self.cr = self.window.cairo_create()\n self.cr.update_layout(self.pg)", "def reset_window(self):\n self.sorting = False\n self.sort_list = []\n self.window.delete('all')\n for i in range(100):\n random_height = randint(40,280)\n line_id = self.window.create_line(4*i+50, 20, 4*i+50, random_height)\n self.sort_list.append([random_height, line_id])\n self.window.update()", "def setDrawing(self):\n self.graph_drawing=[]", "def delete_selection(self):\n if self.selected_point_index is not None:\n del self.current_shape[self.selected_point_index]\n self.selected_point_index = None\n self.changed()", "def clear(self):\n self.clear_markers()\n self.l_marker.remove()\n self.l_line.remove()\n self.r_marker.remove()\n self.r_line.remove()", "def clear(self):\n try:\n # This causes stupid errors with tkagg, so just wrap it in\n # try-except for now\n self.fig.clear()\n except: pass\n self.annotators.clear()\n self.dims.clear()\n self.ph.remove(self.ID)", "def reset(self):\n\n self.fig.clear()\n self.ax = self.fig.add_subplot(111)\n self.hasLegend.set(False)\n self.title(Graph.default_title)\n # Lines is a list of DataSet objects. The user should take care to make\n # DataSet names unique, as there is no error checking done by Graph. 
\n # If a DataSet line is deleted by its formal name, Graph will delete the\n # first line in the list that matches the name.\n self.lines = {}\n self.line_counter = 1", "def pointer_clear_focus(self) -> None:\n\n return lib.wlr_seat_pointer_clear_focus(self._ptr)", "def _reset_blender(self):\n\n # restore factory settings\n #bpy.ops.wm.read_factory_settings()\n for scene in bpy.data.scenes:\n for obj in scene.objects:\n scene.objects.unlink(obj)\n\n # consider only the objects in the default scene\n data = [\n bpy.data.objects,\n bpy.data.meshes,\n bpy.data.lamps,\n bpy.data.cameras\n ]\n for bpy_data_iter in data:\n for id_data in bpy_data_iter:\n bpy_data_iter.remove(id_data)", "def __editDelete(self):\n if QApplication.focusWidget() == e5App().getObject(\"Shell\"):\n e5App().getObject(\"Shell\").clear()\n else:\n self.activeWindow().clear()", "def clear(self) -> None:\n\n self.screen.fill(self.bg)", "def unbind(self, *args, **kwargs):\n self._canvas.unbind(*args, **kwargs)", "def clear(self):\n self.img = PIL.Image.new(self.img.mode, self.img.size, self.background)\n self.drawer = aggdraw.Draw(self.img)", "def destroy (self, *attrs):\n for attr in ('rect', 'draw_fn') + attrs:\n try:\n delattr(self, attr)\n except AttributeError:\n pass", "def clear_sclasses(self, w: Wrapper) -> None:\n w.setProperty(self.style_sclass_property, '')", "def OnRemoveAutomation(self, event, automation):\n\n self.app.RemoveAutomation(automation)\n for child in self.GetChildren():\n child.Destroy()\n\n self.Draw()", "def clear(self):\n pygame.draw.rect(self.screen,BLACK,(0,0,WINDOWWIDTH,\n WINDOWHEIGHT))\n pygame.display.update()", "def clean_canvas(self):\n self.canvas.fill(self.colorPalette.black)\n self.background.to_canvas(canvas=self.canvas)", "def __del__(self):\n try:\n self._frame._destroy()\n except:\n pass\n self._turtles = []\n self._pencils = []\n del self._frame", "def reset(self):\n\n self.__styles = list();", "def discard(self) -> None:\n\n self.plot.close()", "def clear_figure(self):\n self.figure.clf()", "def resetGraph(self):\n self.colours = [self.uncompletedColor] * self.num_points\n self.setData(pos=self.pos, symbolBrush=self.colours, size=1, symbol=self.symbols, pxMode=False, text=self.text)", "def on_draw(self):\n self.clear()\n self.gamestatemanager.peek().on_draw(self.get_size())", "def setPen(self, *args, **kwargs):\n if kwargs == {} and (args == () or args == ('default',)):\n self.opts['pen'] = fn.mkPen(getConfigOption('foreground'))\n else:\n self.opts['pen'] = fn.mkPen(*args, **kwargs)\n\n self.picture = None\n self.update()", "def __del__(self):\n pyplot.clf()", "def pendown(self):\n if self._drawing:\n return\n self.pen(pendown=True)", "def clear_screen(self):\r\n lst_grid = self.root.grid_slaves()\r\n for widget in lst_grid:\r\n widget.destroy()\r\n lst_pack = self.root.pack_slaves()\r\n for widget in lst_pack:\r\n widget.destroy()", "def reset(self):\n if hasattr(self, \"W\"):\n del self.W\n if hasattr(self, \"T\"):\n del self.T\n if hasattr(self, \"P\"):\n del self.P", "def bye(self):\n self._frame._destroy()\n self._turtles = []\n self._gpens = []\n del self._frame", "def SetConnectionPen(self, pen):\r\n\r\n self._dottedPen = pen\r\n self._dirty = True", "def clear(self):\n self._plots[:] = []" ]
[ "0.7000005", "0.68685716", "0.6522826", "0.6467621", "0.64485425", "0.63026386", "0.62595445", "0.62351507", "0.6223138", "0.6192789", "0.6185075", "0.61828935", "0.6163207", "0.61281633", "0.6050944", "0.6014234", "0.6014234", "0.5996016", "0.5904447", "0.58800614", "0.5878777", "0.58523965", "0.5845814", "0.58435595", "0.58068025", "0.5802281", "0.5784232", "0.57422704", "0.57364887", "0.5729464", "0.5707054", "0.5706607", "0.57029027", "0.5690659", "0.568269", "0.566976", "0.5657437", "0.5652461", "0.5639834", "0.5631572", "0.56198204", "0.5619394", "0.56043816", "0.5604085", "0.559373", "0.5584306", "0.55768657", "0.55659425", "0.55602866", "0.55472577", "0.554703", "0.5518912", "0.5518776", "0.5513556", "0.5506266", "0.5488349", "0.5483889", "0.5481545", "0.5473712", "0.5473144", "0.54707676", "0.54690605", "0.5466322", "0.545967", "0.54587847", "0.5456892", "0.5454956", "0.54536366", "0.5450382", "0.5444547", "0.5442313", "0.5439702", "0.5431518", "0.54071134", "0.54037035", "0.5402395", "0.5375589", "0.537227", "0.53559715", "0.5354167", "0.5351976", "0.5346996", "0.53452486", "0.53402853", "0.53401417", "0.53392065", "0.5334748", "0.5334277", "0.53292465", "0.5326557", "0.5323627", "0.5315643", "0.5313231", "0.5302926", "0.5297666", "0.5292639", "0.52817947", "0.52719194", "0.5270967", "0.52617043" ]
0.5666995
36
Fills in the current drawing, but retains state. Normally, an object is not filled until you set the state to False. Calling this method executes this fill, without setting the state to False. If fill is False, this method does nothing.
def flush(self):
    if self.fill:
        self._turtle.fill(False)
        self._turtle.fill(True)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def setFilled(self, fill):\n isFilled = fill\n repaint()", "def fill(self):\n return self._turtle.fill()", "def update_fill(self, event):\r\n\r\n if event.type == 'FILL':\r\n self.update_positions_from_fill(event)\r\n self.update_holdings_from_fill(event)", "def update_fill(self, event):\n if event.type == 'FILL':\n self.update_positions_from_fill(event)\n self.update_holdings_from_fill(event)", "def update_fill(self, event):\n if event.type == 'FILL':\n self.update_positions_from_fill(event)\n self.update_holdings_from_fill(event)", "def setFill(self, fill):\n self.area_show = fill", "def fill():\n # Switch in edit mode\n bpy.ops.object.mode_set(mode = 'EDIT')\n \n # Fill hole\n bpy.ops.mesh.fill()", "def _switch(self):\n self.fill= not self.fill", "def clear(self, fill = 0x00):\n self._buffer = [ fill ] * ( self.width * self.height )", "def update_fill(self, event):\n if event.type == 'FILL':\n self.update_positions_from_fill(event)\n self.update_prices_from_fill(event)\n self.update_holdings_from_fill(event)", "def end_fill():\n turtleTmp.end_fill()", "def setPointFill(self, fill):\n for point in self.points:\n point.fill = fill", "def on_draw(self):\n self.clear()\n self.gamestatemanager.peek().on_draw(self.get_size())", "def draw(self):\n self._group.set_state_recursive()\n self._vertex_list.draw(self._draw_mode)\n self._group.unset_state_recursive()", "def write_fill(self, fill: FillFormat):\n if self.fill_type is not None:\n self._write_fill_type(fill)", "def on_draw(self, da, ctx):\n self.referee.get_current_state().draw(ctx)", "def clear(self):\r\n\t\tself.grid.fill(False)", "def filled(self, fill_value):\n sdata = self.data\n new_data = numpy.ma.filled(sdata, fill_value=fill_value)\n if new_data == sdata:\n return self\n else:\n return type(self)(new_data, self.bset)", "def draw(self, surface):\n checked_color = (0, 196, 0) if self.checked else pg.Color(\"white\")\n surface.fill(pg.Color(\"black\"), self.rect)\n surface.fill(self.color, self.rect.inflate(-2,-2))\n surface.fill(pg.Color(\"white\"), self.rect.inflate(-6,-6))\n surface.fill((205,205,205), self.rect.inflate(-8,-8))\n surface.fill(checked_color, self.select_rect)", "def fill(layer, event):\n # on press\n layer.fill(layer.coordinates, layer._value, layer.selected_label)", "def filled(\n self,\n filled: FillReturn,\n fill_type: FillType,\n ax: figure | int = 0,\n color: str = \"C0\",\n alpha: float = 0.7,\n ) -> None:\n fig = self._get_figure(ax)\n color = self._convert_color(color)\n xs, ys = filled_to_bokeh(filled, fill_type)\n if len(xs) > 0:\n fig.multi_polygons(xs=[xs], ys=[ys], color=color, fill_alpha=alpha, line_width=0)", "def on_draw(self):\n self.clear()\n self.manager.draw()", "def fill(self, *args, **kwargs):\n closed = kwargs.pop('closed', True)\n return super(RadarAxes, self).fill(closed=closed, *args, **kwargs)", "def fill(self, *args, **kwargs):\n closed = kwargs.pop('closed', True)\n return super(RadarAxes, self).fill(closed=closed, *args, **kwargs)", "def fill(self, *args, **kwargs):\r\n closed = kwargs.pop('closed', True)\r\n return super(RadarAxes, self).fill(closed=closed, *args, **kwargs)", "def draw(self):\n arcade.draw_xywh_rectangle_filled(\n self.x, self.y, self.width, self.height, self.fill.color\n )\n arcade.draw_xywh_rectangle_outline(\n self.x, self.y, self.width, self.height, self.pen.color, 3\n )", "def _redraw(self, render_as_done: \"bool\" = False) -> \"None\":\n if not self.drawn:\n cast(\"Application\", super())._redraw(render_as_done=True)\n self.drawn = True", "def fill(self, 
color):", "def fill_px(self, fill_px):\n\n self._fill_px = fill_px", "def fill(self, color):\n self.fill_rect(0, 0, self.width, self.height, color)", "def draw(self):\n self.screen.fill(WHITE)\n self.color_invalid()\n self.draw_selected()\n self.shade_locked_cells()\n self.draw_grid()\n self.draw_buttons()\n self.draw_numbers()", "def do_paint(self):\r\n curses.curs_set(0)\r\n if self.win:\r\n self.paint()\r\n self.done_paint()", "def clear(self):\n self.animation.stop()\n self.draw(0, 0, 0, 0, 0)", "def paint(self, g):\n r = getAWTBounds()\n if isFilled():\n g.setColor(getFillColor())\n g.fillRect(r.x, r.y, r.width, r.height)\n g.setColor(getColor())\n g.drawRect(r.x, r.y, r.width, r.height)", "def _draw_square(self, left_x, top_y, side, color, fill):\n self.pen.up()\n self.pen.color(color)\n self.pen.goto(left_x, top_y)\n self.pen.down()\n self.pen.begin_fill()\n for _ in range(4):\n self.pen.forward(side)\n self.pen.right(90)\n self.pen.end_fill()", "def stop_loss_on_fill(self, stop_loss_on_fill):\n\n self._stop_loss_on_fill = stop_loss_on_fill", "def stop_loss_on_fill(self, stop_loss_on_fill):\n\n self._stop_loss_on_fill = stop_loss_on_fill", "def setFill(self, color):\n self._reconfig(\"fill\", color)", "def redraw(self) -> None:\n self.canvas.draw_idle()\n self.Refresh()", "def fill(self):\r\n return self._structure.fill", "def setFillColor(self, color):\n fillColor = color\n repaint()", "def draw(self):\n if self.is_clicked:\n pg.draw.circle(self.window, self.color, (self.x, self.y), self.r, 0)\n else:\n pg.draw.circle(self.window, self.color, (self.x, self.y), self.r, 1)", "def clear(self):\n black = neo.Color(0,0,0)\n self.set_all(black)\n self.draw()", "def draw(self):\n if self.master != None :\n fill = Cell.FILLED_COLOR_BG\n outline = Cell.FILLED_COLOR_BORDER\n\n if not self.fill:\n fill = Cell.EMPTY_COLOR_BG\n outline = Cell.EMPTY_COLOR_BORDER\n walls[self.ord][self.abs] = 0\n else:\n walls[self.ord][self.abs] = 1\n\n\n xmin = self.abs * self.size\n xmax = xmin + self.size\n ymin = self.ord * self.size\n ymax = ymin + self.size\n self.master.create_rectangle(xmin, ymin, xmax, ymax, fill = fill, outline = outline)", "def draw(self):\n if self.master != None :\n fill = self.fill\n #fill = Cell.FILLED_COLOR_BG\n outline = Cell.EMPTY_COLOR_BORDER\n\n #if not self.fill:\n # fill = Cell.EMPTY_COLOR_BG\n # outline = Cell.EMPTY_COLOR_BORDER\n\n xmin = self.abs * self.size\n xmax = xmin + self.size\n ymin = self.ord * self.size\n ymax = ymin + self.size\n\n self.master.create_rectangle(xmin, ymin, xmax, ymax, fill = fill, outline = outline)", "def clear(self):\n self.np.fill(OFF)\n self.np.show()\n return True", "def all_off(self):\n self.fill_off()\n self.update()\n self.fill_off()\n self.update()", "def draw(self, shape):\r\n if not self.s_flg:\r\n opengles.glEnable(GL_SCISSOR_TEST)\r\n opengles.glScissor(ctypes.c_int(int(0)), ctypes.c_int(self.y0),\r\n ctypes.c_int(self.ix), ctypes.c_int(1))\r\n self.s_flg = True\r\n shape.draw(shader=self.shader)", "def filled(self, *args, **kwargs):\n out = getcopy(self)\n out.fill(*args, **kwargs)\n return out", "def redraw(self):\n self._create()", "def fill(self, screen=None, colour=None):\n if not screen:\n screen = self.screen\n if not colour:\n colour = self.backgroundColour\n screen.fill(colour)", "def draw(self):\n arcade.draw_circle_filled(self.center.x, self.center.y, BALL_RADIUS, BALL_COLOR)\n return", "def fill(self, x, y, color):\n raise NotImplementedError # Override this function in the Solution classes", "def fill(self, color):\n 
self.format.fill(self, color)", "def draw(self):\n self.figure.canvas.draw_idle()", "def draw(self):\n if self.master != None :\n outline = Cell.FILLED_COLOR_BORDER if self.fill else Cell.EMPTY_COLOR_BORDER\n\n xmin = self.abs * self.size\n xmax = xmin + self.size\n ymin = self.ord * self.size\n ymax = ymin + self.size\n\n self.master.create_rectangle(xmin, ymin, xmax, ymax, fill = self.fill, outline = outline)", "def set_white(self):\n self.fill=Cell.EMPTY_COLOR_BG\n self.draw()", "def draw(self):\r\n arcade.draw_circle_filled(self.center.x, self.center.y, self.radius, TARGET_COLOR)", "def Redraw(self):\n print(\"EMPTY VIEW REDRAW\")", "def reset(self):\n self.state.fill(EMPTY)", "def _clear_drawing(self) -> None:\n self.vertices.clear()\n self.edges.clear()\n self.subplot.clear()\n self.selected_element = None\n self.pressed_elements.clear()", "def fill(self):\n return self[\"fill\"]", "def fill(self):\n return self[\"fill\"]", "def draw(self):\r\n arcade.draw_rectangle_filled(self.center.x, self.center.y, self.radius, self.radius, TARGET_SAFE_COLOR)", "def draw (self, screen):\n drew = bool(self.draw_fn(self, screen, self.dirty))\n self.dirty = False\n return drew", "def isFilled(self):\n return self.isFilled", "def draw(self):\n\n for item in self.vis:\n item.undraw()\n self.render()\n for item in self.vis:\n item.draw(self.win)\n self.drawn = True", "def fill(self, value):\n self.fill_color = value", "def reset(self):\n self._turtle.clear()\n self._turtle.setposition((0,0)) \n try:\n self._turtle.shape('pen.gif')\n except:\n self._turtle.shape('classic')\n self._turtle.color('red')\n self.speed = 0\n \n #pair = self._turtle.color()\n self._pencolor = self._turtle.color()[0]\n self._fillcolor = self._turtle.color()[0]", "def fill_draw(self):\n self.draw = [x + str(y) for x in COLOR for y in CARD_VALUE]", "def remove_fill(settings):\r\n if settings.fillstyle == 'fill':\r\n settings.fillstyle = 'none'\r\n\r\n elif settings.fillstyle == 'fill+border':\r\n settings.fillstyle = 'border'\r\n return", "def _clear(self):\n self._fillitem = self._fillpath = None\n for item in self.items:\n self.screen._delete(item)\n self.currentLineItem = self.screen._createline()\n self.currentLine = []\n if self._drawing:\n self.currentLine.append(self._position)\n self.items = [self.currentLineItem]\n self.clearstamps()", "def draw(self):\n\n self.state_stack.peek().draw(self.screen)", "def remove_drawing_rect(self):\n self.drawing_rect = QPolygonF()\n if self.connecting_rect:\n self.connecting_rect.setVisible(False)\n self.connecting_rect = None\n self.first_draw = True", "def begin_fill(*args,**kwargs):\n if(len(args)==2 and len(kwargs)==0):\n turtleTmp.begin_fill(args[0], args[1])\n elif (len(args)==1 and len(kwargs)==0):\n turtleTmp.begin_fill(args[0], turtleTmp.color)\n elif (len(args)==0 and len(kwargs)==1):\n if(\"borderColor\" in kwargs):\n turtleTmp.begin_fill(kwargs[\"borderColor\"], turtleTmp.color)\n else:\n turtleTmp.begin_fill(turtleTmp.color, kwargs[\"fillColor\"])\n elif (len(args)==1 and len(kwargs)==1):\n if(\"borderColor\" in kwargs):\n turtleTmp.begin_fill(kwargs[\"borderColor\"], args[0])\n else:\n turtleTmp.begin_fill(args[0], kwargs[\"fillColor\"])\n elif(len(kwargs)==2 and len(args)==0):\n turtleTmp.begin_fill(kwargs[\"borderColor\"], kwargs[\"fillColor\"])\n else:\n turtleTmp.begin_fill(turtleTmp.color, turtleTmp.color)", "def clear(self) -> None:\n for y in range(self.width):\n for x in range(self.height):\n self.set_value(Point(y, x), FieldState.EMPTY)", "def redraw(self, state: 
EngineeringState) -> None:\n pass", "def position_fill(self, position_fill):\n allowed_values = [\"OPEN_ONLY\", \"REDUCE_FIRST\", \"REDUCE_ONLY\", \"DEFAULT\"] # noqa: E501\n if position_fill not in allowed_values:\n raise ValueError(\n \"Invalid value for `position_fill` ({0}), must be one of {1}\" # noqa: E501\n .format(position_fill, allowed_values)\n )\n\n self._position_fill = position_fill", "def position_fill(self, position_fill):\n allowed_values = [\"OPEN_ONLY\", \"REDUCE_FIRST\", \"REDUCE_ONLY\", \"DEFAULT\"] # noqa: E501\n if position_fill not in allowed_values:\n raise ValueError(\n \"Invalid value for `position_fill` ({0}), must be one of {1}\" # noqa: E501\n .format(position_fill, allowed_values)\n )\n\n self._position_fill = position_fill", "def pendown(self):\n if self._drawing:\n return\n self.pen(pendown=True)", "def drawCells(self):\r\n self.drawing = not self.drawing\r\n if self.drawing:\r\n self.draw_button['text'] = \"No Draw\"\r\n else:\r\n self.draw_button['text'] = \"Draw\"", "def clear(self, event):\r\n self.selectedRegion = None\r\n self.paint()", "def draw_circle_filled(self, x0, y0, r, color=None):\n self._draw_fast_vline(x0, y0 - r, 2 * r + 1, color)\n self._draw_circle_filled_helper(x0, y0, r, 3, 0, color)", "def add_fill(self, shape, value, name=None):\n return self._build_op('Fill', [shape, value], name=name)", "def draw(self, force = False):\n\t\tpass", "def clear(self) -> None:\n\n self.screen.fill(self.bg)", "def draw(self):\n arcade.draw_circle_filled(self.position_x, self.position_y, self.radius,self.player_color)", "def draw(self):\n if self.node:\n if self.async:\n if self.cancel_draw:\n self.after_cancel(self.cancel_draw)\n self.cancel_draw = self.after(3, self._draw)\n else: self._draw()", "def fillcolor(self, *args):\n if args:\n color = self._colorstr(args)\n if color == self._fillcolor:\n return\n self.pen(fillcolor=color)\n else:\n return self._color(self._fillcolor)", "def _clear(self, event):\n if self.ignore(event) or self._changed_canvas():\n return\n self._background = self.canvas.copy_from_bbox(self.ax.bbox)\n self.ax.draw_artist(self._buttons)\n if hasattr(self, \"_circles\"):\n for circle in self._circles:\n self.ax.draw_artist(circle)", "def fill_rect(self, x, y, width, height, color):\n # pylint: disable=too-many-arguments, too-many-boolean-expressions\n self.rect(x, y, width, height, color, fill=True)", "def _clear(self, event):\n if self.ignore(event) or self._changed_canvas():\n return\n self._background = self.canvas.copy_from_bbox(self.ax.bbox)\n self.ax.draw_artist(self._checks)\n if hasattr(self, '_lines'):\n for l1, l2 in self._lines:\n self.ax.draw_artist(l1)\n self.ax.draw_artist(l2)", "def fill(self, colour: int, /) -> None:", "def drawChanges(self):\n self.draw(wait=False)\n draw(self.values,color='yellow',bbox=None,clear=False,shrink=self.shrink)", "def FillPath(*args, **kwargs):\n return _gdi_.GraphicsContext_FillPath(*args, **kwargs)", "def draw(self, *args, **kwargs):\n self.window.clear()\n self.batch.draw()", "def draw(self):\n arcade.draw_rectangle_filled(self.center.x,\n self.center.y,\n self.width,\n self.height,\n arcade.color.WHITE)", "def picture_fill_format(self, picture_fill_format):\n self._picture_fill_format = picture_fill_format", "def zero(self):\n\t\tself.angle = 0.0\n\t\tself.draw()\n\t\ttime.sleep(self.delay)", "def EndDraw(self):\r\n\r\n pass" ]
[ "0.76267654", "0.7002292", "0.6535505", "0.6509827", "0.6509827", "0.6482051", "0.64610213", "0.63605654", "0.6331894", "0.6112397", "0.6048987", "0.59302145", "0.58996844", "0.5897555", "0.5853862", "0.57472944", "0.5747061", "0.5738132", "0.5734731", "0.5734146", "0.57322496", "0.5723832", "0.5720785", "0.5720785", "0.57183105", "0.571228", "0.5708237", "0.56902003", "0.568447", "0.567119", "0.5663943", "0.563796", "0.5626931", "0.56127805", "0.55963767", "0.5595681", "0.5595681", "0.5581789", "0.5560201", "0.5556428", "0.5551118", "0.5550829", "0.55496806", "0.55490744", "0.5535814", "0.553214", "0.551897", "0.551479", "0.55140114", "0.55011845", "0.5497865", "0.5496757", "0.54918134", "0.5491184", "0.5485707", "0.5481997", "0.5480763", "0.5468864", "0.5464868", "0.54584306", "0.54472977", "0.5440264", "0.5440264", "0.54276824", "0.54209656", "0.54114854", "0.5408924", "0.54069775", "0.5406684", "0.5395298", "0.5393318", "0.53923523", "0.5392214", "0.53900063", "0.5382459", "0.5379238", "0.5376812", "0.5372466", "0.5372466", "0.53693724", "0.53576386", "0.53533244", "0.5351859", "0.5351469", "0.53465855", "0.53390986", "0.5338474", "0.5328554", "0.53275865", "0.5324992", "0.5322874", "0.53143775", "0.53117925", "0.52807236", "0.52530116", "0.5243986", "0.5240997", "0.5239178", "0.5237102", "0.52348644" ]
0.70235515
1
hgtStartData is the source data from the NASA JPL topographic data
def __init__(self, hgtStartData):
        self.data = []
        for row in hgtStartData:
            toAdd = []
            for height in row:
                toAdd.append([height, 0])
            self.data.append(toAdd)
        self.maxX = len(hgtStartData[0]) - 1
        self.maxY = len(hgtStartData) - 1
        self.minFloodHeight = 0
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _compute_single_source_data(self, start, end):\n single_source_data = {}\n dijkstra = Dijkstra(self.graph_provider)\n single_source_data['start'] = dijkstra.single_source(start)\n single_source_data['end'] = dijkstra.single_source(end)\n\n self._single_source_data = single_source_data", "def test_data_source_soaps_id_head(self):\n pass", "def extract_head(data):\n tl = data['tls'][data['i']];\n br = data['brs'][data['i']];\n head = extract_area(data,(tl,br));\n return head;", "def getOLAPSource():", "def get_start_delta(target_horizon, gt_id=\"contest_tmp2m\"):\n return get_measurement_lag(gt_id) + get_forecast_delta(target_horizon)", "def mdsData(shotno=None,\n\t\t\tdataAddress=['\\HBTEP2::TOP.DEVICES.SOUTH_RACK:CPCI_10:INPUT_94',\n\t\t\t\t\t\t '\\HBTEP2::TOP.DEVICES.SOUTH_RACK:CPCI_10:INPUT_95'],\n\t\t\ttStart=[],tStop=[]):\t\t\t\n\t\t\n\t# convert dataAddress to a list if it not one originally \n\tif type(dataAddress) is not list:\n\t\tdataAddress=[dataAddress];\n\t\t\n#\t# if shotno == -1, use the latest shot number\n#\tif shotno==-1:\n#\t\tshotno=latestShotNumber()\n\t\t\n\t# init arrays\n\ttime = []\n\tdata = []\n\t\t\n\t# check if computer is located locally or remotely. The way it connects to spitzer remotely can only use one method, but locally, either method can be used. \n\tif _ON_HBTEP_SERVER==True: # if operating local to the tree\n\t\t# converted from Ian's code\n\t\t\n\t\ttree = _mds.Tree('hbtep2', shotno) \n\t\tfor i in range(0,len(dataAddress)):\n\n\t\t\tnode = tree.getNode(dataAddress[i])\t\t\t#Get the proper node\t\n\t\t\tdata.append(node.data())\t\t\t \t \t#Get the data from this node \n\t\tif type(data[0]) is _np.ndarray: # if node is an array, return data and time\n\t\t\ttime = node.dim_of().data()\t\t\n\n\t\n\telse: # operaeting remotely\n\t\n\t\t# if shotno is specified, this function gets its own mdsConn\n\t\tif type(shotno) is float or type(shotno) is int or type(shotno) is _np.int64:\n\t\t\tmdsConn=_initRemoteMDSConnection(shotno);\n\n\t\tfor i in range(0,len(dataAddress)):\n\t\t\tdata.append(mdsConn.get(dataAddress[i]).data())\n\t\t\n\t\t# if data is an array, also get time\n\t\tif type(data[0]) is _np.ndarray:\n\t\n\t\t\ttime = mdsConn.get('dim_of('+dataAddress[0]+')').data(); # time assocated with data\n \n\tif time != [] and type(tStop)!=list:\n\t\t# trim time and data\n\t\ttime,data= _trimTime(time,data,tStart,tStop)\n\t\t\n\tif time != []:\n\t\treturn data, time\n\telse: \n\t\treturn data", "def get_start_state_data(start_state: int, states: [State]) -> tuple:\n first_node = 0\n for state in states:\n if state.trigs:\n for trig in state.trigs:\n if trig.source == start_state:\n first_node = trig.target\n return (get_state_by_id(states, first_node, \"new\").new_id, get_state_by_id(states, first_node, \"old\").y,\n (get_state_by_id(states, first_node, \"new\").x - 2))", "def horde_start(self, observation):", "def soho_load(dataset, startdate, enddate, path=None, resample=None, pos_timestamp=None, max_conn=5):\n if not (pos_timestamp=='center' or pos_timestamp=='start' or pos_timestamp is None):\n raise ValueError(f'\"pos_timestamp\" must be either None, \"center\", or \"start\"!')\n\n if dataset == 'SOHO_COSTEP-EPHIN_L2-1MIN':\n df, metadata = soho_ephin_loader(startdate, enddate, resample=resample, path=path, all_columns=False, pos_timestamp=pos_timestamp)\n else:\n trange = a.Time(startdate, enddate)\n cda_dataset = a.cdaweb.Dataset(dataset)\n try:\n result = Fido.search(trange, cda_dataset)\n filelist = [i[0].split('/')[-1] for i in 
result.show('URL')[0]]\n filelist.sort()\n if path is None:\n filelist = [sunpy.config.get('downloads', 'download_dir') + os.sep + file for file in filelist]\n elif type(path) is str:\n filelist = [path + os.sep + f for f in filelist]\n downloaded_files = filelist\n\n for i, f in enumerate(filelist):\n if os.path.exists(f) and os.path.getsize(f) == 0:\n os.remove(f)\n if not os.path.exists(f):\n downloaded_file = Fido.fetch(result[0][i], path=path, max_conn=max_conn)\n\n # downloaded_files = Fido.fetch(result, path=path, max_conn=max_conn) # use Fido.fetch(result, path='/ThisIs/MyPath/to/Data/{file}') to use a specific local folder for saving data files\n # downloaded_files.sort()\n data = TimeSeries(downloaded_files, concatenate=True)\n df = data.to_dataframe()\n\n metadata = _get_metadata(dataset, downloaded_files[0])\n\n # remove this (i.e. following lines) when sunpy's read_cdf is updated,\n # and FILLVAL will be replaced directly, see\n # https://github.com/sunpy/sunpy/issues/5908\n # df = df.replace(-1e+31, np.nan) # for all fluxes\n # df = df.replace(-2147483648, np.nan) # for ERNE count rates\n # 4 Apr 2023: previous 2 lines removed because they are taken care of with sunpy\n # 4.1.0:\n # https://docs.sunpy.org/en/stable/whatsnew/changelog.html#id7\n # https://github.com/sunpy/sunpy/pull/5956\n\n # careful!\n # adjusting the position of the timestamp manually.\n # requires knowledge of the original time resolution and timestamp position!\n if pos_timestamp == 'center':\n if (dataset.upper() == 'SOHO_ERNE-HED_L2-1MIN' or\n dataset.upper() == 'SOHO_ERNE-LED_L2-1MIN' or\n dataset.upper() == 'SOHO_COSTEP-EPHIN_L3I-1MIN'):\n df.index = df.index+pd.Timedelta('30s')\n if dataset.upper() == 'SOHO_CELIAS-PM_30S':\n df.index = df.index+pd.Timedelta('15s')\n if pos_timestamp == 'start':\n if dataset.upper() == 'SOHO_CELIAS-SEM_15S':\n df.index = df.index-pd.Timedelta('7.5s')\n\n if isinstance(resample, str):\n df = resample_df(df, resample, pos_timestamp=pos_timestamp)\n except (RuntimeError, IndexError):\n print(f'Unable to obtain \"{dataset}\" data!')\n downloaded_files = []\n df = []\n metadata = []\n return df, metadata", "def __init__(self, data_source, min_sup=MIN_SUPPORT, eq=False):\n self.thd_supp = min_sup\n \"\"\":type thd_supp: float\"\"\"\n self.equal = eq\n \"\"\":type eq: bool\"\"\"\n self.titles, self.data = DataGP.read(data_source)\n \"\"\":type titles: ndarray\"\"\"\n \"\"\":type data: ndarray\"\"\"\n self.row_count, self.col_count = self.data.shape\n self.time_cols = self.get_time_cols()\n self.attr_cols = self.get_attr_cols()\n self.valid_bins = np.array([])\n self.no_bins = False\n self.step_name = '' # For T-GRAANK\n self.attr_size = 0 # For T-GRAANK", "def hcgps(data_src, min_supp=MIN_SUPPORT, max_iteration=MAX_ITERATIONS, step_size=STEP_SIZE, return_gps=False):\n # Prepare data set\n d_set = DataGP(data_src, min_supp)\n d_set.init_attributes()\n attr_keys = [GI(x[0], x[1].decode()).as_string() for x in d_set.valid_bins[:, 0]]\n\n if d_set.no_bins:\n return []\n\n # Parameters\n it_count = 0\n var_min = 0\n counter = 0\n var_max = int(''.join(['1'] * len(attr_keys)), 2)\n eval_count = 0\n\n # Empty Individual Template\n best_sol = structure()\n candidate = structure()\n\n # Best Cost of Iteration\n best_costs = np.empty(max_iteration)\n best_patterns = []\n str_best_gps = list()\n str_iter = ''\n str_eval = ''\n repeated = 0\n\n # generate an initial point\n best_sol.position = None\n # candidate.position = None\n if best_sol.position is None:\n best_sol.position = 
np.random.uniform(var_min, var_max, N_VAR)\n # evaluate the initial point\n apply_bound(best_sol, var_min, var_max)\n best_sol.cost = costfxn(best_sol.position, attr_keys, d_set)\n\n # run the hill climb\n while counter < max_iteration:\n # while eval_count < max_evaluations:\n # take a step\n candidate.position = None\n if candidate.position is None:\n candidate.position = best_sol.position + (random.randrange(var_min, var_max) * step_size)\n apply_bound(candidate, var_min, var_max)\n candidate.cost = costfxn(candidate.position, attr_keys, d_set)\n\n if candidate.cost < best_sol.cost:\n best_sol = candidate.deepcopy()\n eval_count += 1\n str_eval += \"{}: {} \\n\".format(eval_count, best_sol.cost)\n\n best_gp = validategp(d_set, decodegp(attr_keys, best_sol.position))\n \"\"\":type best_gp: GP\"\"\"\n is_present = isduplicate(best_gp, best_patterns)\n is_sub = amcheck(best_patterns, best_gp, subset=True)\n if is_present or is_sub:\n repeated += 1\n else:\n if best_gp.support >= min_supp:\n best_patterns.append(best_gp)\n str_best_gps.append(best_gp.print(d_set.titles))\n\n try:\n # Show Iteration Information\n # Store Best Cost\n best_costs[it_count] = best_sol.cost\n str_iter += \"{}: {} \\n\".format(it_count, best_sol.cost)\n except IndexError:\n pass\n it_count += 1\n\n if max_iteration == 1:\n counter = repeated\n else:\n counter = it_count\n # Output\n out = json.dumps({\"Algorithm\": \"LS-GRAD\", \"Best Patterns\": str_best_gps, \"Iterations\": it_count})\n \"\"\":type out: object\"\"\"\n if return_gps:\n return out, best_patterns\n else:\n return out", "def get_tmin(self):\n tmin = min(sorted(self.srcData.keys()))\n return tmin", "def get_start(network, road_id):\n return network[0][road_id][0]", "def start(self):\n self.log.setLevel(logging.INFO)\n super().start()\n \n self._dts = rift.tasklets.DTS(self.tasklet_info,\n UtCompositeYang.get_schema(),\n self._loop,\n self.on_dts_state_change) \n\n # Set the instance id\n self.instance_name = self.tasklet_info.instance_name\n self.instance_id = int(self.instance_name.rsplit('-', 1)[1])\n self.log.debug(\"Starting TestDriverTasklet Name: {}, Id: {}\".format(\n self.instance_name,\n self.instance_id))\n\n self.state = TaskletState.STARTING", "def get_data(station,starttime,endtime,activity=False,\n rep='/GNOMEDrive/gnome/serverdata/',resample=None):\n setname = \"MagneticFields\"\n dstr = ['%Y','%m','%d','%H','%M']\n dsplit = '-'.join(dstr[:starttime.count('-')+1])\n start = datetime.strptime(starttime,dsplit)\n starttime = construct_utc_from_metadata(start.strftime(\"%Y/%m/%d\"),\n start.strftime(\"%H:%M:%S.%d\"))\n dsplit = '-'.join(dstr[:endtime.count('-')+1])\n end = datetime.strptime(endtime,dsplit)\n endtime = construct_utc_from_metadata(end.strftime(\"%Y/%m/%d\"),\n end.strftime(\"%H:%M:%S.%d\"))\n dataset = []\n for date in numpy.arange(start,end,timedelta(minutes=1)):\n date = date.astype(datetime)\n path1 = rep+station+'/'+date.strftime(\"%Y/%m/%d/\")\n path2 = station+'_'+date.strftime(\"%Y%m%d_%H%M*.hdf5\")\n fullpath = os.path.join(path1,path2)\n dataset += glob.glob(fullpath)\n if len(dataset)==0:\n print \"ERROR: No data files were found...\"\n quit()\n file_order,data_order = {},{}\n for fname in dataset:\n hfile = h5py.File(fname, \"r\")\n segfile = file_to_segment(hfile,setname)\n file_order[segfile] = fname\n data_order[segfile] = hfile\n # Extract sample rate from metadata of last read data file\n sample_rate = hfile[setname].attrs[\"SamplingRate(Hz)\"]\n # Estimate full segment activity list\n activity = 
create_activity_list(station,data_order)\n # Generate an ASCII representation of the GPS timestamped\n # segments of time covered by the input data\n seglist = segmentlist(data_order.keys())\n # Sort the segment list\n seglist.sort()\n # Create list of time series from every segment\n ts_list = generate_timeseries(file_order,setname)\n # Retrieve channel data for all the segments\n full_data = numpy.hstack([retrieve_channel_data(data_order[seg],setname)\n for seg in seglist])\n new_sample_rate = sample_rate if resample==None else resample\n new_data_length = len(full_data)*new_sample_rate/float(sample_rate)\n full_data = scipy.signal.resample(full_data,int(new_data_length))\n # Models a time series consisting of uniformly sampled scalar values\n ts_data = types.TimeSeries(full_data,delta_t=1./new_sample_rate,\n epoch=seglist[0][0])\n for v in data_order.values():\n v.close()\n return ts_data,ts_list,activity,int(starttime),int(endtime)", "def start_time(self) -> datetime:\n return self.root_hartree.start_time", "def get_source(source, data):\n\n # source = 'NCv1.143'\n z = data[source]['z']\n line_width = data[source]['line_width']\n delta_v = 1 * kms # do not care actually, fully degenerate with\n # the column density\n\n # selecting only CO lines\n keys = [key for key in data[source].keys()\n if 'CO' in key and 'eCO' not in key]\n CO_data = Table(np.asarray([(Jlow + 1, data[source][key], data[source]['e' + key])\n for Jlow, key in enumerate(keys)\n if np.isfinite(data[source][key])]),\n names=['Jup', 'flux', 'eflux'],\n dtype=[int, float, float])\n\n Jup = CO_data['Jup'].data\n flux = CO_data['flux'].data * Jykms\n eflux = CO_data['eflux'].data * Jykms\n\n return z, line_width, Jup, flux, eflux", "def get_start_node(self):\n return self._start", "def make_source_dataset(self, current_host_index, num_hosts):\n pass", "def prepare_data():\n #data, label = load_ta_data(), load_ta_target()\n data, label = load_own_data(), load_own_target()\n tra_x, tst_x = split_samples(data)\n tra_y, tst_y = split_samples(label)\n return (tra_x, tst_x, tra_y, tst_y)", "def insert_start(self, data):\n\n if self.head is None:\n self.head = ListNode(data)\n else:\n temp = self.head\n self.head = ListNode(data)\n self.head.next = temp", "def getObservationStart(vis, obsid=-1, verbose=False):\n if (os.path.exists(vis) == False):\n print \"vis does not exist = %s\" % (vis)\n return\n if (os.path.exists(vis+'/table.dat') == False):\n print \"No table.dat. 
This does not appear to be an ms.\"\n print \"Use au.getObservationStartDateFromASDM().\"\n return\n mytb = createCasaTool(tbtool)\n try:\n mytb.open(vis+'/OBSERVATION')\n except:\n print \"ERROR: failed to open OBSERVATION table on file \"+vis\n return(3)\n time_range = mytb.getcol('TIME_RANGE')\n mytb.close()\n if verbose: print \"time_range: \", str(time_range)\n # the first index is whether it is starttime(0) or stoptime(1) \n time_range = time_range[0]\n if verbose: print \"time_range[0]: \", str(time_range)\n if (obsid >= len(time_range)):\n print \"Invalid obsid\"\n return\n if obsid >= 0:\n time_range = time_range[obsid]\n elif (type(time_range) == np.ndarray):\n time_range = np.min(time_range)\n return(time_range)", "def initialize_data(self , station = '', datasets = {} ):\n \n self.datasets = datasets\n self.datasets_keys = datasets.keys()\n self.station = station\n \n data = {} # container for the data of each dataset\n source_configuration = {} # container for the source_configuration of each dataset\n \n\n \n \"\"\" Looping over the datasets \"\"\"\n logging.info('*** Reading and Initializing the data from the netCDF files ')\n \n \n for k,v in datasets.items() :\n logging.info(' Initialising the dataset: *** %s ' , k )\n data[k] = {} \n data['cdm_tables'] = {} \n \n ### alternative with xarray \n #ds = xr.load_dataset(v) \n #observations_table = xr.open_dataset(v , engine = 'h5netcdf' , group = 'observations_table') \n \n ### alternative with netCDF4\n #ds = nc.Dataset(v) \n #data[k]['dateindex'] = ds.variables['dateindex'][0,:] # storing the dateindex \n \n ###for h5py but cant extract date time units !!!\n ds = h5py.File(v , driver=\"core\" ) \n data[k]['df'] = ds # storing the entire file \n try: \n data[k]['source_file'] = ds['source_configuration']['source_file'][0]\n except:\n data[k]['source_file'] = str(v) # temp fix \n \n #data[k]['product_code'] = ds['source_configuration']['product_code'][0] \n #data[k]['recordtimestamp'] = ds['recordtimestamp'].value\n #data[k]['recordindex'] = ds['recordindex'].value \n #ds.close() \n logging.debug('Reading the file with h5py ')\n \n \n # add here appending datasets for the case of ncar_w and ncar_t \n \n \n self.data = data\n self.make_dataframe()\n ds.close()\n \n \"\"\" Reading the header_table, station_configuration, source_configuration \"\"\"\n for k,v in datasets.items() : \n \n #d = xr.open_dataset(v , engine = 'h5netcdf' ) \n #data[k]['recordtimestamp'] = d['recordtimestamp'].values\n #data[k]['recordindex'] = d['recordindex'].values \n \n \n d = xr.open_dataset(v , engine = 'h5netcdf' , group = 'station_configuration') \n data[k]['station_configuration'] = d.to_dataframe() \n #data[k]['station_configuration'] = d ### USELESS ? 
\n logging.debug('Done with %s station_configuration' , str(k) )\n \n \n d = xr.open_dataset(v , engine = 'h5netcdf' , group = 'header_table') \n logging.debug('Loading the header_table') \n if 'header_table' not in list( self.attributes.keys() ): # saving the attributes to be re-applied at the end\n self.attributes['header_table'] = {}\n for var in d.variables:\n self.attributes['header_table'][var] = {}\n self.attributes['header_table'][var]['description'] = d[var].description\n self.attributes['header_table'][var]['external_table'] = d[var].external_table \n data[k]['header_table'] = d.to_dataframe() \n logging.debug('Done with %s ' , k )\n \n logging.info(\"*** Loading the observations_table (might take time) %s\" , k ) \n d = xr.open_dataset(v , engine = 'h5netcdf' , group = 'observations_table') \n \n if 'observations_table' not in list( self.attributes.keys() ): # saving the attributes to be re-applied at the end\n self.attributes['observations_table'] = {}\n for var in d.variables:\n self.attributes['observations_table'][var] = {}\n self.attributes['observations_table'][var]['description'] = d[var].description\n self.attributes['observations_table'][var]['external_table'] = d[var].external_table\n \n \n logging.info(\"*** Loading the source configuration %s\" , k ) \n try: \n d = xr.open_dataset(v , engine = 'h5netcdf' , group = 'source_configuration')\n d = d.isel(hdrlen=[0])\n data[k]['source_configuration'] = d.to_dataframe() ### USELESS ? \n logging.debug('Done with %s source_configuration' , k )\n except: \n data[k]['source_configuration']= pd.DataFrame(np.array( [ [ self.data[k]['source_file'] ] ] ) , columns=['source_file'] ) \n \n if k == 'era5_1': # reading the whole era5_1 feedback (including reanalysis)\n d = xr.open_dataset(v , engine = 'h5netcdf' , group = 'era5fb') \n data[k]['era5fb'] = d.to_dataframe() \n logging.debug('Done with %s era5 feedback ', k )\n \n \"\"\" Reading the CDM tables that do not depend on specific stations or observations (fixed values), for the first file only \"\"\" \n if list(datasets.keys()).index(k) == 0 :\n for t in [ 'crs' , 'observed_variable', 'units' , 'z_coordinate_type' , 'station_type']: \n \n d = xr.open_dataset(v , engine = 'h5netcdf' , group = t) \n #data['cdm_tables'][t] = d.to_dataframe() ### USELESS ?\n data['cdm_tables'][t] = d \n \n d.close() \n ds.close()\n\n \"\"\" Reading the name of the original source file \"\"\"\n source_configuration[k] = {} \n source_configuration[k]['source_file'] = [ c for c in v.split('/') if '.nc' in c][0]\n\n \n \"\"\" Storing the station configurations \"\"\" \n self.source_configuration = source_configuration \n \n \"\"\" Making all date_times \"\"\" \n self.make_all_datetime()\n \n \n \"\"\" feedback columns \"\"\"\n if 'era5_1' in list (self.data.keys() ):\n self.fb_columns = list(self.data['era5_1']['era5fb'].columns ) \n else:\n self.fb_columns = ['empty']", "def _get_start_params(self, start_params=None):\n if start_params is None:\n if hasattr(self, 'start_params'):\n start_params = self.start_params\n elif self.exog is not None:\n # fails for shape (K,)?\n start_params = [0] * self.exog.shape[1]\n else: # pragma: no cover\n raise ValueError(\"If exog is None, then start_params should \"\n \"be specified\")\n return start_params", "def _pasrse_data_start_end(self, data):\n first = data['obs_time'].iloc[0]\n last = data['obs_time'].iloc[-1]\n\n return (first, last)", "def format_start(self):\n logging.info(\" itr h => cost set troom droom tout dout = t rwd\")\n logging.info(\" %7.1f %4.1f 
%7.1f %7.1f %4.1f %4.1f\" % (\n self.state['heat_cost'],\n self.state['set_temp'],\n self.state['room_temp'],\n self.state['room_temp_change'],\n self.state['outside_temp'],\n self.state['outside_temp_change'],\n ))", "def get_start_time(self):\n start = datetime.strptime(\n self.get_handler().SOURCE_START_DATE.split('.')[0],\n '%Y%m%d%H%M%S'\n )\n return start", "def hgvs_start(self):\n try:\n return self.hp.parse(self.term).posedit.pos.start\n except hgvs.exceptions.HGVSParseError:\n # Log me\n # print(self.term)\n return None", "def __init__(self, start_node):\n self.start_node = start_node", "def test_parse_hgts_riatahgt(self):\n with open(self.riatahgt_output_hgt_fp, 'r') as f:\n output = parse_hgts(f, 'riata-hgt')\n self.assertEqual(int(output), 1)", "def starting_nodes(self):\n return self.starting_nodes_ #abstract requires this exists!", "def create_lat_lon_date_data(gt_id,\n target_horizon,\n experiment,\n past_gt_ids=[\"contest_precip\", \"contest_tmp2m\"],\n forecast_models=[\"nmme\",\"nmme0\"],\n other_lat_lon_date_features=[\"contest_rhum.sig995\",\n \"contest_pres.sfc.gauss\"]):\n\n time_start = time.time()\n\n # Add forecasts to list of forecast IDs\n forecast_variable = get_forecast_variable(gt_id) # 'prate' or 'tmp2m'\n forecast_ids = ['{}-{}-{}'.format(forecast, forecast_variable, target_horizon)\n for forecast in forecast_models]\n\n # -----------\n # Generate relevant variable and column names\n # -----------\n\n # Identify measurement variable name\n measurement_variable = get_measurement_variable(gt_id) # 'tmp2m' or 'prate'\n\n # Keep track of relevant column names\n gt_col = measurement_variable\n clim_col = measurement_variable+\"_clim\"\n anom_col = measurement_variable+\"_anom\"\n\n # Inverse of standard deviation of anomalies for each start_date\n anom_inv_std_col = anom_col+\"_inv_std\"\n\n # --------\n # Prepare experiment cache directory and saved file names\n # --------\n\n # Name of cache directory for storing non-submission-date specific\n # intermediate files\n cache_dir = os.path.join('results', experiment, 'shared',\n '{}_{}'.format(gt_id, target_horizon))\n # e.g., cache_dir = 'results/regression/shared/contest_precip_34w'\n\n # if cache_dir doesn't exist, create it\n if not os.path.isdir(cache_dir):\n os.makedirs(cache_dir)\n\n # Filenames for data file to be stored in cache_dir\n lat_lon_date_data_file = os.path.join(\n cache_dir, \"lat_lon_date_data-{}_{}.h5\".format(gt_id, target_horizon))\n\n # --------\n # Load mask indicating which grid points count in the contest (1=in, 0=out)\n # --------\n print \"Loading contest mask\"\n t = time.time()\n mask_df = get_contest_mask()\n print \"Elapsed: {}s\".format(time.time() - t)\n\n # --------\n # Creates and saves lat_lon_date_data dataframe\n # --------\n # Load masked lat lon date features restricted to years >= get_first_year(gt_id)\n # Note: contest lat lon date features and forecasts are pre-masked, so there\n # is no need to mask explcitily\n print \"Loading lat lon date features\"\n num_gt_ids = len(past_gt_ids)\n # For each measurement,\n # get number of days between start date of observation period used for prediction\n # (2 weeks + 1 submission day behind for most predictors) and start date of\n # target period (2 or 4 weeks ahead)\n past_start_deltas = [get_start_delta(target_horizon, past_gt_id)\n for past_gt_id in past_gt_ids]\n other_start_deltas = [get_start_delta(target_horizon, other_gt_id)\n for other_gt_id in other_lat_lon_date_features]\n # Additionally keep track of days between 
forecast date and start date of\n # target period\n forecast_delta = get_forecast_delta(target_horizon)\n\n lat_lon_date_data = get_lat_lon_date_features(\n gt_ids=other_lat_lon_date_features + other_lat_lon_date_features\n + other_lat_lon_date_features,\n gt_masks=None,\n gt_shifts=other_start_deltas +\n [2*delta for delta in other_start_deltas] +\n [365]*len(other_lat_lon_date_features),\n forecast_ids=forecast_ids + forecast_ids,\n forecast_masks=None,\n forecast_shifts=[None]*len(forecast_ids) + [forecast_delta]*len(forecast_ids),\n anom_ids=[gt_id] + past_gt_ids + past_gt_ids + past_gt_ids,\n anom_masks=None,\n anom_shifts=[None] + past_start_deltas +\n [2*delta for delta in past_start_deltas] +\n [365]*len(past_gt_ids),\n first_year=get_first_year(gt_id)\n )\n\n print \"Loading additional lat lon date features\"\n t = time.time()\n # Add CFSv2 mean as feature\n if 'cfsv2' in forecast_models:\n cfsv2_models = ['cfsv2_op_delta_2w_1d_6h', 'cfsv2_op_delta_2w_1d_12h',\n 'cfsv2_op_delta_2w_1d_18h', 'cfsv2_op_delta_2w_2d_0h',\n 'cfsv2_op_delta_2w_2d_6h', 'cfsv2_op_delta_2w_2d_12h',\n 'cfsv2_op_delta_2w_2d_18h', 'cfsv2_op_delta_2w_3d_0h']\n lat_lon_date_data['cfsv2_mean'] = lat_lon_date_data[cfsv2_models].mean(axis=1)\n lat_lon_date_data[\"cfsv2_mean_shift\"+str(start_delta)] = lat_lon_date_data[\n [model+\"_shift\"+str(start_delta) for model in cfsv2_models]].mean(axis=1)\n # Add inverse of standard deviation of anomalies for each start_date\n lat_lon_date_data[anom_inv_std_col] = \\\n 1.0/lat_lon_date_data.groupby([\"start_date\"])[anom_col].transform('std')\n\n print \"Elapsed: {}s\".format(time.time() - t)\n\n # Save lat lon date features to disk\n print \"Saving lat lon date features to \"+lat_lon_date_data_file\n t = time.time()\n lat_lon_date_data.to_hdf(lat_lon_date_data_file, key=\"data\", mode=\"w\")\n subprocess.call(\"chmod a+w \"+lat_lon_date_data_file, shell=True)\n print \"Elapsed: {}s\".format(time.time() - t)\n print \"Finished generating lat_lon_date_data matrix.\"\n print \"Total time elapsed: {}s\".format(time.time()-time_start)\n return list(lat_lon_date_data)", "def __init__(self,fn_dts,fn_contigs,wnd_size):\n\n wnd = int(fn_dts.split(\"/\")[-1].split(\"_bp\")[0])\n assert int(wnd)==wnd_size\n self.wnd_size = wnd_size\n print fn_dts\n self.wnd_DTS = DenseTrackSet(fn_contigs,\n fn_dts,\n overwrite=False,\n openMode='r')\n \n self.contigs = self.wnd_DTS.mContigNameLen\n self.starts=self.wnd_DTS[\"starts\"]\n self.ends=self.wnd_DTS[\"ends\"]\n self.cps=self.wnd_DTS[\"copy\"]", "def __init__(self, path=None):\n # Verify path to data set.\n if path is None:\n path = Path(os.path.abspath(__file__))\n _root = path.parents[2]\n self.data_path = _root / \"GTS/01BasicInputData\"\n else:\n self.data_path = Path(path)\n\n logger.info(f\"GTS-ISC data located at: {self.data_path}.\")\n assert self.data_path.is_dir()\n\n # ========= CONSTANTS ==========================================================================================\n # Swiss to gts coordinates.\n self.gts_coordinates = np.array((667400, 158800, 1700))\n\n # Name of boreholes.\n self.borehole_types = {\n \"FBS\": np.array([1, 2, 3]),\n \"SBH\": np.array([1, 3, 4]), # Note the skip of numbering for SBH\n \"INJ\": np.array([1, 2]),\n \"PRP\": np.array([1, 2, 3]),\n \"GEO\": np.array([1, 2, 3, 4]),\n }\n\n self.boreholes = [\n bh_set + str(bh_num)\n for bh_set in self.borehole_types\n for bh_num in self.borehole_types[bh_set]\n ]\n\n # Name of shearzones\n self.shearzone_types = {\"S1\": np.array([1, 2, 3]), \"S3\": 
np.array([1, 2])}\n\n self.shearzones = [\n sz_set + \"_\" + str(sz_num)\n for sz_set in self.shearzone_types\n for sz_num in self.shearzone_types[sz_set]\n ]\n\n # ============ LOAD DATA =======================================================================================\n\n # 1. Step: Load all available data. ============================================================================\n # Load borehole data\n self.borehole_geometry = self._borehole_data()\n\n # Load borehole structure data\n self.borehole_structures = self._borehole_structure_data()\n\n # Load tunnel structures (only shear-zones and fractures)\n self.tunnel_structures = self._tunnel_shearzone_data()\n\n # Load interpolation-ready shear-zone - borehole intersections\n # i.e. 1-1 (-0) mapping between shear-zones and boreholes.\n self.shearzone_borehole_geometry = self._shearzone_borehole_data()\n\n # 2. Step: All characterized structures ========================================================================\n self.structures = self._full_structure_geometry()", "def extract_start_state(self):\n time = rospy.get_time()\n ref_time = time - self.last_time\n future_time = ref_time + self.update_rate\n\n return df.compute_output3D(self.solution, self.order, self.time[self.index], future_time)", "def write_starting_points(self):\n num_params = self.f['/parameters/parameterNames'].shape[0]\n num_starting_points = 100\n np.random.seed(0)\n starting_points = self.f.require_dataset(\n '/optimizationOptions/randomStarts',\n [num_params, num_starting_points], 'f8')\n lower = self.f['/parameters/lowerBound'][:]\n upper = self.f['/parameters/upperBound'][:]\n starting_points[:] = np.transpose(\n np.random.rand(num_starting_points, num_params) * (\n upper - lower) + lower)\n\n if 'nominalValue' in self.parameter_df:\n self.f['/parameters/nominalValues'] = \\\n self.parameter_df.nominalValue[\n self.parameter_df.estimate == 1]", "def tpm3_1_8_start_genomic():\n return \"TPM3\", \"NC_000001.11\", 154191901, 154192135, -1", "def getStartVertex(self):", "def initialize_simulator(self, startTime=None):\n \n # Load the inputs and check if any problem. If any exits.\n # Align inputs while loading.\n if not self.load_input(align = True):\n return False\n \n # Load the outputs and check if any problems. 
If any exits.\n if not self.load_outputs():\n return False\n \n # Take the time series: the first because now they are all the same (thanks to alignment)\n time = self.inputs[0].get_data_series().index\n \n # Define the initial time for the initialization\n if startTime == None:\n # Start time not specified, start from the beginning\n index = 0\n else:\n \n # Check that the type of start time is of type datetime\n if not isinstance(startTime, datetime.datetime):\n raise TypeError(\"The parameter startTime has to be of datetime.datetime type\")\n \n # Start time specified, start from the closest point\n if (startTime >= time[0]) and (startTime <= time[-1]):\n index = 0\n for t in time:\n if t < startTime:\n index += 1\n else:\n break\n else:\n index = 0\n raise IndexError(\"The value selected as initialization start time is outside the time frame\")\n \n # Once the index is know it can be used to define the start_time\n # If the offset is specified then use it as start time\n start_time = time[index]\n \n # Take all the data series\n Ninputs = len(self.inputs)\n start_input = numpy.zeros((1, Ninputs))\n start_input_1 = numpy.zeros((1, Ninputs))\n start_input_2 = numpy.zeros((1, Ninputs))\n i = 0\n if index == 0:\n for inp in self.inputs:\n dataInput = numpy.matrix(inp.get_data_series().values).reshape(-1,1)\n start_input[0, i] = dataInput[index,0]\n i += 1\n else:\n for inp in self.inputs:\n dataInput = numpy.matrix(inp.get_data_series().values).reshape(-1,1)\n start_input_1[0, i] = dataInput[index-1,0]\n start_input_2[0, i] = dataInput[index,0]\n \n # Linear interpolation between the two values\n dt0 = (time[index] - start_time).total_seconds()\n dT1 = (start_time - time[index-1]).total_seconds()\n DT = (time[index] - time[index-1]).total_seconds()\n \n # Perform the interpolation\n start_input[0, i] = (dt0*start_input_1[0, i] + dT1*start_input_2[0, i])/DT\n \n i += 1\n \n # Initialize the model for the simulation\n self.opts[\"initialize\"] = True\n \n try:\n # Simulate from the initial time to initial time + epsilon\n # thus we have 2 points\n \n # Create the input objects for the simulation that initializes\n input_u = numpy.hstack((start_input, start_input))\n input_u = input_u.reshape(2, -1)\n \n time = pd.DatetimeIndex([start_time, start_time])\n\n # Run the simulation, remember that\n # time has to be a dateteTimeIndex and Input has to be a numpy.matrix\n self.simulate(time=time, input=input_u)\n self.opts[\"initialize\"] = False\n \n # Initialize the selected variables and parameters to the values indicated \n # Done after very small simulation because there can be some internal parameters that defines\n # the initial value and may override the initialization with the indicated values\n # THIS DOESN'T WORK WITH MODELICA CONSTANTS!\n for v in self.variables:\n v.modify_initial_value_in_fmu(self.fmu)\n for p in self.parameters:\n p.modify_initial_value_in_fmu(self.fmu)\n \n return True\n \n except ValueError:\n logger.error(\"First simulation for initialize the model failed\")\n return False", "def genStartGraph(Xs, numnodes, td, fitinfo):\n if fitinfo.startGraph==\"cn_valid\":\n graph = conceptualNetwork(Xs, numnodes, td=td, valid=True, fitinfo=fitinfo)\n elif fitinfo.startGraph==\"pf_valid\":\n graph = pathfinder(Xs, numnodes, valid=True, td=td)\n elif (fitinfo.startGraph==\"rw\" or fitinfo.startGraph==\"nrw\"):\n graph = naiveRandomWalk(Xs,numnodes)\n elif fitinfo.startGraph==\"fully_connected\":\n graph = fullyConnected(numnodes)\n elif fitinfo.startGraph==\"empty_graph\":\n 
graph = np.zeros((numnodes,numnodes)).astype(int) # useless...\n else:\n graph = np.copy(fitinfo.startGraph) # assume a graph has been passed as a starting point\n return graph", "def _load_source_tick_data(tick_data: Sequence[Dict[str, Any]]):\n\n source_values = array('d')\n source_volumes = array('d')\n source_times = array('d')\n\n tick = tick_data[0]\n\n last_value = tick['C']\n last_volume = tick['BV']\n last_time = tick['T']\n\n source_values.append(last_value)\n source_volumes.append(last_volume)\n source_times.append(last_time)\n\n for tick in tick_data[1:]:\n close_time = tick['T']\n\n while int(close_time - last_time) > config['tick_interval_secs']:\n last_time += config['tick_interval_secs']\n source_values.append(last_value)\n source_volumes.append(0.0)\n source_times.append(last_time)\n\n last_value = tick['C']\n last_volume = tick['BV']\n last_time = tick['T']\n\n source_values.append(last_value)\n source_volumes.append(last_volume)\n source_times.append(last_time)\n\n return (source_values, source_times, source_volumes)", "def loadSeqToSourceMap(self, start_job, input_set):\n try:\n con = self.getSFFDatabaseConnection()\n if start_job:\n db_output=con.cursor().callproc('load_seq_to_source_package.array_insert',\n input_set)\n return True\n except Exception, e:\n print 'Exception caught: %s.\\nThe error is: %s' % (type(e), str(e))\n return False", "def locus_start(self):\n return int(open(self.locus_file).read().split('\\t')[3])", "def get_xmin(self, start, hours, param):\n\n # Process variable\n data = self.get_hour_data(start, param)\n for hour in range(1, hours):\n try:\n data = np.amin([data, self.get_hour_data(start + hour, param)],\n axis=0)\n except ValueError as e:\n continue\n return data", "def _get_start(self):\n return self._start", "def prepare_lines_data(self):\n for l_hd in self.hour_data:\n if not self.node_from or not self.node_to:\n print('ERROR! 
line %i-%i has no node(s)' % (self.node_from_code, self.node_to_code))\n if l_hd.state and self.node_from.get_node_hour_state(l_hd.hour) \\\n and self.node_to.get_node_hour_state(l_hd.hour):\n if not self.type:\n node_start = self.node_from_code\n node_finish = self.node_to_code\n base_coeff = 0\n k_pu = 0\n else:\n node_start = self.node_to_code\n node_finish = self.node_from_code\n base_coeff = self.node_to.voltage_class / self.node_from.voltage_class\n k_pu = math.sqrt(math.pow(self.kt_re, 2) + math.pow(self.kt_im, 2))\n lag = math.atan(self.kt_im / self.kt_re) if self.kt_re else 0\n\n self.eq_db_lines_data.append((\n l_hd.hour, node_start, node_finish, self.parallel_num, self.type,\n max(self.node_from.voltage_class, self.node_to.voltage_class), base_coeff,\n l_hd.r, l_hd.x, l_hd.g, -l_hd.b, k_pu, lag, -l_hd.b_from, -l_hd.b_to\n ))", "def start(self) -> pdarray:\n return self._starts", "def extract_gti_data(hdu_in):\n data = hdu_in.data\n exposure = hdu_in.header['EXPOSURE']\n tstop = hdu_in.header['TSTOP']\n return (data, exposure, tstop)", "def set_start_time(self, *args, **kwargs):\n return _uhd_swig.usrp_source_sptr_set_start_time(self, *args, **kwargs)", "def read_head(hed_fname, reaches=None):\n with flopy.utils.HeadFile(hed_fname) as b:\n data = b.get_data()\n if reaches is not None:\n reaches[\"head\"] = data[reaches[\"k\"], reaches[\"i\"], reaches[\"j\"]]\n return data", "def test_data_source_soaps_get(self):\n pass", "def get_start_point(self):\n return self.first_point", "def main(hotstart_input, hotstart_bcg, hotstart_output):\r\n fpath_in = hotstart_input\r\n fpath_out = hotstart_output\r\n fpath_bcg = hotstart_bcg\r\n\r\n with nc.Dataset(fpath_in, 'r') as src, \\\r\n nc.Dataset(fpath_bcg, 'r') as bcg, \\\r\n nc.Dataset(fpath_out, \"w\") as dst:\r\n # copy attributes\r\n for name in src.ncattrs():\r\n dst.setncattr(name, src.getncattr(name))\r\n # copy dimensions\r\n print(\"Copy dimensions...\")\r\n for name, dimension in src.dimensions.items():\r\n dst.createDimension(\r\n name,\r\n (len(dimension) if not dimension.isunlimited()\r\n else None))\r\n # Copy variables\r\n print(\"Copy variables...\")\r\n for name, variable in src.variables.items():\r\n print(\"Variable: \", name)\r\n dimensions = variable.dimensions\r\n dst.createVariable(\r\n name, variable.datatype, dimensions)\r\n if name == 'SED3D_bedfrac':\r\n dst.variables[name][:] = bcg.variables[name][:]\r\n else:\r\n dst.variables[name][:] = src.variables[name][:]", "def predict_start():\n data = request.json\n\n if data:\n predictor.pred_dict[\"start_date\"] = data[\"start_date\"]\n else:\n pass\n\n return 'Non tam praeclarum est scire latine, quam turpe nescire'", "def create_start_data(self):\n\t\tdef inputMesh(feature_size):\n\t\t\tc1= np.expand_dims(np.array([0,-0.9]),0)\n\t\t\tc2= np.expand_dims(np.array([-0.9,0.9]),0)\n\t\t\tc3= np.expand_dims(np.array([0.9,0.9]),0)\n\t\t\tx1 = np.expand_dims(np.pad(np.array([0,-0.9]),(0,feature_size-2),'constant',constant_values=(0,0)),0)\n\t\t\tx2 = np.expand_dims(np.pad(np.array([-0.9,0.9]),(0,feature_size-2),'constant',constant_values=(0,0)),0)\n\t\t\tx3 = np.expand_dims(np.pad(np.array([0.9,0.9]),(0,feature_size-2),'constant',constant_values=(0,0)),0)\n\t\t\tedge_index = np.transpose(np.array([[0, 1], [0, 2], [1, 0], [1, 2], [2, 0], [2, 1]])) # COO format\n\t\t\treturn np.concatenate((c1,c2,c3),axis=0), np.concatenate((x1,x2,x3),axis=0),edge_index\n\n\t\tc, x, edge_index = inputMesh(self.params.feature_size)# x is c with zeros appended, x=f ..pixel2mesh\n\t\tdata_list_x 
= []\n\t\tdata_list_c = []\n\t\tdata_list_pid = []\n\t\tfor i in range(self.params.batch_size):\n\t\t\tdata_list_x.append(Data(x=torch.Tensor(x).type(dtypeF), edge_index=torch.Tensor(edge_index).type(dtypeL)))\n\t\t\tdata_list_c.append(Data(x=torch.Tensor(c).type(dtypeF), edge_index=torch.Tensor(edge_index).type(dtypeL)))\n\t\t\tdata_list_pid.append(Data(x=torch.zeros(c.shape[0],1).type(dtypeL).requires_grad_(False)))\n\t\tbatch_x = Batch.from_data_list(data_list_x)\n\t\tbatch_c = Batch.from_data_list(data_list_c)\n\t\tbatch_pid = Batch.from_data_list(data_list_pid)\n\t\treturn batch_x, batch_c, batch_pid", "def test_load_gtis(self):\n fits_file = os.path.join(self.datadir, 'monol_testA.evt')\n hen.io.load_gtis(fits_file)", "def prep_projected(self):\n\n self.config.logger.info(\"Preparing projected land use data...\")\n\n # set start time\n t0 = time.time()\n\n if self.config.gcamwrapper_df is not None:\n\n self.config.logger.info(f\"Using projected GCAM data from `gcamwrapper` data frame\")\n projected_land_cover_file = proc.format_gcam_data(self.config.gcamwrapper_df,\n f_out='',\n start_year=self.config.start_year,\n through_year=self.config.end_year,\n region_name_field='gcam_region_name',\n region_id_field='gcam_region_id',\n basin_name_field='glu_name',\n basin_id_field='basin_id',\n output_to_csv=False)\n\n elif self.config.gcam_database is not None:\n\n self.config.logger.info(f\"Using projected GCAM data from: {self.config.gcam_database}\")\n projected_land_cover_file = rdr.read_gcam_land(self.config.gcam_database_dir,\n self.config.gcam_database_name,\n self.config.gcam_query, self.d_bsnnm_id,\n self.config.metric, self.config.crop_type)\n\n\n else:\n self.config.logger.info(f\"Using projected GCAM data from: {self.config.projected_lu_file}\")\n projected_land_cover_file = self.config.projected_lu_file\n\n # extract and process data contained from the land allocation GCAM output file\n gcam_data = rdr.read_gcam_file(projected_land_cover_file,\n self.gcam_landclasses,\n start_yr=self.config.start_year,\n end_yr=self.config.end_year,\n timestep=self.config.timestep,\n scenario=self.config.scenario,\n region_dict=self.d_regnm_id,\n agg_level=self.config.agg_level,\n area_factor=self.config.proj_factor,\n metric_seq=self.metric_sequence_list,\n logger=self.config.logger)\n\n # unpack variables\n self.user_years, self.gcam_ludata, self.gcam_aez, self.gcam_landname, self.gcam_regionnumber, self.allreg, \\\n self.allregnumber, self.allregaez, self.allaez, self.metric_id_array, self.sequence_metric_dict = gcam_data\n\n self.config.logger.info('PERFORMANCE: Projected landuse data prepared in {0} seconds'.format(time.time() - t0))", "def __init__(self, data, startLat, startLon, delta, numX, numY):\n self.data = data\n self.startLat = startLat\n self.startLon = startLon\n self.delta = delta\n self.xCells = numX\n self.yCells = numY", "def ReadData(self, tstep):\n fname = self.fname[tstep]\n t0 = self.tind[tstep]\n \n print 'Reading SUNTANS data at time: %s...'%datetime.strftime(self.timei[tstep],'%Y-%m-%d %H:%M:%S') \n nc = Dataset(fname)\n \n self.time = nc.variables['time'][t0]\n \n self.temp = nc.variables['temp'][t0,:,:]\n self.salt = nc.variables['salt'][t0,:,:]\n self.uc = nc.variables['uc'][t0,:,:]\n self.vc = nc.variables['vc'][t0,:,:]\n self.nu_v = nc.variables['nu_v'][t0,:,:]\n self.rho = nc.variables['rho'][t0,:,:]\n self.tau_x = nc.variables['tau_x'][t0,:]\n self.tau_y = nc.variables['tau_y'][t0,:]\n self.eta = nc.variables['eta'][t0,:]", "def addSource(self, data):\n # read 
input\n self.example_ids.append(data[\"example_id\"])\n self.src_char.append(torch.LongTensor(data['char_id']).contiguous())\n #src: snt_length x n_feature, contiguous means in memory in C order\n self.src.append(torch.LongTensor([data[\"snt_id\"],data[\"lemma_id\"],data[\"pos_id\"],data[\"ner_id\"]]).t().contiguous())\n #source, before preprocessing into tensor, includes labels and tokens\n if \"mwe\" not in data:\n data[\"mwe\"] = 'O' * len(data[\"tok\"])\n self.src_source.append([data[\"tok\"],data[\"lem\"],data[\"pos\"],data[\"ner\"],data[\"mwe\"],data[\"anchors\"]])", "def format_start_time(self, data):\n return data", "def get_first_timepoints(sj):\n print('> Get first timepoints {}'.format(sj))\n sj_parameters = pickle.load(open(jph(pfo_subjects_parameters, sj), 'r'))\n\n study = sj_parameters['study']\n category = sj_parameters['category']\n\n root_subject = jph(root_study_rabbits, 'A_data', study, category, sj)\n pfi_DWI_Eddi_corrected = jph(root_subject, 'z_tmp', 'z_DWI', '{}_DWI_eddy.nii.gz'.format(sj))\n\n im = nib.load(pfi_DWI_Eddi_corrected)\n\n im_new = set_new_data(im, new_data=im.get_data()[..., :S0_timepoints])\n\n pfi_only_S0 = jph(root_output, '{}_DWI_only_S0.nii.gz'.format(sj))\n nib.save(im_new, pfi_only_S0)", "def visualizeTrainData(self, filePrefix):\n # route index\n flightNum = self.routes.index(filePrefix)\n\n # concatenate the buy or wait info to get the total datas\n y_train = self.y_train.reshape((self.y_train.shape[0],1))\n y_train_price = self.y_train_price.reshape((self.y_train_price.shape[0],1))\n\n X_train = np.concatenate((self.X_train, y_train, y_train_price), axis=1)\n\n # choose one route datas\n X_train = X_train[np.where(X_train[:, flightNum]==1)[0], :]\n\n # remove dummy variables\n # feature 0: departure date; feature 1: observed date state\n # feature 2: minimum price; feature 3: maximum price\n # feature 4: current price; feature 5: expected minimum price;\n # feature 6: current price\n X_train = X_train[:, 8:15]\n\n # group by the feature: departure date\n departureDates_train = np.unique(X_train[:, 0])\n\n # get the final datas, the observed data state should be from large to small(i.e. 
for time series)\n length_test = []\n for departureDate in departureDates_train:\n indexs = np.where(X_train[:, 0]==departureDate)[0]\n datas = X_train[indexs, :]\n length_test.append(len(datas))\n print departureDate\n print datas", "def main(datafilepath):\n #create midline\n sectionsize = 10000\n TrackData = TrackMaker(sectionsize) # 10000\n moving_window = sectionsize*2\n midline = TrackData[0] \n sections = TrackData[2]\n #midline = midline[sections[0]:sections[5],:] #only work with the midline of the trial \n #steergaze_df = pd.read_feather(datafilepath)\n steergaze_df = pd.read_csv(datafilepath, sep=',',header=0)\n #steergaze_df.reset_index()\n master_steergaze = pd.DataFrame()\n datafolder = os.path.split(datafilepath)[0] \n\n #TODO: due to grouping the future path cuts - off at end of slalom, use the continuous trajectory across roadsections for fp mapping\n\n #modes taken from gaze_through_midline_densities.py\n entry = find_closest_index(midline, [-23, 69])\n firstobject = find_closest_index(midline, [25, 52])\n gazemodes = [entry, firstobject]\n\n mid_diff = np.linalg.norm(np.diff(midline, axis=0, prepend = np.array([[0,0]])), axis = 1)\n midline_dist_array = np.cumsum(mid_diff)\n\n tree = spatial.cKDTree(midline)\n\n #for trial in picked_trials:\t\n for block, blockdata in steergaze_df.groupby(['ID','block']):\n\n print(block)\n begin = timer()\n\n\n blockdata = blockdata.copy()\n blockdata.sort_values('currtime', inplace=True)\n # blockdata.reset_index()\n\n ####pick target\n \"\"\"\n condition = blockdata.condition.values[0]\n target_centres = targets.loc[targets['condition']==int(condition),:]\n #pprint(target_centres)\n\n target_centres = target_centres.reset_index(drop=True)\n #pick starting position.\n start_x = np.sign(blockdata['posx']).values[0]\n #select targets with opposite sign for xcentre, these will be the ones encountered in that block\n target_centres = target_centres.loc[np.sign(target_centres['xcentre'])!=start_x,:] \n target_circles = dp.target_position_circles(target_centres)\n\n \"\"\"\n\n traj_x = blockdata['posx'].values\n traj_z = blockdata['posz'].values\n trajectory = np.transpose(np.array([traj_x, traj_z]))\n\n yaw = blockdata['yaw'].values\n \n #gaze_on_screen = blockdata['hangle'].values, blockdata['vangle'].values\n gaze_on_screen = np.transpose(np.array([blockdata['hangle'].values, blockdata['vangle'].values]))\n\n #print(yaw[0])\n #index = i\n #\tviewpoint = blockdata['posx'].values, blockdata['posz'].values\n roadsection = blockdata['roadsection'].values\n\n #find time headway along MIDLINE \n \"\"\"\n start = timer()\n #idx, *_ = find_closest_index(midline, trajectory[0,:])\n idx = [find_closest_index(midline, viewpoint) for viewpoint in trajectory] \n print(idx[:10])\n print(timer()-start)\n \"\"\"\n\n #closest_indexes = [closest_node(midline, viewpoint) for viewpoint in trajectory] \n #closest indexes\n #print(np.take(midline, 5, axis = 0, mode = 'wrap'))\n #print(np.take(midline, len(midline), axis = 0, mode = 'wrap'))\n #print(np.take(midline, 0, axis = 0, mode = 'wrap'))\n _, closest_indexes = tree.query(trajectory) \n\n end_of_view = closest_indexes + moving_window\n\n #futuremid = np.take(midline, range(closest_indexes[0], end_of_view[0]), axis = 0, mode = 'wrap')\n def takemid(c,e):\n return (np.take(midline, range(c, e), axis = 0, mode = 'wrap'))\n\n start = timer()\n ml_idx, ml_screen_refs, ml_world_refs, ml_th = zip(*[\n closest_on_screen_point(takemid(c,e), t, y, g) \n for c, e, t, y, g in zip(closest_indexes, end_of_view, 
trajectory, yaw, gaze_on_screen)\n ])\n print(timer() - start) \n \n print(ml_screen_refs.shape)\n print(type(ml_screen_refs))\n ml_screen_refs = ml_screen_refs.reshape(-1, 2)\n ml_world_refs = ml_world_refs.reshape(-1, 2)\n print(ml_th)\n\n blockdata['midline_ref_onscreen_x'] = ml_screen_refs[:, 0]\n blockdata['midline_ref_onscreen_z'] = ml_screen_refs[:, 1]\n blockdata['midline_ref_world_x'] = ml_world_refs[:, 0]\n blockdata['midline_ref_world_z'] = ml_world_refs[:, 1]\n blockdata['th_along_midline'] = ml_th\n\n #find closest point on FUTURE PATH, with th calc along the path \n \n traj_index = range(len(trajectory))\n fp_idx, fp_screen_refs, fp_world_refs, fp_th = zip(*[\n closest_on_screen_point(trajectory[i:(i+1000),:], t, y, g) \n for i, t, y, g in zip(traj_index, trajectory, yaw, gaze_on_screen)\n ])\n #future_traj = trajectory[index:(index+window_fp), :]\n #fp_world_ref, fp_idx, dists, fp_angles = closest_on_screen_point(future_traj, viewpoint, yaw, gaze_on_screen)\n print(fp_screen_refs.shape)\n print(type(fp_screen_refs))\n fp_screen_refs = fp_screen_refs.reshape(-1, 2)\n fp_world_refs = fp_world_refs.reshape(-1, 2)\n print(ml_th)\n\n blockdata['futurepath_ref_onscreen_x'] = fp_screen_refs[:, 0]\n blockdata['futurepath_ref_onscreen_z'] = fp_screen_refs[:, 1]\n blockdata['futurepath_ref_world_x'] = fp_world_refs[:, 0]\n blockdata['futurepath_ref_world_z'] = fp_world_refs[:, 1]\n blockdata['th_along_futurepath'] = fp_th\n \n \n\n #TODO: current method runs into problems if the viewpoint is just before the midline resets (i.e. very large midline_dist_array value).\n #but not a problem for current analysis because trial starts from beginning of midline.\n #th_to_entry\n mid_dist_viewpoint = midline_dist_array[idx]\n\n mid_dist_entry = midline_dist_array[gazemodes[0]]\n th_to_entry = (mid_dist_entry - mid_dist_viewpoint) / 8.0 #if it's negative you have passed the point\n blockdata.loc[index,'veh_th_to_entry'] = th_to_entry\n\n #th_to_object\n mid_dist_object = midline_dist_array[gazemodes[1]]\n th_to_object = (mid_dist_object - mid_dist_viewpoint) / 8.0 #if it's negative you have passed the point\n blockdata.loc[index,'veh_th_to_object'] = th_to_object\t\t\n \n \"\"\"\n trialcode = row['trialcode']\n #plot\t\t\t \n #print(\"th_along_midline\", ml_timeheadway)\n #print('ml_ref', ml_world_ref)\n #print(\"th_along_futurepath\", fp_timeheadway)\n #print(\"fp_ref\", fp_world_ref)\n\n world_gaze = dp.angles_to_world(gaze_on_screen, viewpoint, yaw)\n #print(\"world_gaze\", world_gaze)\n\n plt.ylim(angles_limits_bottom[1],angles_limits_top[1])\n plt.xlim(angles_limits_bottom[0],angles_limits_top[0])\n\n plt.plot(ml_angles[:,0],ml_angles[:,1], 'C3o', markersize = .5, )\n plt.plot(fp_angles[:,0],fp_angles[:,1], 'C2o', markersize = .5)\n plt.plot(ml_screen_ref[0],ml_screen_ref[1], 'C1o', markersize = 5, markeredgecolor = 'k')\n plt.plot(fp_screen_ref[0],fp_screen_ref[1], 'C0o', markersize = 5, markeredgecolor = 'k')\n\n plt.plot(gaze_on_screen[0],gaze_on_screen[1], 'mo', markersize = 5, markeredgecolor = 'k')\n plt.title(str(trialcode))\n\n\n plt.pause(.016) \n plt.cla()\n\n plt.show()\n \"\"\"\n\t\t\n #master_steergaze = pd.concat([master_steergaze, blockdata])\n\n\n compute_time = timer()-begin\n print(\"Processing block took %f seconds\" % compute_time)\n\n\n print(\"APPENDING DATA FRAME\")\n outfilepath = datafolder + '/trout_gazeandsteering_addthfrompath2.csv'\n\n with open(outfilepath, 'a', newline = '') as sgfile:\n blockdata.to_csv(sgfile, mode='a', header=sgfile.tell()==0)\n\n 
#master_steergaze.to_csv(datafolder + '/trout_gazeandsteering_addthfrompath.csv')\n\n #master_steergaze.to_feather(datafilepath)", "def head(self, start: int = 0, end: int = 5):\n pprint(self.data[start:end])", "def test_index_start(self):\n sp_file = os.path.join(\"tests\", \"data\", \"geolife\", \"geolife_staypoints.csv\")\n sp = ti.read_staypoints_csv(sp_file, tz=\"utc\", index_col=\"id\", crs=\"epsg:4326\")\n\n # reproject to WGS_1984_UTM_Zone_49N\n sp = sp.to_crs(\"epsg:32649\")\n\n distance_metric_ls = [\"haversine\", \"euclidean\"]\n agg_level_ls = [\"dataset\", \"user\"]\n for distance_metric in distance_metric_ls:\n for agg_level in agg_level_ls:\n _, locations = sp.as_staypoints.generate_locations(\n method=\"dbscan\", epsilon=10, num_samples=1, distance_metric=distance_metric, agg_level=agg_level\n )\n assert (locations.index == np.arange(len(locations))).any()", "def set_start_time(self, *args, **kwargs):\n return _uhd_swig.usrp_source_set_start_time(self, *args, **kwargs)", "def Lstart(gridname='BLANK', tag='BLANK'):\n print(alp)\n \n # put top level information from input into a dict\n Ldir = dict()\n Ldir['gridname'] = gridname\n Ldir['tag'] = tag\n \n import subprocess\n if os.path.isfile(alp + '/user_get_lo_info.sh'): \n subprocess.call([alp + '/user_get_lo_info.sh'])\n else:\n subprocess.call([alp + '/get_lo_info.sh'])\n Ldir_temp = csv_to_dict(alp + '/lo_info.csv')\n Ldir.update(Ldir_temp)\n\n # and add a few more things\n Ldir['gtag'] = Ldir['gridname'] + '_' + Ldir['tag']\n Ldir['grid'] = Ldir['data'] + 'grids/' + Ldir['gridname'] + '/'\n Ldir['forecast_days'] = 3\n \n return Ldir", "def source_input(env, \r\n number, \r\n counter,\r\n generation,\r\n generation_list_come,\r\n generation_list_wait,\r\n generation_list_begin,\r\n generation_list_finish,\r\n df_simtime,\r\n generation_list_name,\r\n g1_list_name): \r\n# global g1_list_name\r\n for i in range(number):\r\n if i == 0:\r\n t = generation_list_come[i]#到达时间服从指数分布,此处的t为间隔时间\r\n else:\r\n t = generation_list_come[i] - generation_list_come[i-1]\r\n yield env.timeout(t)\r\n serve_time = np.random.choice(df_simtime['sim_time'])#得到模拟数据\r\n # print(serve_time)\r\n c = document(env, \r\n g1_list_name[i], \r\n generation, \r\n counter, \r\n time_in_fac,\r\n generation_list_begin,\r\n generation_list_wait,\r\n generation_list_finish,\r\n serve_time,\r\n generation_list_name)\r\n env.process(c)", "def read_segmentation_gt(gt_file):\n with open(gt_file, 'rt') as f_handle:\n reader = csv.reader(f_handle, delimiter='\\t')\n start_times = []\n end_times = []\n labels = []\n for row in reader:\n if len(row) == 3:\n start_times.append(float(row[0]))\n end_times.append(float(row[1]))\n labels.append((row[2]))\n return np.array(start_times), np.array(end_times), labels", "def getStart(self) -> long:\n ...", "def load_tfile_data(self, path, start=None, stop=None, dss=28):\n filename = path+\".\"+str(int(self.name))\n datafile = open(filename,\"r\")\n labels = datafile.readline().strip().split()\n logger.debug(\"load_tfile_data: labels: %s\", labels)\n datafile.close()\n labels.insert(0,'DOY')\n labels.insert(0,'Year')\n logger.debug(\"load_tfile_data: new labels: %s\", labels)\n \n # colums are: Year DOY UTC Epoch Chan Tsys Int Az El Diode Level CryoTemp\n # i4 i4 S8 f8 S2 f4 f4 f4 f4 i4 f4 f4\n data = numpy.loadtxt(filename,skiprows=1,\n dtype = {'names': tuple(labels),\n 'formats': ('i4','i4','S8','f8',\n 'S2','f4','f4','f4','f4','i4','f4', 'f4')})\n return data", "def dataLoader(stationDict, startDate, endDate):\n\n # Generate 
a URL\n url = ('https://waterservices.usgs.gov/nwis/dv/?format=json' +\n # Specify the sites to download\n '&sites=' + stationDict['DatasetExternalID'] +\n # Specify the start date\n '&startDT=' + datetime.strftime( startDate, '%Y-%m-%d' ) +\n #Specify the end data\n '&endDT=' + datetime.strftime( endDate, '%Y-%m-%d' ) +\n # Specify that we want streamflow\n '&parameterCd=00060' +\n # Specify that we want daily means\n '&statCd=00003' +\n # Allow all sites\n '&siteStatus=all' )\n \n # Get the data\n response = requests.get(url)\n\n # Check the status code\n if response.status_code != 200:\n return \n else:\n response = response.json()\n \n # Create a dataframe from the data\n df = pd.DataFrame(response['value']['timeSeries'][0]['values'][0]['value'])\n\n # Set the index to the dateTime index\n df.set_index(pd.DatetimeIndex(pd.to_datetime(df['dateTime'])), inplace = True)\n del df['dateTime'] # Delete the redundant column\n\n # Replace missing data with NaN's\n df['value'].replace(to_replace = '-999999', value = np.nan, inplace = True)\n\n # Convert to numeric\n df['value'] = pd.to_numeric(df['value'])\n \n # Remove any duplicate data in the dataset\n df = df[~df.index.duplicated(keep='last')] # Remove duplicates from the dataset\n df = df[~df.index.isnull()]\n\n # Rename the columns\n df.columns = ['USGS | ' + stationDict['DatasetExternalID'] + ' | Flag', 'USGS | ' + stationDict['DatasetExternalID'] + ' | Streamflow | CFS']\n del df['USGS | ' + stationDict['DatasetExternalID'] + ' | Flag']\n\n # Return the data frame\n return df", "def get_start_time(self):\n return min([m.get_start_time() for m in self._mappers])", "def read_start_params(path_or_database):\n database = load_database(**_process_path_or_database(path_or_database))\n optimization_problem = read_last_rows(\n database=database,\n table_name=\"optimization_problem\",\n n_rows=1,\n return_type=\"dict_of_lists\",\n )\n start_params = optimization_problem[\"params\"][0]\n return start_params", "def dat_file(path):\n info = {}\n pattern = re.compile(\"(.+):\\s*(.+)\")\n with open(path) as fid:\n in_header = True\n while in_header:\n line = fid.readline().strip()\n match = pattern.match(line)\n if match:\n key, value = match.groups()\n info[key] = value\n elif line.startswith('=='):\n in_header = False\n else:\n continue\n\n # time axis\n line = fid.readline()\n times = tuple(map(float, line.split()[2:]))\n tstep = (times[1] - times[0]) / 1000\n tstart = times[0] / 1000\n nsamples = len(times)\n time = UTS(tstart, tstep, nsamples)\n\n # data\n n_locs = int(info['Locations'])\n n_times = int(info['Time samples'])\n data = np.fromfile(fid, 'float64', sep=\" \")\n data = data.reshape((n_locs, n_times + 3))\n locs = data[:, :3]\n data = data[:, 3:]\n source = Scalar(\"source\", np.arange(n_locs))\n src = NDVar(data, (source, time), info, 'src')\n\n return src", "def __init__(self):\r\n self.label = \"Create Inflow File From ECMWF Runoff\"\r\n self.description = (\"Creates RAPID NetCDF input of water inflow \" +\r\n \"based on ECMWF runoff results and previously created weight table.\")\r\n self.canRunInBackground = False\r\n #CJB self.header_wt = ['StreamID', 'area_sqm', 'lon_index', 'lat_index', 'npoints']\r\n self.header_wt = ['rivid', 'area_sqm', 'lon_index', 'lat_index', 'npoints']\r\n #SDR added new structure to fit new ecmwf ##.runoff.nc file order\r\n #self.dims_oi = [['lon', 'lat', 'time'], ['longitude', 'latitude', 'time']]\r\n self.dims_oi = [['lon', 'lat', 'time'], ['longitude', 'latitude', 'time'], ['time','lon','lat']] # 
Line Added/Modified CJB 20190108\r\n #self.vars_oi = [[\"lon\", \"lat\", \"time\", \"RO\"], ['longitude', 'latitude', 'time', 'ro']]\r\n self.vars_oi = [[\"lon\", \"lat\", \"time\", \"RO\"], ['longitude', 'latitude', 'time', 'ro'], [\"time\", \"lon\", \"lat\", \"RO\"]] # Line Added/Modified CJB 20190108\r\n self.length_time = {\"LowRes\": 61, \"Low3HrRes\": 40, \"LowResFull\": 85,\"HighRes\": 125, \"High3HrRes\":3} # *** MJS What is High3HrRes for? Doesn't seem to be used.\r\n #self.length_time = {\"LowResFull\": 85,\"HighRes\": 125}\r\n self.length_time_opt = {\"LowRes-6hr\": 60, \"LowRes-3hr\": 40,\r\n \"LowResFull-3hr-Sub\": 48, \"LowResFull-6hr-Sub\": 36,\r\n \"HighRes-1hr\": 90, \"HighRes-3hr\": 48, \"HighRes-6hr\": 40, # *** MJS HighRes-3hr was changed to 40 before; why?\r\n \"HighRes-3hr-Sub\": 18, \"HighRes-6hr-Sub\": 16}\r\n self.errorMessages = [\"Missing Variable 'time'\",\r\n \"Incorrect dimensions in the input ECMWF runoff file.\",\r\n \"Incorrect variables in the input ECMWF runoff file.\",\r\n \"Incorrect time variable in the input ECMWF runoff file\",\r\n \"Incorrect number of columns in the weight table\",\r\n \"No or incorrect header in the weight table\",\r\n \"Incorrect sequence of rows in the weight table\"]", "def getSrc(self):\n xml = open(self.model, 'r')\n keywd1 = ['RA', 'DEC', 'PointSource']\n ra = []\n dec = []\n nam = []\n sep = []\n target = SkyCoord(ra=self.ra*u.degree, dec=self.dec*u.degree, frame='icrs') \n for line in xml :\n if keywd1[0] in line:\n ra.append( float(line.split('\"')[-2]) )\n if keywd1[1] in line:\n dec.append( float(line.split('\"')[-2]) )\n s = SkyCoord(ra=ra[-1]*u.degree, dec=dec[-1]*u.degree, frame='icrs')\n sep.append(target.separation(s).deg)\n if keywd1[2] in line:\n nam.append( line.split('\"')[3].split()[-1] ) # no '3FGL'\n xml.close()\n\n if self.csys == 'GAL':\n srcPos = SkyCoord(np.array(ra)*u.degree, np.array(dec)*u.degree, frame='icrs')\n ra, dec = srcPos.galactic.l.deg, srcPos.galactic.b.deg\n\n srcs = Table([ra, dec, nam, sep], names=('RA', 'DEC', 'Name', 'Separation'))\n return srcs", "def extract_header(tgt_file):\n with open(tgt_file) as tf:\n h_lines = []\n for t_line in tf:\n s_line = t_line.strip().split()\n if len(s_line) < 2:\n h_lines.append(t_line)\n continue\n try:\n # If we have a timestep, this is not a header line\n int(s_line[0])\n break\n except ValueError:\n h_lines.append(t_line)\n return h_lines", "def prep_data_fn(self, st_train_dt, end_train_dt, st_val_dt, end_val_dt, st_test_dt, end_test_dt):\n df = self.get_prep_data()\n train = df[(df['ft_data_dt'] >= st_train_dt) & (df['ft_data_dt'] <= end_train_dt)]\n val = df[(df['ft_data_dt'] >= st_val_dt) & (df['ft_data_dt'] <= end_val_dt)].sample(frac=0.4, random_state=2021)\n test = df[(df['ft_data_dt'] >= st_test_dt) & (df['ft_data_dt'] <= end_test_dt)]\n print(f'----train----')\n print(train[['ft_data_dt', 'target', 'idd']].groupby(['ft_data_dt', 'target']).agg(['count']))\n print(f'----validation----')\n print(val[['ft_data_dt', 'target', 'idd']].groupby(['ft_data_dt', 'target']).agg(['count']))\n print(f'----test----')\n print(test[['ft_data_dt', 'target', 'idd']].groupby(['ft_data_dt', 'target']).agg(['count']))\n self.set_train(train)\n self.set_validation(val)\n self.set_test(test)\n train_X = train[[c for c in train.columns if c not in ['idd', 'ft_data_dt', 'target']]]\n train_y = train['target']\n val_X = val[[c for c in train.columns if c not in ['idd', 'ft_data_dt', 'target']]]\n val_y = val['target']\n test_X = test[[c for c in train.columns if c 
not in ['idd', 'ft_data_dt', 'target']]]\n        test_y = test['target']\n        self.set_train_X(train_X)\n        self.set_train_y(train_y)\n        self.set_val_X(val_X)\n        self.set_val_y(val_y)\n        self.set_test_X(test_X)\n        self.set_test_y(test_y)", "def load_gdp_distribution_data(data_file):\n    header = parse_data_header(data_file)\n    with open(data_file, 'r') as data:\n        data = np.genfromtxt(data, delimiter=' ', skip_header=6)\n        # Set the areas in which there is no data to 0\n        data[data == header['NODATA_value']] = 0\n    return header, data", "def __display_table(self, start_node):\n\n        print(\"\\n\\n------------------------------------Table------------------------------------------\")\n        print(\"Vertex\\t\\t|\\tShortest Dist from vertex \"+start_node.getId()+\"\\t|\\tPrevious vertex\")\n        \n        temp = sorted(self.__node, key=lambda x: x.getShortestDist())\n        for node in temp:\n            prev_node_id = node.getPrevNode().getId() if node.getPrevNode() != None else '-'\n            print( node.getId()+\"\\t\\t|\\t\\t\\t\"+str(node.getShortestDist())+\"\\t\\t|\\t\\t\"+ prev_node_id)", "def __init__(self, data=None):\n        if data is not None:\n            self.extract_gdelt(data)", "def load_target_grid(self):\n\n        # load the target grid name (expected to be in the settings.txt file)\n        self.grid_name = (self.st['directory_metadata'][0] +\n                          self.st[\"target_grid\"][0])\n\n        if os.path.exists(self.grid_name):\n\n            # open the metadata file\n            self.file = netCDF4.Dataset(self.grid_name)\n\n            # load lat/lon\n            self.lat = self.file.variables[\"latitude\"][:, :]\n            self.lon = self.file.variables[\"longitude\"][:, :]\n\n            try:\n\n                # Global attributes to be read on the THREDDS server\n                self.GRIDTYPE = getattr(self.file, \"GRIDTYPE\")\n                self.MAP_PROJ = getattr(self.file, \"MAP_PROJ\")\n                self.CEN_LON = getattr(self.file, \"CEN_LON\")\n                self.MAP_PROJ_CHAR = getattr(self.file, \"MAP_PROJ_CHAR\")\n                self.STAND_LON = getattr(self.file, \"STAND_LON\")\n                self.TRUELAT1 = getattr(self.file, \"TRUELAT1\")\n                self.TRUELAT2 = getattr(self.file, \"TRUELAT2\")\n                self.CEN_LAT = getattr(self.file, \"CEN_LAT\")\n                self.DX = getattr(self.file, \"DX\")\n                self.DY = getattr(self.file, \"DY\")\n                self.MOAD_CEN_LAT = getattr(self.file, \"MOAD_CEN_LAT\")\n\n            except ValueError:\n                pass\n\n            # Close the file\n            self.file.close()\n\n        else:\n\n            l1 = \"WARNING\"\n            l2 = \"Target Grid: %s not found\" % self.grid_name\n            l3 = \"Can't proceed\"\n            l4 = \"Shutting down the program\"\n            print(\"\")\n            print(int(max([len(l1), len(l2), len(l3), len(l4)]) / 2 -\n                      len(l1) / 2) * \" \" + l1)\n            print(l2)\n            print(l3)\n            print(l4)\n            print(\"\")\n            sys.exit()", "def gpt2_1w (station, dmjd,dlat,dlon,hell,it):\n\n#  need to find diffpod and difflon\n    if (dlon < 0):\n        plon = (dlon + 2*np.pi)*180/np.pi;\n    else:\n        plon = dlon*180/np.pi;\n# transform to polar distance in degrees\n    ppod = (-dlat + np.pi/2)*180/np.pi; \n\n#       % find the index (line in the grid file) of the nearest point\n#  \t  % changed for the 1 degree grid (GP)\n    ipod = np.floor(ppod+1); \n    ilon = np.floor(plon+1);\n    \n#   normalized (to one) differences, can be positive or negative\n#\t% changed for the 1 degree grid (GP)\n    diffpod = (ppod - (ipod - 0.5));\n    difflon = (plon - (ilon - 0.5));\n\n\n# change the reference epoch to January 1 2000\n    print('Modified Julian Day', dmjd)\n    dmjd1 = dmjd-51544.5 \n\n    pi2 = 2*np.pi\n    pi4 = 4*np.pi\n\n# mean gravity in m/s**2\n    gm = 9.80665;\n# molar mass of dry air in kg/mol\n    dMtr = 28.965E-3 \n#    dMtr = 28.965*10^-3 \n# universal gas constant in J/K/mol\n    Rg = 8.3143 \n\n# factors for amplitudes, i.e. 
whether you want time varying\n if (it==1):\n print('>>>> no refraction time variation ')\n cosfy = 0; coshy = 0; sinfy = 0; sinhy = 0;\n else: \n cosfy = np.cos(pi2*dmjd1/365.25)\n coshy = np.cos(pi4*dmjd1/365.25) \n sinfy = np.sin(pi2*dmjd1/365.25) \n sinhy = np.sin(pi4*dmjd1/365.25) \n cossin = np.matrix([1, cosfy, sinfy, coshy, sinhy])\n# initialization of new vectors\n p = 0; T = 0; dT = 0; Tm = 0; e = 0; ah = 0; aw = 0; la = 0; undu = 0;\n undul = np.zeros(4)\n Ql = np.zeros(4)\n dTl = np.zeros(4)\n Tl = np.zeros(4)\n pl = np.zeros(4)\n ahl = np.zeros(4)\n awl = np.zeros(4)\n lal = np.zeros(4)\n Tml = np.zeros(4)\n el = np.zeros(4)\n#\n pgrid, Tgrid, Qgrid, dTgrid, u, Hs, ahgrid, awgrid, lagrid, Tmgrid = read_4by5(station,dlat,dlon,hell)\n#\n for l in [0,1,2,3]:\n KL = l #silly to have this as a variable like this \n# transforming ellipsoidal height to orthometric height:\n# Hortho = -N + Hell\n undul[l] = u[KL] \n hgt = hell-undul[l] \n# pressure, temperature at the height of the grid\n T0 = Tgrid[KL,0] + Tgrid[KL,1]*cosfy + Tgrid[KL,2]*sinfy + Tgrid[KL,3]*coshy + Tgrid[KL,4]*sinhy;\n tg = float(Tgrid[KL,:] *cossin.T)\n# print(T0,tg)\n\n p0 = pgrid[KL,0] + pgrid[KL,1]*cosfy + pgrid[KL,2]*sinfy + pgrid[KL,3]*coshy + pgrid[KL,4]*sinhy;\n \n# humidity \n Ql[l] = Qgrid[KL,0] + Qgrid[KL,1]*cosfy + Qgrid[KL,2]*sinfy + Qgrid[KL,3]*coshy + Qgrid[KL,4]*sinhy;\n \n# reduction = stationheight - gridheight\n Hs1 = Hs[KL]\n redh = hgt - Hs1;\n\n# lapse rate of the temperature in degree / m\n dTl[l] = dTgrid[KL,0] + dTgrid[KL,1]*cosfy + dTgrid[KL,2]*sinfy + dTgrid[KL,3]*coshy + dTgrid[KL,4]*sinhy;\n \n# temperature reduction to station height\n Tl[l] = T0 + dTl[l]*redh - 273.15;\n\n# virtual temperature\n Tv = T0*(1+0.6077*Ql[l]) \n c = gm*dMtr/(Rg*Tv) \n \n# pressure in hPa\n pl[l] = (p0*np.exp(-c*redh))/100 \n \n# hydrostatic coefficient ah\n ahl[l] = ahgrid[KL,0] + ahgrid[KL,1]*cosfy + ahgrid[KL,2]*sinfy + ahgrid[KL,3]*coshy + ahgrid[KL,4]*sinhy;\n \n# wet coefficient aw\n awl[l] = awgrid[KL,0] + awgrid[KL,1]*cosfy + awgrid[KL,2]*sinfy + awgrid[KL,3]*coshy + awgrid[KL,4]*sinhy;\n\t\t\t\t\t \n# water vapor decrease factor la - added by GP\n lal[l] = lagrid[KL,0] + lagrid[KL,1]*cosfy + lagrid[KL,2]*sinfy + lagrid[KL,3]*coshy + lagrid[KL,4]*sinhy;\n\t\t\t\t\t \n# mean temperature of the water vapor Tm - added by GP\n Tml[l] = Tmgrid[KL,0] + Tmgrid[KL,1]*cosfy + Tmgrid[KL,2]*sinfy + Tmgrid[KL,3]*coshy + Tmgrid[KL,4]*sinhy;\n\t\t\t\t\t \t\t \n# water vapor pressure in hPa - changed by GP\n e0 = Ql[l]*p0/(0.622+0.378*Ql[l])/100; # % on the grid\n aa = (100*pl[l]/p0)\n bb = lal[l]+1\n el[l] = e0*np.power(aa,bb) # % on the station height - (14) Askne and Nordius, 1987\n \n dnpod1 = np.abs(diffpod); # % distance nearer point\n dnpod2 = 1 - dnpod1; # % distance to distant point\n dnlon1 = np.abs(difflon);\n dnlon2 = 1 - dnlon1;\n \n# pressure\n R1 = dnpod2*pl[0]+dnpod1*pl[1];\n R2 = dnpod2*pl[2]+dnpod1*pl[3];\n p = dnlon2*R1+dnlon1*R2;\n \n# temperature\n R1 = dnpod2*Tl[0]+dnpod1*Tl[1];\n R2 = dnpod2*Tl[2]+dnpod1*Tl[3];\n T = dnlon2*R1+dnlon1*R2;\n \n# temperature in degree per km\n R1 = dnpod2*dTl[0]+dnpod1*dTl[1];\n R2 = dnpod2*dTl[2]+dnpod1*dTl[3];\n dT = (dnlon2*R1+dnlon1*R2)*1000;\n \n# water vapor pressure in hPa - changed by GP\n R1 = dnpod2*el[0]+dnpod1*el[1];\n R2 = dnpod2*el[2]+dnpod1*el[3];\n e = dnlon2*R1+dnlon1*R2;\n \n# hydrostatic\n R1 = dnpod2*ahl[0]+dnpod1*ahl[1];\n R2 = dnpod2*ahl[2]+dnpod1*ahl[3];\n ah = dnlon2*R1+dnlon1*R2;\n \n# wet\n R1 = dnpod2*awl[0]+dnpod1*awl[1];\n R2 = 
dnpod2*awl[2]+dnpod1*awl[3];\n aw = dnlon2*R1+dnlon1*R2;\n \n# undulation\n R1 = dnpod2*undul[0]+dnpod1*undul[1];\n R2 = dnpod2*undul[2]+dnpod1*undul[3];\n undu = dnlon2*R1+dnlon1*R2;\n\n# water vapor decrease factor la - added by GP\n R1 = dnpod2*lal[0]+dnpod1*lal[1];\n R2 = dnpod2*lal[2]+dnpod1*lal[3];\n la = dnlon2*R1+dnlon1*R2;\n\t\t\n# mean temperature of the water vapor Tm - added by GP\n R1 = dnpod2*Tml[0]+dnpod1*Tml[1];\n R2 = dnpod2*Tml[2]+dnpod1*Tml[3];\n Tm = dnlon2*R1+dnlon1*R2; \n\n return p, T, dT,Tm,e,ah,aw,la,undu", "def readHeaderXTR(self, headerFile: str) -> None:\n with open(headerFile, \"r\") as f:\n lines = f.readlines()\n sectionLines = {}\n # let's get data\n for line in lines:\n line = line.strip()\n line = line.replace(\"'\", \" \")\n # continue if line is empty\n if line == \"\":\n continue\n if \"[\" in line:\n sec = line[1:-1]\n sectionLines[sec] = []\n else:\n sectionLines[sec].append(line)\n # the base class is built around a set of headers based on ATS headers\n # though this is a bit more work here, it saves lots of code repetition\n headers = {}\n # recording information (start_time, start_date, stop_time, stop_date, ats_data_file)\n fileLine = sectionLines[\"FILE\"][0]\n fileSplit = fileLine.split()\n headers[\"sample_freq\"] = np.absolute(float(fileSplit[-1]))\n timeLine = sectionLines[\"FILE\"][2]\n timeSplit = timeLine.split()\n # these are the unix time stamps\n startDate = float(timeSplit[1] + \".\" + timeSplit[2])\n datetimeStart = datetime.utcfromtimestamp(startDate)\n stopDate = float(timeSplit[3] + \".\" + timeSplit[4])\n datetimeStop = datetime.utcfromtimestamp(stopDate)\n headers[\"start_date\"] = datetimeStart.strftime(\"%Y-%m-%d\")\n headers[\"start_time\"] = datetimeStart.strftime(\"%H:%M:%S.%f\")\n headers[\"stop_date\"] = datetimeStop.strftime(\"%Y-%m-%d\")\n headers[\"stop_time\"] = datetimeStop.strftime(\"%H:%M:%S.%f\")\n # here calculate number of samples\n deltaSeconds = (datetimeStop - datetimeStart).total_seconds()\n # calculate number of samples - have to add one because the time given in SPAM recording is the actual time of the last sample\n numSamples = int(deltaSeconds * headers[\"sample_freq\"]) + 1\n # put these in headers for ease of future calculations in merge headers\n headers[\"num_samples\"] = numSamples\n # spam datasets only have the one data file for all channels\n headers[\"ats_data_file\"] = fileSplit[1]\n # data information (meas_channels, sample_freq)\n chanLine = sectionLines[\"CHANNAME\"][0]\n # this gets reformatted to an int later\n headers[\"meas_channels\"] = chanLine.split()[1]\n numChansInt = int(headers[\"meas_channels\"])\n # deal with the channel headers\n chanHeaders = []\n for iChan in range(0, numChansInt):\n chanH = self.chanDefaults()\n # set the sample frequency from the main headers\n chanH[\"sample_freq\"] = headers[\"sample_freq\"]\n # line data - read through the data in the correct channel order\n chanLine = sectionLines[\"CHANNAME\"][iChan + 1]\n chanSplit = chanLine.split()\n dataLine = sectionLines[\"DATA\"][iChan + 1]\n dataSplit = dataLine.split()\n # channel input information (gain_stage1, gain_stage2, hchopper, echopper)\n chanH[\"gain_stage1\"] = 1\n chanH[\"gain_stage2\"] = 1\n # channel output information (sensor_type, channel_type, ts_lsb, pos_x1, pos_x2, pos_y1, pos_y2, pos_z1, pos_z2, sensor_sernum)\n chanH[\"ats_data_file\"] = fileSplit[1]\n chanH[\"num_samples\"] = numSamples\n\n # channel information\n # spams often use Bx, By - use H within the software as a whole\n 
chanH[\"channel_type\"] = consistentChans(chanSplit[2])\n # the sensor number is a bit of a hack - want MFSXXe or something - add MFS in front of the sensor number - this is liable to break\n # at the same time, set the chopper\n calLine = sectionLines[\"200{}003\".format(iChan + 1)][0]\n calSplit = calLine.split()\n if isMagnetic(chanH[\"channel_type\"]):\n chanH[\"sensor_sernum\"] = calSplit[\n 2\n ] # the last three digits is the serial number\n sensorType = calSplit[1].split(\"_\")[1][-2:]\n chanH[\"sensor_type\"] = \"MFS{:02d}\".format(int(sensorType))\n if \"LF\" in calSplit[1]:\n chanH[\"hchopper\"] = 1\n else:\n chanH[\"sensor_type\"] = \"ELC00\"\n if \"LF\" in calLine:\n chanH[\"echopper\"] = 1\n\n # data is raw voltage of sensors\n # both E and H fields need polarity reversal (from email with Reinhard)\n # get scaling from headers\n scaling = float(dataSplit[-2])\n if isElectric(chanH[\"channel_type\"]):\n # the factor of 1000 is not entirely clear\n lsb = 1000.0 * scaling\n # volts to millivolts and a minus to switch polarity giving data in mV\n lsb = -1000.0 * lsb\n else:\n # volts to millivolts and a minus to switch polarity giving data in mV\n # scaling in header file is ignored because it duplicates static gain correction in calibration\n lsb = -1000.0\n chanH[\"ts_lsb\"] = lsb\n\n # the distances\n if chanSplit[2] == \"Ex\":\n chanH[\"pos_x1\"] = float(dataSplit[4]) / 2\n chanH[\"pos_x2\"] = chanH[\"pos_x1\"]\n if chanSplit[2] == \"Ey\":\n chanH[\"pos_y1\"] = float(dataSplit[4]) / 2\n chanH[\"pos_y2\"] = chanH[\"pos_y1\"]\n if chanSplit[2] == \"Ez\":\n chanH[\"pos_z1\"] = float(dataSplit[4]) / 2\n chanH[\"pos_z2\"] = chanH[\"pos_z1\"]\n\n # append chanHeaders to the list\n chanHeaders.append(chanH)\n\n # check information from raw file headers\n self.headersFromRawFile(headers[\"ats_data_file\"], headers)\n # return the headers and chanHeaders from this file\n return headers, chanHeaders", "def getDataBefore(self, data: ghidra.program.model.listing.Data) -> ghidra.program.model.listing.Data:\n ...", "def parse_log_start_time(log_data):\n try:\n # Get the log starting time\n time_match = search(\n r\"Log Started at (\\w+, \\w+ \\d{2}, \\d{4} \\d{2}:\\d{2}:\\d{2})\",\n log_data)\n log_start_time = datetime.strptime(\n time_match.group(1), \"%A, %B %d, %Y %H:%M:%S\")\n\n # Get the timezone of the log\n timezone_match = search(\n r\"<\\d{2}:\\d{2}> \\w+ \\w+: [(]g_timezone,([^)]*)[)]\", log_data)\n timezone_info = timezone(timedelta(hours=int(timezone_match.group(1))))\n\n return log_start_time.replace(tzinfo=timezone_info)\n except Exception:\n print(\"Something is wrong with the log file!\")", "def preprocess(self,\n HUC8, \n state, \n start, \n end,\n drainmax = 400, \n extra_outlets = None,\n overwrite = False, \n verbose = True, \n vverbose = False, \n parallel = True, \n extract = True, \n delineate = True,\n landuse = True, \n landstats = True, \n build = True, \n climate = True, \n gagedata = True,\n subbasinplots = False, \n watershedplots = True, \n landplots = True,\n landpercents = False, \n flowplots = True, \n metstatplots = True,\n metgageplots = True,\n ):\n\n # check the network is mounted on Unix-like systems\n\n if os.name != 'nt':\n\n if not os.path.ismount(self.network):\n print('\\nerror: network ' +\n '{} does not seem to be mounted\\n'.format(self.network))\n raise\n\n # keep track of how long it takes\n\n go = time.time()\n\n # if the destination folder does not exist, make it\n\n if not os.path.isdir(self.output): os.mkdir(self.output)\n\n # if the 
destination folder for the HUC8 does not exist, make it\n\n its = self.output, HUC8\n output = '{}/{}'.format(*its)\n if not os.path.isdir(output): os.mkdir(output)\n\n # make a subdirectory for hydrography data\n\n self.hydrography = '{}/{}/hydrography'.format(*its)\n if not os.path.isdir(self.hydrography): os.mkdir(self.hydrography)\n\n # make a directory for HSPF calculations\n\n hspfdirectory = '{}/{}/hspf'.format(*its)\n if not os.path.isdir(hspfdirectory): os.mkdir(hspfdirectory)\n\n # make a list of all the years for the CDL extraction\n\n years = [start.year]\n \n t = start\n while t < end:\n if t.year not in years: years.append(t.year)\n t += datetime.timedelta(days = 1)\n\n # extract the data for the HUC8 from the sources\n\n if extract: self.extract(HUC8, start, end)\n\n # delineate the subbasins and the hydrography data\n\n if delineate: self.delineate(HUC8)\n\n # download and extract land use data\n\n if landuse: self.extract_CDL(HUC8, state, years)\n\n # build the watershed object\n\n if build: self.build(HUC8, start, end, years)\n\n # download and extract the climate data\n\n if climate: self.climate(HUC8, start, end)\n\n if verbose: \n\n print('completed preprocessing watershed in ' +\n '{:.1f} seconds\\n'.format((time.time() - go)))", "def init(self, start):\r\n\t\tself.start = start\r\n\t\tself.time = 0\r\n\t\tself.t = []\r\n\t\tself.ch = []", "def load_hgn(params, device, dtype):\n # Define networks\n encoder = EncoderNet(seq_len=params[\"optimization\"][\"input_frames\"],\n in_channels=params[\"dataset\"][\"rollout\"][\"n_channels\"],\n **params[\"networks\"][\"encoder\"],\n dtype=dtype).to(device)\n transformer = TransformerNet(\n in_channels=params[\"networks\"][\"encoder\"][\"out_channels\"],\n **params[\"networks\"][\"transformer\"],\n dtype=dtype).to(device)\n hnn = HamiltonianNet(**params[\"networks\"][\"hamiltonian\"],\n dtype=dtype).to(device)\n decoder = DecoderNet(\n in_channels=params[\"networks\"][\"transformer\"][\"out_channels\"],\n out_channels=params[\"dataset\"][\"rollout\"][\"n_channels\"],\n **params[\"networks\"][\"decoder\"],\n dtype=dtype).to(device)\n\n # Define HGN integrator\n integrator = Integrator(delta_t=params[\"dataset\"][\"rollout\"][\"delta_time\"],\n method=params[\"integrator\"][\"method\"])\n \n # Instantiate Hamiltonian Generative Network\n hgn = HGN(encoder=encoder,\n transformer=transformer,\n hnn=hnn,\n decoder=decoder,\n integrator=integrator,\n device=device,\n dtype=dtype,\n seq_len=params[\"dataset\"][\"rollout\"][\"seq_length\"],\n channels=params[\"dataset\"][\"rollout\"][\"n_channels\"])\n return hgn", "def initialize(self):\n\t\ttable = NetworkTables.getTable('SmartDashboard')\n\t\tinitial_pos = table.getData('initial_pos')\n\t\t#XXX Doing this will start the path following immediately. 
May want this\n\t\t# in the execute or something\n\t\tpath = self.dt.which_path(initial_pos)", "def test_parse_hgts_jane4(self):\n with open(self.jane4_output_hgt_fp, 'r') as f:\n output = parse_hgts(f, 'jane4')\n self.assertEqual(int(output), 1)", "def get_data_source(self, *args, **kwargs):\n if kwargs.get('exp',None) is not self.exp:\n self.xtc_dir = None\n\n self.set_exp_defaults(**kwargs)\n# if self.get_kwarg('camrecord'):\n# host = os.uname()[1]\n# xtc_dir = \"/reg/d/camera/{:}/\"\n\n if self._kwargs.get('iocrc'):\n if self._kwargs.get('iocrc') in 'local':\n host = os.uname()[1]\n else:\n host = self._kwargs.get('iocrc')\n \n self.xtc_dir = \"/reg/d/cameras/{:}/daq/xtc\".format(host)\n\n if self.exp.startswith('dia'):\n folder = 'dia'\n else:\n folder = self.instrument\n \n if self.xtc_dir:\n default_dir = True\n else:\n self.xtc_dir = \"/reg/d/psdm/{:}/{:}/xtc\".format(folder, self.exp)\n default_dir = True\n \n if not self.h5_dir:\n self.h5_dir = \"/reg/d/psdm/{:}/{:}/hdf5\".format(folder, self.exp)\n\n if self.live:\n data_source = psutils.live_source(monshmserver=self.monshmserver)\n \n else:\n# if len(self.runs) == 0:\n# self.exp = _default_exp['exp']\n# self.run = _default_exp['run']\n# self.instrument = self.exp[0:3]\n## self.runs = experiment_info.experiment_runs(self.instrument.upper(),self.exp)\n# print 'No runs taken yet for ',self.exp\n# print 'Using default experiment {exp} and run {run}'.format(\n# exp=self.exp,run=self.run)\n\n if len(self.runs) > 0 and self.run > len(self.runs):\n print 'Run number {:} too large'.format(self.run)\n print 'Looking to load last run from experiment {:}'.format(self.exp)\n self.run = -1\n \n if len(self.runs) > 0 and self.run <= 0:\n while -self.run < len(self.runs)-1 and \\\n len(self.runs[-1+self.run].get('xtc_files',[])) == 0:\n self.run -= 1\n \n if self.run:\n self.run = self.runs[self.run]['num']\n else:\n self.run = 0\n\n if len(self.runs[-1+self.run].get('xtc_files',[])) == 0:\n data_source = None\n self._kwargs['noload'] = True\n \n if self.run <= 0:\n data_source = None\n self._kwargs['noload'] = True\n else:\n try:\n self.exper_id = self.runs[self.run-1]['exper_id']\n data_source = \"exp={exp}:run={run}\".format(exp=self.exp,run=self.run)\n if self.ffb:\n data_source += \":one-stream\"\n # data_source += \":live\"\n self.xtc_dir = \"/reg/d/ffb/{instrument}/{exp}/xtc\".format(\n instrument=self.instrument,exp=self.exp)\n elif self.h5:\n data_source += \":h5\"\n elif self.indexed:\n if self.idx:\n data_source += \":idx\"\n self.smd = False\n else:\n data_source += \":smd\"\n\n if self.xtc_dir and not default_dir and not self.h5:\n data_source += \":dir={:}\".format(self.xtc_dir)\n except:\n data_source = None\n self._kwargs['noload'] = True\n print 'No data source'\n\n return data_source", "def _init_net_delay_data(self):\n if self._net_delay_raw_data is None:\n return\n\n json_data = json_util.load_content(self._net_delay_raw_data)\n for row in json_data:\n app_id = int(row['app'])\n src_node_id = int(row['src_node'])\n dst_node_id = int(row['dst_node'])\n net_delay = float(row['net_delay'])\n self._net_delay_data[app_id][src_node_id][dst_node_id].append(net_delay)", "def getFirstData(self) -> ghidra.program.model.listing.Data:\n ...", "def _init_special_vars(self, T_start=None, T_end=None):\n self.min_energy = np.min(self.event_list_T[1][T_start:T_end])\n self.max_energy = np.max(self.event_list_T[1][T_start:T_end])\n self.min_time = np.min(self.event_list_T[0][T_start:T_end])\n self.max_time = 
np.max(self.event_list_T[0][T_start:T_end])", "def original_start(self):\n if \"originalStart\" in self._prop_dict:\n return datetime.strptime(self._prop_dict[\"originalStart\"].replace(\"Z\", \"\"), \"%Y-%m-%dT%H:%M:%S.%f\")\n else:\n return None", "def get_starter_header_info(authpt, hks, session: Session):\n resp = session.post(const.POST_STARTER_HEADER_INFO.format(authpt, hks))\n json_resp = json.loads(resp.content)\n\n return json_resp", "def _WriteTaskStart(self, task_start):\n if self.storage_type != definitions.STORAGE_TYPE_TASK:\n raise IOError('Task start not supported by storage type.')\n\n stream_name = 'task_start.{0:06d}'.format(self._last_task)\n if self._HasStream(stream_name):\n raise IOError('Task start: {0:06d} already exists.'.format(\n self._last_task))\n\n task_start_data = self._SerializeAttributeContainer(task_start)\n\n data_stream = _SerializedDataStream(\n self._zipfile, self._temporary_path, stream_name)\n data_stream.WriteInitialize()\n data_stream.WriteEntry(task_start_data)\n data_stream.WriteFinalize()" ]
[ "0.55448586", "0.54904854", "0.5319753", "0.5157752", "0.515219", "0.5077915", "0.5074858", "0.50618047", "0.49980226", "0.4995144", "0.4934863", "0.4929119", "0.49235275", "0.49122941", "0.49056458", "0.489825", "0.48901126", "0.4874309", "0.4871687", "0.48503634", "0.4832765", "0.4831706", "0.48259488", "0.4811187", "0.48023063", "0.4792749", "0.4784197", "0.4780106", "0.47731048", "0.47674128", "0.47636193", "0.47557616", "0.4755427", "0.47500208", "0.47468758", "0.47423694", "0.47423196", "0.47258413", "0.47244513", "0.47212473", "0.47211844", "0.47206584", "0.471794", "0.47152144", "0.47143376", "0.47114986", "0.47075626", "0.47039425", "0.4703408", "0.46998474", "0.46970278", "0.46912938", "0.46894494", "0.4680529", "0.46767017", "0.4676464", "0.46758375", "0.46737695", "0.4666852", "0.465936", "0.4656347", "0.46456608", "0.46452415", "0.46434253", "0.46418586", "0.46330434", "0.46296296", "0.461736", "0.4603553", "0.4596296", "0.45927924", "0.45923325", "0.45873654", "0.45869476", "0.4586345", "0.45861843", "0.45841473", "0.4581325", "0.4573593", "0.45720574", "0.45713016", "0.45700127", "0.45631903", "0.45622575", "0.45579806", "0.45573497", "0.455262", "0.45525458", "0.45461634", "0.4542108", "0.454138", "0.45389742", "0.4523609", "0.45235544", "0.45157367", "0.45124248", "0.45086893", "0.4507619", "0.4502984", "0.44955394" ]
0.580808
0
returns the topological height at (x, y)
def getHeight(self, x, y): if x > self.maxX or y > self.maxY or x < 0 or y < 0: return 10000000 # effectively infinity return self.data[y][x][0]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def height(self):\n return self.i_node.distance(self.n_node)", "def _height1(self): #works but n^2 time\n return max(self.depth(p) for p in self.positions() if self.is_leaf(p))", "def height(self):\n return self.upper_right.y - self.lower_left.y", "def _height1(self): # works, but O(n^2) worst-case time\n return max(self.depth(p) for p in self.positions() if self.is_leaf(p))", "def height(self):\n return self.y.max() - self.y.min()", "def _height(self, node):\n if node.value is not None and not node.nodes:\n return node.height\n elif node.value is not None and node.nodes:\n return max(flatten([node.height, ] +\n map(self._height, node.nodes.values())))\n elif node.value is None and node.nodes:\n return max(flatten(map(self._height, node.nodes.values())))\n else:\n raise Exception('should never have gotten here')", "def get_height(self):\n def _get_height(node, height=None):\n if not height:\n height = self._get_level(node) + 1\n if node.left:\n height = _get_height(node.left, height+1)\n if node.right:\n height = max(height, _get_height(node.right, height+1))\n if not node.left and not node.right:\n height = self._get_level(node)\n return height\n return _get_height(self.root)", "def _height1(self): # works, but O(n^2) worst-case time\n return max(self.depth(p) for p in self.positions() if self.is_leaf(p))", "def height(self, p=None):\n if p is None:\n p = self.root()\n return self._height2(p) # start _height2 recursion", "def height(self):\n yy = self.yy\n return max(yy) - min(yy)", "def height(self, p=None):\n\n if p is None:\n p = self.root()\n return self._height2(p) # start height2 recursion", "def height(self, p=None):\n if p is None:\n p = self.root()\n return self._height2(p) # start _height2 recursion", "def height(self, p = None):\n if p is None:\n p = self.root()\n return self._height2(p) # start _height2 recursion", "def height(node):\n\n if node is None:\n return 0\n\n left_height = height(node.left)\n right_height = height(node.right)\n\n return max(left_height, right_height) + 1", "def height(t):\n if t.is_empty:\n return 0\n else:\n left = height(t.left)\n right = height(t.right)\n \n return 1 + max([left, right])", "def height(self):\n return abs(self.end[1] - self.start[1])", "def height(self):\n return self.maxy - self.miny", "def height(node): \n if node is None:\n return -1\n \n # select the top two heights:\n max_height_1, max_height_2 = -1, -1\n for child in node.children:\n h = height(child) + 1\n if h > max_height_1:\n max_height_1, max_height_2 = h, max_height_1\n elif h > max_height_2:\n max_height_2 = h\n \n self.diameter = max(self.diameter, max_height_1 + max_height_2 + 2)\n \n return max_height_1", "def node_y_dimensionality(self) -> int:\n return int(self.graph_tuple_stats.node_y_dimensionality or 0)", "def _height1(self, p):\n return max(self.depth(p) for p in self.positions() if self.is_leaf(p))", "def get_height_tree(self):\n layers = self.breadth_first_traversal()\n \n if all(node is None for node in layers[-1]):\n del layers[-1]\n \n height = len(layers) - 1\n return height", "def height(self):\n if self.is_empty():\n return 0\n elif self.is_leaf():\n return 0\n else:\n if self.has_left():\n if self.has_right():\n return 1+max(self.get_left().height(), self.get_right().height())\n else:\n return 1+self.get_left().height()\n else:\n return 1+self.get_right().height()", "def get_height_iterative(self):\n max_so_far = 0\n nodes_queue = deque()\n nodes_queue.append((self.root, 0))\n while nodes_queue:\n node, depth = nodes_queue.popleft()\n max_so_far = 
max(max_so_far, depth)\n            if node.left:\n                nodes_queue.append((node.left, depth + 1))\n            if node.right:\n                nodes_queue.append((node.right, depth + 1))\n        return max_so_far", "def height(self, p=None):\n        if p is None:\n            p = self.root()\n        return self._height2(p)", "def height(self, p=None):\n        if p is None:\n            p = self.root()\n        return self._height2(p)", "def height(poly):\n    num = len(poly) - 1\n    if abs(poly[num][2] - poly[0][2]) > abs(poly[1][2] - poly[0][2]):\n        return dist(poly[num], poly[0])\n    elif abs(poly[num][2] - poly[0][2]) < abs(poly[1][2] - poly[0][2]):\n        return dist(poly[1], poly[0])\n    else:\n        return min(dist(poly[num], poly[0]), dist(poly[1], poly[0]))", "def _height(node):\n\n    if not node:\n        return 0\n\n    return 1 + max(_height(node.left), _height(node.right))", "def get_height(self, treenode=self):\n\t\treturn self.__get_height(treenode)", "def _height2(self, p):\n        if self.is_leaf(p):\n            return 0\n        else:\n            return 1 + max(self._height2(c) for c in self.children(p))", "def _height2(self, p):\n        if self.is_leaf(p):\n            return 0\n        else:\n            return 1 + max(self._height2(c) for c in self.children(p))", "def _height2(self, p):\n        if self.is_leaf(p):\n            return 0\n        else:\n            return 1 + max(self._height2(c) for c in self.children(p))", "def _height2(self, p):\n        if self.is_leaf(p):\n            return 0\n        else:\n            return 1 + max(self._height2(c) for c in self.children(p))", "def height(self, n=None):\n        if n is None:\n            n = self.root()\n        return self._height2(n)      # start height 2 recursion", "def height(self):\n        return 1 + max(self.left.height, self.right.height)", "def height(self) -> int:\n        return self._obj[self.y_dim].size", "def height(self):\n        # Check if left child has a value and if so calculate its height\n        left_height = self.left.height() if self.left is not None else -1\n        # Check if right child has a value and if so calculate its height\n        right_height = self.right.height() 
if self.right is not None else -1\n        # Return one more than the greater of the left height and right height\n        return 1 + max(left_height, right_height)", "def height(self):\n        # Check if root node has a value and if so calculate its height\n        return self.root.height() if self.root is not None else -1", "def height(self):\n        self._updateExtents()\n        return self._mHeight", "def height(self) -> int:", "def height(self) -> int:", "def height(self) -> int:", "def _height2(self, p):                  # time is linear in size of subtree\n        if self.is_leaf(p):\n            return 0\n        else:\n            return 1 + max(self._height2(c) for c in self.children(p))", "def geoidHeight(self):\n        return self._geoidhgt", "def _height2(self, p):          # time is linear in size of subtree\n        if self.is_leaf(p):\n            return 0\n        else:\n            return 1 + max(self._height2(c) for c in self.children(p))", "def height_at(self, x, z):\n\n        return self.heightmap[x * 16 + z]", "def height(T):\r\n    if T.isLeaf:\r\n        return 0\r\n    return 1 + height(T.child[0])", "def get_height(self):\n        return self.calc_height(self.root)", "def height(self):\n        left_height = self.left.height() if self.left else 0\n        right_height = self.right.height() if self.right else 0\n        return 1 + max([left_height, right_height])", "def _height2(self, n):\n        if self.is_leaf(n):\n            return 0\n        else:\n            return 1 + max(self._height2(c) for c in self.children(n))", "def height(self, x):\n\t\treturn np.interp(x, self.x, self.z)", "def calculate_height(self):\n        return self.endY - self.startY", "def height(self, p):\n        if self._heights is None:\n            return None\n        return self._heights[p.index()]", "def height(self) -> int:\n        if self.root is None:\n            return -1\n\n        return self.height_helper(self.root)", "def graph_y_dimensionality(self) -> int:\n    return int(self.graph_tuple_stats.graph_y_dimensionality or 0)", "def height(node):\r\n    \r\n    height = 0\r\n    temp = node\r\n    while temp != None:\r\n        temp = temp.parent\r\n        height += 1\r\n    return height", "def height(self) -> int:\n        # binary search tree == empty\n        if self.root is None:\n            return -1\n\n        #count number\n        return self.height_helper(self.root)", "def height(self):\n        return self.get_delta_value(self.Y_INDEX)", "def height (self):\n        return self._h", "def get_height(self):\n        if self.root is None:\n            return 0\n        else:\n            return self._get_height(self.root)  # Start at the root", "def _height(self,p):\n        if self.is_leaf(p):\n            return 0\n        else:\n            return 1 + max(self._height(c) for c in self.children(p))", "def get_dimension_height(self):\n        pass", "def height(root:Node) -> int:\n    current = root.left\n    depth = 0\n    maxdepth = [0]\n    #track the value and whether it has a branchpoint or not (bool)\n    seen = dict()\n\n    #do the left side first, then the right\n\n    while current is not None:\n        if current.val not in seen:\n            if (current.left is not None) and (current.right is not None):\n                seen.update({current.val:True})\n            else:\n                seen.update({current.val:False})\n        depth +=1\n        maxdepth.append(depth)\n        if current.left is not None:\n            current = current.left\n        elif current.right is not None:\n            current = current.right\n        else:\n            current = None\n\n    
print(' maxdepth right so far is {}'.format(maxdepth))\n\n return max(maxdepth)", "def edge_dxy(self):\r\n loc = self.loc\r\n rect = loc.coord\r\n p1 = rect[0]\r\n p2 = rect[1]\r\n edx = p2[0] - p1[0] # Find edge direction\r\n edy = p2[1] - p1[1]\r\n return edx, edy", "def height(self):\n return self.__size[1]", "def get_height_of_surface_gate(data, setup={}):\n idx = get_index_of_surface_gate(data, setup)\n nt = range(len(idx))\n return data['alt'][nt, idx]", "def get_height(self):\n leftheight = -1\n if (self._leftchild):\n leftheight = self._leftchild.get_height()\n rightheight = -1\n if (self._rightchild):\n rightheight = self._rightchild.get_height()\n return 1 + max(leftheight, rightheight)", "def height(self):\n if self.children == []:\n return 1 \n else:\n arr = []\n for child in self.children:\n result = 1 + child.height()\n arr.append(result)\n return max(arr)", "def get_height(self, p1: Point3D, p2: Point3D, p3: Point3D) -> float:\n return self._heightmap[self._get_heightmap_key(p1,p2,p3)]", "def height(self):\n try:\n return max(elem.height for elem in self[1:])+1\n except ValueError:\n return 0", "def getHeight(*args):", "def getHeight(*args):", "def getHeight(*args):", "def getHeight(*args):", "def getHeight(*args):", "def getHeight(*args):", "def getHeight(*args):", "def getHeight(*args):", "def getHeight(*args):", "def getHeight(*args):", "def getHeight(*args):", "def getHeight(*args):", "def getHeight(*args):", "def getHeight(*args):", "def getHeight(*args):", "def get_height(self):\r\n return self.state['h']", "def get_height(self, node):\n if node is None or self.result is False:\n return 0\n left_height = self.get_height(node.left)\n right_height = self.get_height(node.right)\n \n if abs(left_height-right_height) > 1:\n self.result = False\n return max(left_height, right_height) + 1", "def get_terrain_height(x, prev_height, max_height, next_tank_pos, tank_size, next_feature_pos):\n dist_x = next_feature_pos[0] - x\n dist_y = next_feature_pos[1] - prev_height\n if dist_x != 0:\n height_diff = dist_y / dist_x\n # if the feature contains a tank, we need to get there faster, because there will be the flat spot for the tank\n if next_tank_pos < next_feature_pos[0]:\n height_diff *= 2\n else:\n height_diff = dist_y\n next_height = math.ceil(prev_height + height_diff)\n return random.randrange(max(next_height - NOISE_SIZE, 1), min(next_height + NOISE_SIZE, max_height))", "def get_height_iterative_augmentNode(self):\n max_so_far = 0\n nodes_queue = deque()\n self.root.depth = 0\n nodes_queue.append(self.root)\n while nodes_queue:\n curr = nodes_queue.popleft()\n max_so_far = max(max_so_far, curr.depth)\n for node in [curr.left, curr.right]:\n if node is None:\n continue\n node.depth = curr.depth + 1\n nodes_queue.append(node)\n return max_so_far", "def get_height(self, current_node: Node = None, current_height: int = 0):\n current_node = current_node if current_node else self.root\n left_height = (self.get_height(current_node.left, current_height + 1) if current_node.left\n else current_height)\n right_height = (self.get_height(current_node.right, current_height + 1)\n if current_node.right else current_height)\n return max(left_height, right_height)", "def get_dimensions(self):\n x = max(self.bodies, key=lambda p: p.position[0]).position[0]\n y = max(self.bodies, key=lambda p: p.position[1]).position[1]\n return max(x, y) * 1.2", "def _get_height(self, node):\n # Base Case.\n if node is None:\n return 0 \n\n # Recursion.\n left = self._get_height(node.left)\n right = 
self._get_height(node.right)\n\n        # Count the height of the tree.\n        if left > right: \n            return left + 1\n\n        else: \n            return right + 1", "def height(self, obj):\n        if (obj.__class__.__name__ == 'PhysicalObject') or (issubclass(obj.__class__, laygo2.object.PhysicalObject)):\n            return self.height(obj.bbox)\n        else:\n            _i = self.bbox(obj)\n            return abs(_i[1, 1] - _i[0, 1])", "def height(self) -> float:\n        top = 0\n        height_ = 0\n        for part in self.line_parts:\n            if part.state.rise > 0 and part.state.rise > top:\n                top = part.state.rise\n            if part.state.size > height_:\n                height_ = part.state.size\n\n        return height_ + self.top_margin + top", "def height(self):\n        return self.client.call('GET', self.name + 'height')", "def height(self) -> int:\n        return self.root.height if not self.empty() else 0", "def get_height(self, vars: np.ndarray, i: int, j: int) -> float:\n\n        if 1 <= i <= self.n - 2 and 1 <= j <= self.n - 2:\n            return vars[(self.n - 2) * (i - 1) + (j - 1)]\n        else:\n            return self.r(i/(self.n-1), j/(self.n-1))", "def height(self):\n        return (self.__height)", "def compute_leg_height(self, _A, _B):\n        tmp = self.leg_length**2 - ((_B.x - _A.x) / 2)**2\n        return Coordinate(x=(_A.x + _B.x)/2, y=(_A.y + _B.y)/2, z=np.sqrt(tmp))", "def height1(self):\n        if self.is_empty():\n            return 0\n        elif self.is_leaf():\n            return 1\n        else:\n            if self.get_left():\n                if self.get_right():\n                    return 1 + max(self.get_left().height1(), self.get_right().height1())\n                else:\n                    return 1 + self.get_left().height1()\n            else:\n                return 1 + self.get_right().height1()", "def height(root: Node):\n    return (max(height(root.left), height(root.right)) + 1) if root else 0" ]
[ "0.6800607", "0.65334195", "0.65241534", "0.6479777", "0.6462834", "0.63903254", "0.6358888", "0.6354578", "0.6334228", "0.63333184", "0.62644345", "0.6259376", "0.62438977", "0.62394667", "0.6223324", "0.6222117", "0.62176883", "0.6216847", "0.62075406", "0.6189507", "0.61804366", "0.61419976", "0.6124716", "0.61037725", "0.61037725", "0.6092213", "0.60656893", "0.6052646", "0.60373825", "0.60373825", "0.60373825", "0.6031145", "0.602727", "0.6008362", "0.59551775", "0.59358793", "0.5922388", "0.5903858", "0.58966255", "0.58966255", "0.58966255", "0.5877181", "0.58687115", "0.5863631", "0.58620614", "0.58609474", "0.58354497", "0.58343905", "0.58260447", "0.58198243", "0.5819688", "0.58170116", "0.5806417", "0.5791012", "0.57897186", "0.57872117", "0.57808113", "0.5776208", "0.5776204", "0.57692665", "0.57593966", "0.5754173", "0.5743746", "0.5740353", "0.57360816", "0.57350034", "0.5713777", "0.57092756", "0.5708844", "0.5698584", "0.5698584", "0.5698584", "0.5698584", "0.5698584", "0.5698584", "0.5698584", "0.5698584", "0.5698584", "0.5698584", "0.5698584", "0.5698584", "0.5698584", "0.5698584", "0.5698584", "0.5698555", "0.5678811", "0.56688756", "0.56675434", "0.5665271", "0.56537575", "0.5643604", "0.56369466", "0.5636089", "0.5629995", "0.5618492", "0.5616148", "0.55965185", "0.5581184", "0.5580013", "0.55719674" ]
0.7280083
0
returns the level of water at the point (x, y)
def getWater(self, x, y): if x > self.maxX or y > self.maxY or x < 0 or y < 0: raise Exception("accessed an invalid position in method getWater") return self.data[y][x][1]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_water_level(self):\n return self.water_level", "def get_current_water_level(self):\n \n url = f'http://waterservices.usgs.gov/nwis/iv/?format=json&sites={self.site_number}&parameterCd=00060,00065&siteStatus=all'\n\n response = requests.request(\"GET\", url)\n data = json.loads(response.text)\n \n #parses json response to get only value of current water level for given river\n current_water_level = data['value']['timeSeries'][0]['values'][0]['value'][0]['value']\n \n return current_water_level", "def get_water_level(df):\n\n water,lat = [],[]\n #gets just ocean photons\n df = df.loc[df.Conf_ocean == 4]\n if len(df) == 0:\n return None\n #getting photons +- 2 of the median height of photons\n df = df.loc[(df.Height > df.Height.median() - 2) & (df.Height < df.Height.median() + 2)]\n\n #creating a df with just the latitude and height\n sea_level = pd.DataFrame([df.Height,df.Latitude]).T.dropna()\n sea_level.columns = ['water','latitude']\n\n #getting photons +- 1.25 of the median height of photons\n sea_level = sea_level.loc[(sea_level.water > sea_level.water.median() -1.25) & (sea_level.water < sea_level.water.median() +1.25)]\n\n #fitting linear line to remaining points\n z = np.polyfit(sea_level.latitude, sea_level.water,1)\n f = np.poly1d(z)\n\n #getting points with <2m abs error\n sea_level['abs_diff'] = np.abs(sea_level.water - f(sea_level.latitude))\n sea_level = sea_level.loc[sea_level.abs_diff < 2]\n #fitting a parabolic function to the remaining points\n z2 = np.polyfit(sea_level.latitude, sea_level.water,2)\n f2 = np.poly1d(z2)\n\n return f2", "def get_geologic_level(self, point: Point) -> int:\n if point == self.target:\n return 0\n if point.y == 0:\n return point.x * 16807\n if point.x == 0:\n return point.y * 48271\n return self.get_erosion_level(to_above(point)) * self.get_erosion_level(to_left(point))", "def elevation(x, y):\n file = os.path.abspath(\"..\") + \"\\Shape\\Shape.vrt\"\n layer = gdal.Open(file)\n gt = layer.GetGeoTransform()\n rasterx = int((x - gt[0]) / gt[1])\n rastery = int((y - gt[3]) / gt[5])\n print('elevation =', layer.GetRasterBand(1).ReadAsArray(rasterx, rastery, 1, 1)[0][0], 'm above sea level')", "def power_level(x, y, serial):\n rack_id = x + 10\n return (rack_id * y + serial) * rack_id // 100 % 10 - 5", "def supply_region(self, x, y):\n if x>350 and x<450 and y>0 and y<100:\n return 1\n elif x>350 and x<450 and y>400 and y<500:\n return 2\n return 0", "def elevation(self):\n return self.altitude - self.heightAboveGround", "def elevation(source):\n srcAzEl = subarrayControl.s.azel(source, 0.0);\n return srcAzEl[1];", "def test_welch(x, y, level):\n return test_t(x, y, level, equal_var=False)", "def get_ly(self):\r\n return self.dy * self.ny - self.oy", "def first_level_functionality(file, y=0, x=0, z=0):\n\n fileobj = open(file,'r')\n\n count_above_seaLevel = 0\n count_total = 0\n\n for row in fileobj:\n # Split the argument into words using str.split()\n altitude = float(row.split()[2])\n\n if altitude > 0:\n count_total += 1\n\n if altitude > z:\n #explain double for loop here\n count_above_seaLevel += 1\n\n area_above_seaLevel = int(x * y * count_above_seaLevel)\n\n #comment explain this\n percentage_landArea_above_seaLevel = \\\n 100 * round(count_above_seaLevel/count_total,4)\n\n fileobj.close()\n\n print(\n \"The land area above water in this area at +\",\n z,\n \"meters will be\",\n area_above_seaLevel,\n \"square kilometers, which is\",\n percentage_landArea_above_seaLevel,\n \"% of the current land area above water.\")", "def 
set_level(self, x, level):\n return x * 10 ** ((level - self.ref_level) / 20)", "def retrieve_pixel_value(lon, lat, data_source):\n dataset = gdal.Open(data_source)\n\n gt = dataset.GetGeoTransform()\n the_band = dataset.GetRasterBand(1)\n px = int((lon - gt[0]) / gt[1]) # x pixel\n py = int((lat - gt[3]) / gt[5]) # y pixel\n\n buf = the_band.ReadRaster(px, py, 1, 1, buf_type=gdal.GDT_Int16)\n elev = struct.unpack(\"h\", buf)\n\n return elev[0]", "def get_temperature(elevation, sea_level):\n if elevation <= sea_level:\n return 0.8\n else:\n return (-1.0 / (1.0 - sea_level)) * (elevation - sea_level) + 1.0", "def buff_region(self, x, y):\n if x>120 and x<220 and y>275 and y<375:\n return 1\n elif x>580 and x<680 and y>125 and y<225:\n return 2\n return 0", "def get(self, x: int, y: int, /) -> int:", "def calculate_dew_point(temp, hum):\n return temp - (100 - hum) / 5", "def get_level(rol):\n\treturn rol.level", "def elevation(self, rover):\n\t\tcurrent_tile = rover.planet.tiles[rover.y][rover.x]\n\t\t#current_tile is slope\n\t\tif current_tile.is_slope():\n\t\t\t#self is slope current_tile is slope\n\t\t\tif self.is_slope():\n\t\t\t\tif current_tile.high_elevation == self.low_elevation:\n\t\t\t\t\treturn \"/\"\n\t\t\t\tif current_tile.low_elevation == self.high_elevation:\n\t\t\t\t\treturn \"\\\\\"\n\t\t\t\tif self.high_elevation < current_tile.low_elevation:\n\t\t\t\t\treturn \"-\"\n\t\t\t\tif self.low_elevation > current_tile.high_elevation:\n\t\t\t\t\treturn \"+\"\n\t\t\t\tif self.low_elevation == current_tile.low_elevation\\\n\t\t\t\t\tand self.high_elevation == current_tile.high_elevation:\n\t\t\t\t\treturn \" \"\n\t\t\t#self is flat current_tile is slope\n\t\t\telse:\n\t\t\t\tif self.low_elevation > current_tile.high_elevation:\n\t\t\t\t\treturn \"+\"\n\t\t\t\tif self.low_elevation < current_tile.low_elevation:\n\t\t\t\t\treturn \"-\"\n\t\t\t\treturn \" \"\n\n\n\t\telse: #current_tile is flat\n\t\t\t#self is slope current_tile is flat\n\t\t\tif self.is_slope():\n\t\t\t\tif self.low_elevation == current_tile.low_elevation:\n\t\t\t\t\treturn \"/\"\n\t\t\t\tif self.high_elevation == current_tile.low_elevation:\n\t\t\t\t\treturn \"\\\\\"\n\t\t\t\tif self.low_elevation > current_tile.low_elevation:\n\t\t\t\t\treturn \"+\"\n\t\t\t\tif self.high_elevation < current_tile.low_elevation:\n\t\t\t\t\treturn \"-\"\n\t\t\t#self is flat current_tile is flat\n\t\t\telse:\n\t\t\t\tif self.low_elevation > current_tile.low_elevation:\n\t\t\t\t\treturn \"+\"\n\t\t\t\tif self.high_elevation < current_tile.low_elevation:\n\t\t\t\t\treturn \"-\"\n\t\t\treturn \" \"", "def getLevel(self):\n return self.level", "def calculateLighting(x,y,z, xnormal, ynormal, znormal):\n dummy = 0\n clr = dislin.getlit(x,y,z,xn,yn,zn,dummy)", "def get_level(self, channel=None):\n return int(self.getSensorData(\"FILLING_LEVEL\", channel))", "def getLightSensor() -> int:\n pass", "def get_pixel(self, x, y):\n if x < 0 or x > 7 or y < 0 or y > 15:\n # Ignore out of bounds pixels.\n return\n if y < 8:\n return self.get_led( y * 16 + x)\n else:\n return self.get_led((y-8) * 16 + (x+8))", "def get_velocity_sink(strength, xs, ys, X, Y):\r\n u = strength / (2 * math.pi) * (X - xs) / ((X - xs)**2 + (Y - ys)**2)\r\n v = strength / (2 * math.pi) * (Y - ys) / ((X - xs)**2 + (Y - ys)**2)\r\n \r\n return u, v", "def walled_in(x: int, y: int) -> bool:\r\n threshold = 2\r\n level = 0\r\n if wall_check(x, y, True):\r\n threshold = 1\r\n if not wall_check(x-1, y+1, False):\r\n if example[x-1, y+1] == 0:\r\n if mark_traveller(x-1, y+1, 
\"SW\"):\r\n level += 1\r\n if level >= threshold:\r\n return True\r\n if not wall_check(x - 1, y - 1, False):\r\n if example[x - 1, y - 1] == 0:\r\n if mark_traveller(x - 1, y - 1, \"SE\"):\r\n level += 1\r\n if level >= threshold:\r\n return True\r\n if not wall_check(x + 1, y - 1, False):\r\n if example[x + 1, y - 1] == 0:\r\n if mark_traveller(x + 1, y - 1, \"NE\"):\r\n level += 1\r\n if level >= threshold:\r\n return True\r\n if not wall_check(x + 1, y + 1, False):\r\n if example[x + 1, y + 1] == 0:\r\n if mark_traveller(x + 1, y + 1, \"NW\"):\r\n level += 1\r\n if level >= threshold:\r\n return True\r\n return False", "def level(self):\n return self.init_v[2]", "def get_level(cls, curve_value):\n return curve_value & (2 ** cls.level_bits - 1)", "def elevation(self):\n return self.container['elevation']", "def calc_level(xp, dominion):\n if xp < 3:\n xp_potential = 1\n if xp >= 3 and xp < 6:\n xp_potential = 2\n if xp >= 6 and xp < 12:\n xp_potential = 3\n if xp >= 12 and xp < 24:\n xp_potential = 4\n if xp >= 24 and xp < 48:\n xp_potential = 5\n if xp >= 48 and xp < 72:\n xp_potential = 6\n if xp >= 72 and xp < 96:\n xp_potential = 7\n if xp >= 96 and xp < 130:\n xp_potential = 8\n if xp >= 130 and xp < 170:\n xp_potential = 9\n if xp >= 170:\n xp_potential = 10\n if dominion < 2:\n dom_potential = 1\n if dominion >= 2 and dominion < 4:\n dom_potential = 2\n if dominion >= 4 and dominion < 10:\n dom_potential = 3\n if dominion >= 10 and dominion < 22:\n dom_potential = 4\n if dominion >= 22 and dominion < 38:\n dom_potential = 5\n if dominion >= 38 and dominion < 57:\n dom_potential = 6\n if dominion >= 57 and dominion < 76:\n dom_potential = 7\n if dominion >= 76 and dominion < 95:\n dom_potential = 8\n if dominion >= 95 and dominion < 124:\n dom_potential = 9\n if dominion >= 124:\n dom_potential = 10\n return min(xp_potential, dom_potential)", "def _elevation(self, node):\n return self.graph_provider.get_coords(node)['z']", "def __getxyB(x, y):\n\t\treturn x*3+y", "def get_erosion_level(self, point: Point) -> int:\n return (self.grid[point] + self.depth) % 20183", "def get_region(self, point: Point) -> int:\n return self.get_erosion_level(point) % 3", "def get_at(self,x,y):\n\t\tif ( not self._validate(x,y )):\n\t\t\treturn\n\n\t\tif (self.Grid[y] & (1 << x)) != 0:\n\t\t\treturn 1\n\t\telse:\n\t\t\treturn 0", "def getLevel(unique_name):", "def _determine_level(levels, points):\n import operator\n level = None\n sorted_levels = sorted(levels.iteritems(), key=operator.itemgetter(1))\n for el in sorted_levels:\n if points <= el[1]:\n level = el[0]\n break\n\n max_level = max(levels.iterkeys(), key=lambda threshold: levels[threshold])\n if points >= levels[max_level]:\n level = max_level\n return level", "def toLevelCoordinate(self):\n return self.currentLevel.transformToLevelCoordinate(self.rect.midbottom)", "def get_luminosity(name):\n all_data = mc.get('sensor_values')\n name = _lookup(name)\n try:\n return all_data[name][3]\n except KeyError:\n raise KeyError(\"No sensor with that name\")", "def det_to_world(self, x, y):\n ra, dec = self._wcs.all_pix2world(x, y, 0)\n return ra, dec", "def cargo_water(self):\n return self._cargo_water", "def resistance(self, level=1):\n if level == 1:\n res = (2 * self.pivot_point) - self.last_low\n elif level == 2:\n res = self.pivot_point + (self.last_high - self.last_low)\n elif level == 3:\n res = self.last_high + 2*(self.pivot_point - self.last_low)\n else:\n raise ValueError('Not a valid level. 
Must be 1, 2, or 3')\n return res", "def get_elevation(self):\n return self.elevation", "def find_water_overflow(water_x, water_y, step):\n while True:\n next_cell = grid[water_y + 1][water_x]\n current_cell = grid[water_y][water_x]\n if current_cell == \"#\":\n water_x -= step\n return water_x, False\n elif next_cell == \".\":\n sources.append((water_x, water_y))\n return water_x, True\n elif current_cell == \"|\" and next_cell == \"|\":\n return water_x, True\n water_x += step", "def lorentz(self, X, xm, amp, w):\n return amp / (1 + ((X - xm) / (w / 2)) ** 2)", "def det_to_world(self, x, y):\n ra, dec = x, y\n return ra, dec", "def elevation(self):\n\n\t\twidth = self.no_name_level[0]\n\t\theight = self.no_name_level[1]\n\t\ttile = self.no_name_level[2]\n\t\tx = self.no_name_level[3]\n\t\ty = self.no_name_level[4]\n\t\t\n\t\ttiles = []\n\t\tfor i in tile:\n\t\t\ti = i[:-1]\n\t\t\ttiles.append(i)\t\n\t\ttiles_arranged = [tiles[i:i + width] for i in range(0, len(tile), width)]\n\t\n\t\tplanet_co = []\n\t\t\n\t\tfor i in tiles_arranged:\n\t\t\t\n\t\t\tplanet = []\n\t\t\tfor n in i:\n\t\t\t\tn = n.split(',')\n\t\t\t\tif len(n) != 3:\n\t\t\t\t\ta = ['-']\n\t\t\t\t\tn += a\n\t\t\t\t\t\n\t\t\t\t\tplanet.append(n)\n\t\t\t\telse:\n\t\t\t\t\tplanet.append(n)\n\t\t\t\t\t\n\t\t\tplanet_co.append(planet)\n\t\t\t\n\t\n\t\tplanet_map = Planet(planet_co, width, height)\n\t\tcoordinates = Planet(planet_co, width, height)\n\t\tcoordinates = Planet.coordinates(coordinates)\n\t\tplanet_map = Planet.coordinates_dict(planet_map)#this is my map in dictionary format(coordinates : tile)\n\t\t\n\t\tfor y1 in coordinates:\n\t\t\tif coordinates.index(y1) == y:\n\t\t\t\ty_value = coordinates.index(y1)\n\t\t\t\tfor x1 in y1:\n\t\t\t\t\tif x1 == [x, y]:\n\t\t\t\t\t\tx_value = y1.index(x1)\n\t\trover_d = coordinates[y_value][x_value]\n\t\n\t\tx1 = x_value + 1\n\t\tx2 = x_value + 2\n\t\ty1 = y_value + 1\n\t\ty2 = y_value + 2\n\t\n\t\tif x1 == len(coordinates[1]):\n\t\t\tx1 == 0\n\t\tif y1 == len(coordinates):\n\t\t\ty1 == 0\n\t\n\t\tif x2 > len(coordinates[1]):\n\t\t\tx2 = 1\n\t\tif y2 > len(coordinates[1]):\n\t\t\ty2 == 1\n\t\n\t\tfront2 = coordinates[y2][x_value]\n\t\tfront1 = coordinates[y1][x_value]\n\t\tback1 = coordinates[y_value-1][x_value]\n\t\tback2 = coordinates[y_value-2][x_value]\n\t\tright1 = coordinates[y_value][x1]\n\t\tright2 = coordinates[y_value][x2]\n\t\tleft1 = coordinates[y_value][x_value-1]\n\t\tleft2 = coordinates[y_value][x_value-2]\n\t\n\t\n\t\tfront1_right1 = coordinates[y1][x1]\n\t\tfront1_right2 = coordinates[y1][x2]\n\t\tfront2_right1 = coordinates[y2][x1]\n\t\tfront2_right2 = coordinates[y2][x2]\n\t\tfront1_left1 = coordinates[y1][x_value-1]\n\t\tfront1_left2 = coordinates[y1][x_value-2]\n\t\tfront2_left1 = coordinates[y2][x_value-1]\n\t\tfront2_left2 = coordinates[y2][x_value-2]\n\t\n\t\tback1_right1 = coordinates[y_value-1][x1]\n\t\tback1_right2 = coordinates[y_value-1][x2]\n\t\tback2_right1 = coordinates[y_value-2][x1]\n\t\tback2_right2 = coordinates[y_value-2][x2]\n\t\tback1_left1 = coordinates[y_value-1][x_value-1]\n\t\tback1_left2 = coordinates[y_value-1][x_value-2]\n\t\tback2_left1 = coordinates[y_value-2][x_value-1]\n\t\tback2_left2 = coordinates[y_value-2][x_value-2]\n\t\t\n\t\tco_f2r2 = planet_map[str(front2_right2)]\n\t\tco_f2r1 = planet_map[str(front2_right1)]\n\t\tco_f2 = planet_map[str(front2)]\n\t\tco_f2l1 = planet_map[str(front2_left1)]\n\t\tco_f2l2 = planet_map[str(front2_left2)]\n\t\tco_f1r2 = planet_map[str(front1_right2)]\n\t\tco_f1r1 = 
planet_map[str(front1_right1)]\n\t\tco_f1 = planet_map[str(front1)]\n\t\tco_f1l1 = planet_map[str(front1_left1)]\n\t\tco_f1l2 = planet_map[str(front1_left2)]\n\t\tco_r2 = planet_map[str(right2)]\n\t\tco_r1 = planet_map[str(right1)]\n\t\tco_rover = planet_map[str([x, y])]\n\t\tco_l1 = planet_map[str(left1)]\n\t\tco_l2 = planet_map[str(left2)]\n\t\tco_b1r2 = planet_map[str(back1_right2)]\n\t\tco_b1r1 = planet_map[str(back1_right1)]\n\t\tco_b1 = planet_map[str(back1)]\n\t\tco_b1l1 = planet_map[str(back1_left1)]\n\t\tco_b1l2 = planet_map[str(back1_left2)]\n\t\tco_b2r2 = planet_map[str(back2_right2)]\n\t\tco_b2r1 = planet_map[str(back2_right1)]\n\t\tco_b2 = planet_map[str(back2)]\n\t\tco_b2l1 = planet_map[str(back2_left1)]\n\t\tco_b2l2 = planet_map[str(back2_left2)]\n\t\n\t\tfirst_lineco = [co_f2l2, co_f2l1, co_f2, co_f2r1, co_f2r2]\n\t\tsecond_lineco = [co_f1l2, co_f1l1, co_f1, co_f1r1, co_f1r2]\n\t\tthird_lineco = [co_l2, co_l1, co_rover, co_r1, co_r2]\n\t\tfourth_lineco = [co_b1l2, co_b1l1, co_b1, co_b1r1, co_b1r2]\n\t\tfifth_lineco = [co_b2l2, co_b2l1, co_b2, co_b2r1, co_b2r2]\n\n\t\tfirst_line = ['|']\n\t\tsec_line = ['|']\n\t\tthird_line = ['|']\n\t\tfourth_line = ['|']\n\t\tfifth_line = ['|']\n\t\tfor i in first_lineco:\n\t\t\tif i[2] == '-' and co_rover[2] == '-':\n\t\t\t\tif int(i[1]) == int(co_rover[1]):\n\t\t\t\t\tfirst_line.append(' |')\n\t\t\t\telif int(i[1]) < int(co_rover[1]):\n\t\t\t\t\tfirst_line.append(\"-|\")\n\t\t\t\telse:\n\t\t\t\t\tfirst_line.append('+|')\n\t\t\tif i[2] == '-' and co_rover[2] != '-':\n\t\t\t\tif int(co_rover[2]) == int(i[1]):\n\t\t\t\t\tfirst_line.append(' |')\n\t\t\t\telif int(co_rover[2]) > int(i[1]):\n\t\t\t\t\tfirst_line.append(\"-|\")\n\t\t\t\telse:\n\t\t\t\t\tif int(i[1]) == int(co_rover[1]):\n\t\t\t\t\t\tfirst_line.append(' |')\n\t\t\t\t\t\n\t\t\t\t\telif int(i[1]) > int(co_rover[1]):\n\t\t\t\t\t\tfirst_line.append('+|')\n\t\t\tif i[2] != '-' and co_rover[2] == '-':\n\t\t\t\tif int(co_rover[1]) == int(i[2]):\n\t\t\t\t\tfirst_line.append('/|')\n\t\t\t\telif int(co_rover[1]) < int(i[2]):\n\t\t\t\t\tfirst_line.append(\"+|\")\n\t\t\t\telse:\n\t\t\t\t\tif int(i[1]) == int(co_rover[1]):\n\t\t\t\t\t\tfirst_line.append(\"\\|\")\n\t\t\t\t\t\n\t\t\t\t\telif int(i[1]) < int(co_rover[1]):\n\t\t\t\t\t\tfirst_line.append('-|')\n\t\t\tif i[2] != '-' and co_rover[2] != '-':\n\t\t\t\tif int(i[2]) == int(co_rover[2]):\n\t\t\t\t\tfirst_line.append(' |')\n\t\t\t\telif int(i[2]) < int(co_rover[2]):\n\t\t\t\t\tif int(co_rover[2]) == int(i[1]):\n\t\t\t\t\t\tfirst_line.append(\"'\\'|\")\n\t\t\t\t\telif int(co_rover[2]) > int(i[1]):\n\t\t\t\t\t\tfirst_line.append('-|')\n\t\t\t\telif int(i[2]) > int(co_rover[2]):\n\t\t\t\t\tif int(i[2]) == int(co_rover[1]):\n\t\t\t\t\t\tfirst_line.append(\"/|\")\n\t\t\t\t\telif int(i[2]) > int(co_rover[1]):\n\t\t\t\t\t\tfirst_line.append(\"+|\")\n\n\n\n\t\tfor i in second_lineco:\n\t\t\tif i[2] == '-' and co_rover[2] == '-':\n\t\t\t\tif int(i[1]) == int(co_rover[1]):\n\t\t\t\t\tsec_line.append(' |')\n\t\t\t\telif int(i[1]) < int(co_rover[1]):\n\t\t\t\t\tsec_line.append(\"-|\")\n\t\t\t\telse:\n\t\t\t\t\tsec_line.append('+|')\n\t\t\tif i[2] == '-' and co_rover[2] != '-':\n\t\t\t\tif int(co_rover[2]) == int(i[1]):\n\t\t\t\t\tsec_line.append(' |')\n\t\t\t\telif int(co_rover[2]) > int(i[1]):\n\t\t\t\t\tsec_line.append(\"-|\")\n\t\t\t\telse:\n\t\t\t\t\tif int(i[1]) == int(co_rover[1]):\n\t\t\t\t\t\tsec_line.append(' |')\n\t\t\t\t\t\n\t\t\t\t\telif int(i[1]) > int(co_rover[1]):\n\t\t\t\t\t\tsec_line.append('+|')\n\t\t\tif i[2] != '-' and 
co_rover[2] == '-':\n\t\t\t\tif int(co_rover[1]) == int(i[2]):\n\t\t\t\t\tsec_line.append('/|')\n\t\t\t\telif int(co_rover[1]) < int(i[2]):\n\t\t\t\t\tsec_line.append(\"+|\")\n\t\t\t\telse:\n\t\t\t\t\tif int(i[1]) == int(co_rover[1]):\n\t\t\t\t\t\tsec_line.append(\"'\\'|\")\n\t\t\t\t\t\n\t\t\t\t\telif int(i[1]) < int(co_rover[1]):\n\t\t\t\t\t\tsec_line.append('-|')\n\t\t\tif i[2] != '-' and co_rover[2] != '-':\n\t\t\t\tif int(i[2]) == int(co_rover[2]):\n\t\t\t\t\tsec_line.append(' |')\n\t\t\t\telif int(i[2]) < int(co_rover[2]):\n\t\t\t\t\tif int(co_rover[2]) == int(i[1]):\n\t\t\t\t\t\tsec_line.append(\"'\\'|\")\n\t\t\t\t\telif int(co_rover[2]) > int(i[1]):\n\t\t\t\t\t\tsec_line.append('-|')\n\t\t\t\telif int(i[2]) > int(co_rover[2]):\n\t\t\t\t\tif int(i[2]) == int(co_rover[1]):\n\t\t\t\t\t\tsec_line.append(\"/|\")\n\t\t\t\t\telif int(i[2]) > int(co_rover[1]):\n\t\t\t\t\t\tsec_line.append(\"+|\")\n\t\n\t\tfor i in third_lineco:\n\t\t\tif i[2] == '-' and co_rover[2] == '-':\n\t\t\t\tif int(i[1]) == int(co_rover[1]):\n\t\t\t\t\tthird_line.append(' |')\n\t\t\t\telif int(i[1]) < int(co_rover[1]):\n\t\t\t\t\tthird_line.append(\"-|\")\n\t\t\t\telse:\n\t\t\t\t\tthird_line.append('+|')\n\t\t\tif i[2] == '-' and co_rover[2] != '-':\n\t\t\t\tif int(co_rover[2]) == int(i[1]):\n\t\t\t\t\tthird_line.append(' |')\n\t\t\t\telif int(co_rover[2]) > int(i[1]):\n\t\t\t\t\tthird_line.append(\"-|\")\n\t\t\t\telse:\n\t\t\t\t\tif int(i[1]) == int(co_rover[1]):\n\t\t\t\t\t\tthird_line.append(' |')\n\t\t\t\t\t\n\t\t\t\t\telif int(i[1]) > int(co_rover[1]):\n\t\t\t\t\t\tthird_line.append('+|')\n\t\t\tif i[2] != '-' and co_rover[2] == '-':\n\t\t\t\tif int(co_rover[1]) == int(i[2]):\n\t\t\t\t\tthird_line.append('/|')\n\t\t\t\telif int(co_rover[1]) < int(i[2]):\n\t\t\t\t\tthird_line.append(\"+|\")\n\t\t\t\telse:\n\t\t\t\t\tif int(i[1]) == int(co_rover[1]):\n\t\t\t\t\t\tthird_line.append(\"'\\'|\")\n\t\t\t\t\t\n\t\t\t\t\telif int(i[1]) < int(co_rover[1]):\n\t\t\t\t\t\tthird_line.append('-|')\n\t\t\tif i[2] != '-' and co_rover[2] != '-':\n\t\t\t\tif int(i[2]) == int(co_rover[2]):\n\t\t\t\t\tthird_line.append(' |')\n\t\t\t\telif int(i[2]) < int(co_rover[2]):\n\t\t\t\t\tif int(co_rover[2]) == int(i[1]):\n\t\t\t\t\t\tthird_line.append(\"'\\'|\")\n\t\t\t\t\telif int(co_rover[2]) > int(i[1]):\n\t\t\t\t\t\tthird_line.append('-|')\n\t\t\t\telif int(i[2]) > int(co_rover[2]):\n\t\t\t\t\tif int(i[2]) == int(co_rover[1]):\n\t\t\t\t\t\tthird_line.append(\"/|\")\n\t\t\t\t\telif int(i[2]) > int(co_rover[1]):\n\t\t\t\t\t\tthird_line.append(\"+|\")\n\t\n\t\tfor i in fourth_lineco:\n\t\t\tif i[2] == '-' and co_rover[2] == '-':\n\t\t\t\tif int(i[1]) == int(co_rover[1]):\n\t\t\t\t\tfourth_line.append(' |')\n\t\t\t\telif int(i[1]) < int(co_rover[1]):\n\t\t\t\t\tfourth_line.append(\"-|\")\n\t\t\t\telse:\n\t\t\t\t\tfourth_line.append('+|')\n\t\t\tif i[2] == '-' and co_rover[2] != '-':\n\t\t\t\tif int(co_rover[2]) == int(i[1]):\n\t\t\t\t\tfourth_line.append(' |')\n\t\t\t\telif int(co_rover[2]) > int(i[1]):\n\t\t\t\t\tfourth_line.append(\"-|\")\n\t\t\t\telse:\n\t\t\t\t\tif int(i[1]) == int(co_rover[1]):\n\t\t\t\t\t\tfourth_line.append(' |')\n\t\t\t\t\t\n\t\t\t\t\telif int(i[1]) > int(co_rover[1]):\n\t\t\t\t\t\tfourth_line.append('+|')\n\t\t\tif i[2] != '-' and co_rover[2] == '-':\n\t\t\t\tif int(co_rover[1]) == int(i[2]):\n\t\t\t\t\tfourth_line.append('/|')\n\t\t\t\telif int(co_rover[1]) < int(i[2]):\n\t\t\t\t\tfourth_line.append(\"+|\")\n\t\t\t\telse:\n\t\t\t\t\tif int(i[1]) == 
int(co_rover[1]):\n\t\t\t\t\t\tfourth_line.append(\"'\\'|\")\n\t\t\t\t\t\n\t\t\t\t\telif int(i[1]) < int(co_rover[1]):\n\t\t\t\t\t\tfourth_line.append('-|')\n\t\t\tif i[2] != '-' and co_rover[2] != '-':\n\t\t\t\tif int(i[2]) == int(co_rover[2]):\n\t\t\t\t\tfourth_line.append(' |')\n\t\t\t\telif int(i[2]) < int(co_rover[2]):\n\t\t\t\t\tif int(co_rover[2]) == int(i[1]):\n\t\t\t\t\t\tfourth_line.append(\"'\\'|\")\n\t\t\t\t\telif int(co_rover[2]) > int(i[1]):\n\t\t\t\t\t\tfourth_line.append('-|')\n\t\t\t\telif int(i[2]) > int(co_rover[2]):\n\t\t\t\t\tif int(i[2]) == int(co_rover[1]):\n\t\t\t\t\t\tfourth_line.append(\"/|\")\n\t\t\t\t\telif int(i[2]) > int(co_rover[1]):\n\t\t\t\t\t\tfourth_line.append(\"+|\")\n\t\n\t\tfor i in fifth_lineco:\n\t\t\tif i[2] == '-' and co_rover[2] == '-':\n\t\t\t\tif int(i[1]) == int(co_rover[1]):\n\t\t\t\t\tfifth_line.append(' |')\n\t\t\t\telif int(i[1]) < int(co_rover[1]):\n\t\t\t\t\tfifth_line.append(\"-|\")\n\t\t\t\telse:\n\t\t\t\t\tfifth_line.append('+|')\n\t\t\tif i[2] == '-' and co_rover[2] != '-':\n\t\t\t\tif int(co_rover[2]) == int(i[1]):\n\t\t\t\t\tfifth_line.append(' |')\n\t\t\t\telif int(co_rover[2]) > int(i[1]):\n\t\t\t\t\tfifth_line.append(\"-|\")\n\t\t\t\telse:\n\t\t\t\t\tif int(i[1]) == int(co_rover[1]):\n\t\t\t\t\t\tfifth_line.append(' |')\n\t\t\t\t\t\n\t\t\t\t\telif int(i[1]) > int(co_rover[1]):\n\t\t\t\t\t\tfifth_line.append('+|')\n\t\t\tif i[2] != '-' and co_rover[2] == '-':\n\t\t\t\tif int(co_rover[1]) == int(i[2]):\n\t\t\t\t\tfifth_line.append('/|')\n\t\t\t\telif int(co_rover[1]) < int(i[2]):\n\t\t\t\t\tfifth_line.append(\"+|\")\n\t\t\t\telse:\n\t\t\t\t\tif int(i[1]) == int(co_rover[1]):\n\t\t\t\t\t\tfifth_line.append(\"'\\'|\")\n\t\t\t\t\t\n\t\t\t\t\telif int(i[1]) < int(co_rover[1]):\n\t\t\t\t\t\tfifth_line.append('-|')\n\t\t\tif i[2] != '-' and co_rover[2] != '-':\n\t\t\t\tif int(i[2]) == int(co_rover[2]):\n\t\t\t\t\tfifth_line.append(' |')\n\t\t\t\telif int(i[2]) < int(co_rover[2]):\n\t\t\t\t\tif int(co_rover[2]) == int(i[1]):\n\t\t\t\t\t\tfifth_line.append(\"'\\'|\")\n\t\t\t\t\telif int(co_rover[2]) > int(i[1]):\n\t\t\t\t\t\tfifth_line.append('-|')\n\t\t\t\telif int(i[2]) > int(co_rover[2]):\n\t\t\t\t\tif int(i[2]) == int(co_rover[1]):\n\t\t\t\t\t\tfifth_line.append(\"/|\")\n\t\t\t\t\telif int(i[2]) > int(co_rover[1]):\n\t\t\t\t\t\tfifth_line.append(\"+|\")\n\t\tthird_line2 = []\n\t\n\t\tfor n, i in enumerate(third_line):\n\t\t\tif n == 3:\n\t\t\t\ta = \"H|\"\n\t\t\t\t \n\t\t\t\tthird_line2.append(a)\n\t\t\telse:\n\t\t\t\tthird_line2.append(i)\n\t\tnumber1_line = \"\\n{}\\n{}\\n{}\\n{}\\n{}\\n\".format(\"\".join(fifth_line), \"\".join(fourth_line), \"\".join(third_line2),\"\".join(sec_line) , \"\".join(first_line))\n\t\t\n\t\treturn number1_line\n\n\n\n\n\t\tpass", "def intensity(self) -> int:", "def get_food_level(self):\n return self.plant", "def basic_ennemy_x(state):\n # Player horizon\n horiz = state.depth_buffer[65].astype(np.int32)\n # How is the distance varying along the horizon\n line = np.diff(horiz)\n line[np.abs(line) > 15] = 0\n imin = line.argmin()\n line[:imin] = 0\n imax = line.argmax()\n return (imin + imax) / 2 / len(horiz)", "def raw():\n return (((.25 * x) + .75) * x - 1.5) * x - 2", "def get_lux(self):\n\n svc = \"urn:micasaverde-com:serviceId:LightSensor1\"\n if not svc in self.services:\n raise RuntimeError, \"Device doesn't support the service\"\n\n return self.get_variable(svc, \"CurrentLevel\")", "def get_water_depth(buoy_number):\n\n buoy_info_url = 'https://www.ndbc.noaa.gov/station_page.php?station=' + 
str(buoy_number)\n buoy_info_rss = requests.get(buoy_info_url)\n soup = BeautifulSoup(buoy_info_rss.content, 'lxml')\n try:\n water_depth = float(soup.find('b', string='Water depth:').next_sibling[1:-2])\n except:\n raise AttributeError('The specified buoy does not have a listed water depth. It is possible it is a land-based'\n ' buoy -- NOAA web search is currently limited and cannot remove land-based results. Try'\n 'changing your coordinates to an area with deeper water.')\n return water_depth", "def le(self, x, y):", "def __call__(self, x):\n return self.slope * x + self.ordinate", "def get_level(self, level):\n return", "def topography(x,y):\n \n z = -x/10\n \n N = len(x)\n for i in range(N):\n # Step\n if 10 < x[i] < 12:\n z[i] += 0.4 - 0.05*y[i]\n \n # Constriction\n if 27 < x[i] < 29 and y[i] > 3:\n z[i] += 2\n \n # Pole\n if (x[i] - 34)**2 + (y[i] - 2)**2 < 0.4**2:\n z[i] += 2\n \n return z", "def start_region(self, x, y):\n if x>0 and x<100 and y>0 and y<100:\n return 1\n elif x>700 and x<800 and y>0 and y<100:\n return 2\n elif x>0 and x<100 and y>400 and y<500:\n return 3\n elif x>700 and x<800 and y>400 and y<500:\n return 4\n return 0", "def getLevels():", "def get_level(k):\r\n return int(log2(k))", "def y1(self, level):\n resolution = self.resolution(level)\n y1 = self.y0(level) + 64\n y1[-1] = (self.y_extent + resolution - 1) // resolution\n return y1", "def larkin_model(x,y,amplitude=1.,x_0=0.,y_0=0.,radius=1.,background = 0.):\n intensity = models.AiryDisk2D.evaluate(x,y,amplitude=amplitude,x_0=x_0,y_0=y_0,radius=radius)\n intensity = np.sqrt(np.abs(intensity))\n intensity += background\n return intensity", "def n_y(self, level):\n resolution = self.resolution(level)\n return (self.y_extent // resolution + 63) // 64", "def fetch_levels(self):\n rootLogger.info(\"[*] Fetching water levels...\")\n\n headers = {\"User-Agent\": \"Edwards Aquifer Bot - Follow on Twitter: @edwardsaquabot\"}\n\n response = requests.get(self.url, headers=headers, verify=True, timeout=60)\n if response.status_code != 200:\n rootLogger.error(\n \"HTTP status code: {} -- unsuccessfully retrieved: {}\".format(response.status_code, self.url)\n )\n return\n\n # Use beautiful soup to grab the levels...works, maybe not the best though.\n soup = BeautifulSoup(response.text, \"html.parser\")\n table = soup.find_all(\"table\")[1]\n\n # Today's Reading.\n column = table.find_all(\"td\")[0]\n today_water_level = column.find(\"span\").contents[0].strip()\n\n # Yesterday's Reading.\n column = table.find_all(\"td\")[2]\n yesterday_water_level = column.find(\"span\").contents[0].strip()\n\n # 10 Day Average Reading.\n column = table.find_all(\"td\")[4]\n ten_day_average = column.find(\"span\").contents[0].strip()\n\n return today_water_level, yesterday_water_level, ten_day_average", "def getLevel(self, *args):\n return _libsbml.SBMLExtension_getLevel(self, *args)", "def level(self):\n return self.game_data['player stats']['Level']", "def level(self):\n return self.__pin.pwm", "def trapped_water(elevations: List[int]) -> int:\n count_el: int = len(elevations)\n max_left = [0] * count_el\n max_right = [0] * count_el\n water_trapped = 0\n\n for index in range(1, count_el - 1):\n max_left[index] = max(max_left[index - 1], elevations[index - 1])\n\n for index in range(count_el - 2, 0, -1):\n max_right[index] = max(max_right[index + 1], elevations[index + 1])\n\n for index, elevation in enumerate(elevations):\n water_trapped += max(min(max_left[index], max_right[index]) - elevation, 0)\n\n return water_trapped", "def 
getLevel(self, *args):\n return _libsbml.LayoutExtension_getLevel(self, *args)", "def getLevel(self):\n return self._level", "def area(x, y):\n return x*y/2", "def height(self, x):\n\t\treturn np.interp(x, self.x, self.z)", "def get_luminosity(self, vel_disp):\n\t\tlog_L_V = self.slope*np.log10(vel_disp) + self.intercept\n\t\treturn log_L_V", "def energy(nx,ny):\n return 1+nx+ny", "def getHeight(self, x, y):\r\n if x > self.maxX or y > self.maxY or x < 0 or y < 0:\r\n return 10000000 # effectively infinity\r\n return self.data[y][x][0]", "def get_evaporation_latent_heat() -> float:\n theta = 28.0\n return 2500.8 - 2.3668 * theta", "def get_slope(self) -> str:\n return self.query('slope,?')", "def xyz(self):\n return self._xyz", "def get_wind_values(self):\n return (\n int(self.data[2]), # dir\n float(self.data[3]) / 10, # gust\n float(self.data[4]) / 10, # avg\n float(self.data[5]) / 10, # chill\n )", "def get_specific_heat() -> float:\n return 1006.0", "def get_sample(x, y):\n return noise[x][y]", "def get_level(tag: str) -> int:\n return TAG_LEVELS[tag]", "def resolution(self, level):\n return 2 ** (level - 1)", "def level(date=datetime.datetime.now(), location=1208, verbose=False):\n tide = getTide(date, location, verbose)\n print tide\n dates = [item[0] for item in tide]\n # determine which transition to take\n a, b = None, None\n for i in range(len(dates)-1):\n if date > dates[i] and date < dates[i+1]:\n a = tide[i]\n b = tide[i+1]\n # set variables\n adate = a[0]\n alevel = a[1]\n astate = a[2]\n bdate = b[0]\n blevel = b[1]\n # compute level by interpolation\n level = sinterp(date=date,\n lastextremedate=adate,\n deltatonext=bdate-adate,\n low=min(alevel, blevel),\n hub=abs(max(alevel, blevel)-min(alevel, blevel)),\n rising=(not astate),\n verbose=verbose)\n if verbose:\n print 'level:', level\n # return result\n return level", "def sea_still_water_pressure(z, t1, rho=1.025, g=9.81):\r\n\r\n if z <= t1:\r\n return rho * g * (t1 - z)\r\n else:\r\n return 0", "def get_position(k):\r\n l = get_level(k)\r\n return (l, k - 2**l)", "def ge(self, x, y):\n return self.le(y,x)", "def get_level(self, channel=None):\n return int(self.getSensorData(\"VALVE_STATE\", channel))", "def get_wind():\n return get_next_random(wind, WIND_MAX, WIND_MIN, WIND_DELTA)", "def query_point(tree, y, x):\n res = bit.query_point(tree[y], x)\n end_y = y - (y & -y)\n y -= 1\n\n while y != end_y:\n res -= bit.query_point(tree[y], x)\n y -= (y & -y)\n\n return res", "def getClearWaterDepth(inp):\n\ty90 = getY90(inp)\n\tinp = sorted(inp, key = lambda x: x[0])\n\ts = 0\n\tif inp[0][0] > 0:\n\t\ts += (1-inp[0][1]/2.0) * inp[0][0]\n\tfor i in xrange(1,len(inp)):\n\t\tprev = inp[i-1]\n\t\tcur = inp[i]\n\t\tif cur[0] > y90:\n\t\t\ttop = 0.9\n\t\t\tbase = prev[1]\n\t\t\theight = y90 - prev[0]\n\t\t\ts += (1 - (top + base)/2) * height\n\t\t\tbreak\n\t\tbase = prev[1]\n\t\ttop = cur[1]\n\t\theight = cur[0] - prev[0]\n\t\ts += (1 - (top + base)/2) * height\n\t\t\n\treturn s", "def getBottom( self, X, Y, Z):\n xb,yb,zb = self.transform( X,Y,Z)\n \n gauss = beam( xb,yb,zb, self.w[0], self.w[1], self.l)\n intensity = (2/np.pi)* self.mW/1000. 
/self.w[0]/self.w[1] *gauss # W um^-2\n \n latticeBot = 4*np.sqrt(self.retro*self.alpha) \\\n + 1 + self.retro - 2*np.sqrt(self.retro*self.alpha)\n\n return uL(self.l)*intensity * latticeBot", "def trap(height: List[int]) -> int:\n # No heights passed!\n if not height:\n return 0\n # Max from left\n max_L = 0\n L = len(height)\n left = [0] * L\n for i in range(L):\n if height[i] > max_L:\n max_L = height[i]\n left[i] = max_L\n # Max from right\n max_R = 0\n right = [0] * L\n for i in range(L-1, -1, -1):\n if height[i] > max_R:\n max_R = height[i]\n right[i] = max_R\n # Get water height / area at each point on map\n area = 0\n for i in range(1, L-1):\n area += max(0, min(left[i-1], right[i+1]) - height[i])\n return area", "def _get_slope(x, y):\n slope = linregress(x, y)\n return slope", "def get_Lo(self):\n return self.Lo", "def get_Lo(self):\n return self.Lo", "def kruskal_wallis(x, y):\n f_statistic, p_value = ss.kruskal(x, y)\n # if p_value < _ALPHA:\n # print(\n # 'ATTENTION: Kruskal-Wallis (' + p_value.__str__() + ') is smaller then alpha(' + _ALPHA.__str__() + '). ' +\n # 'This means that there is a different between the ranks and further testing is necessary')\n return f_statistic, p_value", "def z_halo(self): \n return self.coords_halo[2]", "def winding_number(x, y, primitive):\n\n wn = 0\n\n edges = zip(primitive[\"vertices\"][-1:] + primitive[\"vertices\"][:-1],\n primitive[\"vertices\"])\n for edge in edges:\n # check if cuts y parallel line at (x, y) &&\n if (edge[0][0] > x) != (edge[1][0] > x):\n # check what side of the edge is (x, y)\n # side > 0 => point is to de left of the edge\n # side = 0 => point is on the edge\n # side < 0 => point is to de right of the edge\n side = ((y - edge[0][1]) * (edge[1][0] - edge[0][0]) -\n (x - edge[0][0]) * (edge[1][1] - edge[0][1]))\n # if to the left, increase wn\n if side > 0: wn += 1\n # if to the right, decrease wn\n else: wn -= 1\n\n if wn != 0: return True\n return False" ]
[ "0.7158327", "0.67313606", "0.661933", "0.65803623", "0.65341234", "0.63737684", "0.59729874", "0.5847632", "0.58334917", "0.5827491", "0.58220893", "0.58033043", "0.57986677", "0.5751506", "0.56921387", "0.56882936", "0.5679975", "0.5662248", "0.56592685", "0.56507444", "0.56410825", "0.56353784", "0.56149995", "0.56101435", "0.56098944", "0.5595319", "0.55663246", "0.5505086", "0.54982936", "0.5497777", "0.5483305", "0.5469494", "0.5453959", "0.5448976", "0.5440108", "0.54293376", "0.542507", "0.5420814", "0.5419442", "0.54105806", "0.5407803", "0.5404895", "0.5389681", "0.5388845", "0.5376648", "0.5353994", "0.5352764", "0.5352479", "0.5350588", "0.5349358", "0.5341862", "0.53293407", "0.5329058", "0.53227943", "0.53192216", "0.5317329", "0.53068805", "0.5293125", "0.5285544", "0.52828413", "0.52827173", "0.5281858", "0.52802134", "0.5276655", "0.5271258", "0.52612174", "0.5260666", "0.5251893", "0.5236632", "0.523631", "0.52320653", "0.52273506", "0.52116436", "0.5203055", "0.5200728", "0.5198808", "0.51977247", "0.519478", "0.51919544", "0.5191891", "0.5185594", "0.5178836", "0.51780206", "0.51764935", "0.51718384", "0.5169911", "0.5154374", "0.5151107", "0.51469624", "0.514114", "0.5140693", "0.5137122", "0.5136497", "0.51312375", "0.5128605", "0.51140463", "0.51140463", "0.51134956", "0.510904", "0.510775" ]
0.74286795
0
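Note: two negatives in the record above (`trapped_water` and `trap`) are independent implementations of the same two-pass prefix-maximum scan for the trapping-rain-water problem. A condensed, runnable sketch of that technique for reference (function and variable names here are illustrative, not taken from the dataset):

def water_trapped(heights):
    """Two-pass prefix-maximum scan, as in the trapped_water/trap negatives above."""
    if not heights:
        return 0
    n = len(heights)
    max_left, max_right = [0] * n, [0] * n
    for i in range(1, n):                       # highest bar strictly left of i
        max_left[i] = max(max_left[i - 1], heights[i - 1])
    for i in range(n - 2, -1, -1):              # highest bar strictly right of i
        max_right[i] = max(max_right[i + 1], heights[i + 1])
    # Water above bar i is bounded by the lower of its two bounding walls.
    return sum(max(0, min(max_left[i], max_right[i]) - heights[i]) for i in range(n))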
This returns the raw internal state. Generally avoid using this if you can
def getAllData(self): return self.data
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def state_raw(self):\n return self._state_raw", "def __getstate__(self):\n return self.__dict__", "def __getstate__(self):\n state = self.__dict__.copy()\n self.__cleanState__(state)\n return state", "def _get_state(self):", "def __getstate__(self):\n\n\t\tresult = self.__dict__.copy()\n\n\t\t# Do not pickle references to mutable objects.\n\t\tdel result['_device']\n\t\tdel result['resources']\n\n\t\treturn result", "def __getstate__(self) -> dict:\n return self.__handle__.get_state()", "def load_state(self):\n return self.state.read()", "def _get_state(self):\n return self.__state", "def _get_state(self):\n return self.__state", "def _get_state(self):\n return self.__state", "def _get_state(self):\n return self.__state", "def _get_state(self):\n return self.__state", "def _get_state(self):\n return self.__state", "def _get_state(self):\n return self.__state", "def _get_state(self):\n return self.__state", "def _get_state(self):\n return self.__state", "def _get_state(self):\n return self.__state", "def _get_state(self):\n return self.__state", "def _get_state(self):\n return self.__state", "def _get_state(self):\n return self.__state", "def _get_state(self):\n return self.__state", "def _get_state(self):\n return self.__state", "def _get_state(self):\n return self.__state", "def _get_state(self):\n return self.__state", "def _get_state(self):\n return self.__state", "def _get_state(self):\n return self.__state", "def _get_state(self):\n return self.__state", "def _get_state(self):\n return self.__state", "def _get_state(self):\n return self.__state", "def _get_state(self):\n return self.__state", "def _get_state(self):\n return self.__state", "def _get_state(self):\n return self.__state", "def _get_state(self):\n return self.__state", "def _get_state(self):\n return self.__state", "def _get_state(self):\n return self.__state", "def _get_state(self):\n return self.__state", "def _get_state(self):\n return self.__state", "def _get_state(self):\n return self.__state", "def _get_state(self):\n return self.__state", "def _get_state(self):\n return self.__state", "def _get_state(self):\n return self.__state", "def _get_state(self):\n return self.__state", "def _get_state(self):\n return self.__state", "def _get_state(self):\n return self.__state", "def _get_state(self):\n return self.__state", "def _get_state(self):\n return self.__state", "def _get_state(self):\n return self.__state", "def _get_state(self):\n return self.__state", "def _get_state(self):\n return self.__state", "def _get_state(self):\n return self.__state", "def _get_state(self):\n return self.__state", "def _get_state(self):\n return self.__state", "def _get_state(self):\n return self.__state", "def _get_state(self):\n return self.__state", "def _get_state(self):\n return self.__state", "def _get_state(self):\n return self.__state", "def _get_state(self):\n return self.__state", "def _get_state(self):\n return self.__state", "def _get_state(self):\n return self.__state", "def _get_state(self):\n return self.__state", "def _get_state(self):\n return self.__state", "def _get_state(self):\n return self.__state", "def _get_state(self):\n return self.__state", "def _get_state(self):\n return self.__state", "def _get_state(self):\n return self.__state", "def _get_state(self):\n return self.__state", "def _get_state(self):\n return self.__state", "def _get_state(self):\n return self.__state", "def _get_state(self):\n return self.__state", "def _get_state(self):\n return self.__state", "def _get_state(self):\n return self.__state", 
"def _get_state(self):\n return self.__state", "def _get_state(self):\n return self.__state", "def _get_state(self):\n return self.__state", "def _get_state(self):\n return self.__state", "def _get_state(self):\n return self.__state", "def _get_state(self):\n return self.__state", "def _get_state(self):\n return self.__state", "def _get_state(self):\n return self.__state", "def __getstate__(self):\n state: Dict[str, Any] = deepcopy(self.__dict__)\n del state['__db']\n return state", "def __getstate__(self):\n return self._", "def __getstate__(self):\n return {}", "def get_state(self):\n pass", "def __getstate__(self):\n state = self.__dict__.copy()\n # We can't and shouldn't use _conn, _metadata_conn and _logger in a separate process\n del state['_conn']\n del state['_metadata_conn']\n del state['_logger']\n return state", "def getstate(self):\n return {}", "def __getstate__(self):\n\n return self.get_DER()", "def __getstate__(self):\n copy = self.__dict__.copy()\n copy['_workaround'] = None\n return copy", "def get_state(self):\n return copy.deepcopy(self._state)", "def __getstate__(self):\n return dict(self.items())", "def __getstate__(self):\n return dict(self.items())", "def GetState(self):\n self.update()\n d = {}\n for n in ['imgtype', 'object', 'path', 'filename', 'nextfile', 'lastfile', 'filectr', 'observer', 'filter', 'filterid',\n 'guider', 'mirror', 'lastact', 'initialized', 'errors', 'highcap', 'preamp', 'hsspeed', 'vsspeed', 'cycletime',\n 'readouttime', 'mode', 'cool', 'tset', 'settemp', 'temp', 'tempstatus', 'imaging', 'shuttermode', 'exptime',\n 'xmin', 'xmax', 'ymin', 'ymax', 'roi', 'xbin', 'ybin', 'isdark', 'MonitorActive', 'lastact']:\n d[n] = self.__dict__.get(n)\n return d", "def __getstate__(self):\n state = self.__dict__\n state['_lock'] = None\n return state", "def native_state(self):\n return self._native_state", "def get_map_state(self):\n return self.serialize()", "def __getstate__(self):\n state = self.__dict__.copy()\n del state['_view']\n return state", "def __getstate__(self):\n s = time.time()\n self_dict = self.__dict__.copy()\n del self_dict['pool']\n # print('_GETSTATE UTIL', time.time()-s)\n return self_dict", "def state(self):\n return self._state.copy()", "def __getstate__(self):\n try: \n state = self.__dict__.copy()\n del state['_Funcs']\n return state\n except: \n return self.__dict__", "def __getstate__(self):\n self_dict = self.__dict__\n del self.temp_yaml\n return self_dict", "def state(self):\n return get_state(self.context)", "def get_state(self):\n raise NotImplementedError" ]
[ "0.8229839", "0.7681325", "0.76782733", "0.75114673", "0.7508105", "0.7503525", "0.7435647", "0.74314433", "0.74314433", "0.74314433", "0.74314433", "0.74314433", "0.74314433", "0.74314433", "0.74314433", "0.74314433", "0.74314433", "0.74314433", "0.74314433", "0.74314433", "0.74314433", "0.74314433", "0.74314433", "0.74314433", "0.74314433", "0.74314433", "0.74314433", "0.74314433", "0.74314433", "0.74314433", "0.74314433", "0.74314433", "0.74314433", "0.74314433", "0.74314433", "0.74314433", "0.74314433", "0.74314433", "0.74314433", "0.74314433", "0.74314433", "0.74314433", "0.74314433", "0.74314433", "0.74314433", "0.74314433", "0.74314433", "0.74314433", "0.74314433", "0.74314433", "0.74314433", "0.74314433", "0.74314433", "0.74314433", "0.74314433", "0.74314433", "0.74314433", "0.74314433", "0.74314433", "0.74314433", "0.74314433", "0.74314433", "0.74314433", "0.74314433", "0.74314433", "0.74314433", "0.74314433", "0.74314433", "0.74314433", "0.74314433", "0.74314433", "0.74314433", "0.74314433", "0.74314433", "0.74314433", "0.74314433", "0.74314433", "0.74314433", "0.74314433", "0.740906", "0.7377948", "0.728749", "0.7232251", "0.7212373", "0.72117275", "0.7172193", "0.717168", "0.714162", "0.713766", "0.713766", "0.7133545", "0.71332556", "0.71204126", "0.7115936", "0.71070266", "0.7072064", "0.70631766", "0.70453846", "0.70224065", "0.70217866", "0.7008655" ]
0.0
-1
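The zero-scored record above pairs a "raw internal state" docstring with a one-line accessor, while several of its negatives return copies instead (e.g. `copy.deepcopy(self._state)`). A minimal sketch of that distinction, using a hypothetical class and field name:

import copy

class Store:
    def __init__(self):
        self._data = {"items": []}

    def getAllData(self):
        # Raw internal state: callers share, and can mutate, self._data.
        return self._data

    def getState(self):
        # Safer pattern seen in several negatives above: hand out a deep copy
        # so external code cannot corrupt the internal state.
        return copy.deepcopy(self._data)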
Return true if the line drawn through a, b, and c makes a right turn.
def rightTurnFiltered(a, b, c): def isSmall(number): """Function to check if a number is very small.""" return abs(number) < epsilon def floatVectortoFractionVector(vector): return [Fraction.from_float(x) for x in vector] def crossProduct(p1, p2, p3): """Compute the crossProduct of the vectors p2 - p1 and p3 - p1.""" return ( -(p1[1]*p2[0]) + p1[0]*p2[1] + p1[1]*p3[0] - p2[1]*p3[0] - p1[0]*p3[1] + p2[0]*p3[1] ) v1Norm = euclidean_distance(a, b) v2Norm = euclidean_distance(a, c) q = crossProduct(a, b, c) angle = q / (v1Norm * v2Norm) if ( isSmall(v1Norm) or isSmall(v2Norm) or isSmall(angle) ): q = crossProduct( floatVectortoFractionVector(a), floatVectortoFractionVector(b), floatVectortoFractionVector(c) ) return (q > 0)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def is_ccw(point_a, point_b, point_c):\r\n return is_on_line(point_a, point_b, point_c) > 0", "def is_right_angle(a, b, c):\n if a == 0 or b == 0 or c == 0:\n return False\n else :\n return (a == b + c) or (b == c + a) or (c == a + b)", "def test_right_turn_true(self):\n point1 = np.array([1, 1])\n point2 = np.array([2, 2])\n point3 = np.array([3, 1])\n\n right_angle1 = convex_hull.right_turn(point1, point2, point3) # first right turn angle (boolean)\n right_angle2 = convex_hull.right_turn(point1, point3, point2) # second right turn angle (boolean)\n\n self.assertTrue(right_angle1 and not right_angle2)", "def is_ccw(a, b, c):\n p = b - a\n q = c - a\n area = p.x * q.y - q.x * p.y\n\t # May want to throw an exception if area == 0\n return area > 0", "def isRight(self):\n\t\tif self.sq(self.a) == self.sq(self.b) + self.sq(self.c):\n\t\t\treturn True\n\t\telif self.sq(self.b) == self.sq(self.a) + self.sq(self.c):\n\t\t\treturn True\n\t\telif self.sq(self.c) == self.sq(self.a) + self.sq(self.b):\n\t\t\treturn True\n\t\treturn False", "def is_ccw(self, b: PointOrIterable, c: PointOrIterable) -> bool:\n result = self.ccw(b, c)\n if result == 0:\n raise ColinearPoints(self, b, c)\n return result > 0", "def can_left_arc(c, correct_arcs):\n try:\n return Arc(c.buffer[0], c.sentence[c.stack[-1]].deprel, c.stack[-1]) in correct_arcs\n except IndexError:\n return False", "def can_right_arc(c, correct_arcs):\n try:\n return Arc(c.stack[-1], c.sentence[c.buffer[0]].deprel, c.buffer[0]) in correct_arcs \\\n and has_all_children(c.buffer[0], c, correct_arcs)\n except IndexError:\n return False", "def can_move_diagonally_inside_palace(self, position):\n\n return position in self.get_palace_diagonal_blue() or\\\n position in self.get_palace_diagonal_red()", "def is_on_line(point_a, point_b, point_c):\r\n return (point_b[0] - point_a[0]) * (point_c[1] - point_a[1]) - (point_b[1] - point_a[1]) * (point_c[0] - point_a[0])", "def lines_cross(x1, y1, x2, y2, x3, y3, x4, y4):\n a1 = orientation(x1,y1, x3,y3, x4,y4)\n a2 = orientation(x2,y2, x3,y3, x4,y4)\n b1 = orientation(x3,y3, x1,y1, x2,y2)\n b2 = orientation(x4,y4, x1,y1, x2,y2)\n return ((a1 != a2 and b1 != b2)\\\n or ((x1 == x3 and y1 == y3) or (x2 == x3 and y2 == y3)\\\n or (x1 == x4 and y1 == y4) or (x2 == x4 and y2 == y4))\\\n or ((a1 == math.pi and a2 == math.pi)\\\n or b1 == math.pi and b2 == math.pi))", "def check_right_side():\n maze.turn_left()\n #print 'checked right side'\n if maze.go():\n maze.turn_right()\n maze.turn_right()\n maze.go()\n maze.turn_left()\n # print 'i can go right'\n return True\n else:\n #print \"i can't go right\"\n maze.turn_right()\n return False", "def _check(self, x, y):\n n = self.n\n # x direction\n xline = self.arr[y]\n if not self.x_regexes[y].match(xline):\n return False\n\n # y direction\n ypos = x + max(0, y + 1 - n)\n yline = []\n x1, y1 = ypos, 0\n while x1 >= 0 and y1 < 2 * n - 1:\n if x1 < len(self.arr[y1]):\n yline.append(self.arr[y1][x1])\n if y1 >= n - 1:\n x1 -= 1\n y1 += 1\n\n if not self.y_regexes[ypos].match(yline):\n return False\n\n # z direction\n zpos = x + max(0, n - 1 - y)\n zline = []\n x1, y1 = zpos, 2 * n - 2\n while x1 >= 0 and y1 >= 0:\n if x1 < len(self.arr[y1]):\n zline.append(self.arr[y1][x1])\n if y1 <= n - 1:\n x1 -= 1\n y1 -= 1\n\n if not self.z_regexes[zpos].match(zline):\n return False\n\n return True", "def clockwise(p1, p2, p3):\n\tv1 = p2 - p1\n\tv2 = p3 - p2\n\tc = (v2.x * v1.y) - (v1.x * v2.y)\n\tif c > 0:\n\t\treturn True\n\telse:\n\t\treturn False", "def is_triangle(a, b, 
c):\n a, b, c = sorted([a, b, c])\n return True if a > abs(b - c) and a < (b + c) else False", "def is_cr(self, y, t):\n return t == 0 and y != 0", "def are_vertices_clockwise(self,line):\r\n \r\n import numpy as np\r\n \r\n signed_area = 0\r\n for idx in range(line.shape[0]):\r\n \r\n x1 = line[idx,0]\r\n y1 = line[idx,1]\r\n if idx == line.shape[0]-1:\r\n x2 = line[0,0]\r\n y2 = line[0,1]\r\n else:\r\n x2 = line[idx+1,0]\r\n y2 = line[idx+1,1]\r\n \r\n signed_area += (x1 * y2 - x2 * y1)\r\n \r\n return (np.sign(signed_area) == -1.)", "def are_vertices_clockwise(self,line):\r\n \r\n import numpy as np\r\n \r\n signed_area = 0\r\n for idx in range(line.shape[0]):\r\n \r\n x1 = line[idx,0]\r\n y1 = line[idx,1]\r\n if idx == line.shape[0]-1:\r\n x2 = line[0,0]\r\n y2 = line[0,1]\r\n else:\r\n x2 = line[idx+1,0]\r\n y2 = line[idx+1,1]\r\n \r\n signed_area += (x1 * y2 - x2 * y1)\r\n \r\n return (np.sign(signed_area) == -1.)", "def isCollinear(a,b,c):\r\n #return slope(a, b) == slope(b, c) == slope(c, a) #DOES NOT WORK\r\n #return (b[0] - a[0]) * (c[1] - a[1]) == (c[0] - a[0]) * (b[1] - a[1]) \r\n #return distance(a,b) + distance(b,c) == distance(a,c)\r\n x1 = a[0]\r\n y1 = a[1]\r\n x2 = b[0]\r\n y2 = b[1]\r\n x3 = c[0]\r\n y3 = c[1] \r\n if (x1*(y2 - y3)) + (x2*(y3 - y1)) + (x3*(y1-y2)) == 0: \r\n return True\r\n else:\r\n return False", "def isAnyLineAt(self, x, y):\n return (self.isLineAt(x, y, 1, 0) or # Horizontal\n self.isLineAt(x, y, 0, 1) or # Vertical\n self.isLineAt(x, y, 1, 1) or # Diagonal up\n self.isLineAt(x, y, 1, -1)) # Diagonal down", "def answer_ok(a):\n (rightly_positioned, permutated) = a\n if (rightly_positioned + permutated > number_of_positions) \\\n or (rightly_positioned + permutated < len(colours) - number_of_positions):\n return False\n if rightly_positioned == 3 and permutated == 1:\n return False\n return True", "def is_straight_line(self, arr):\n # First pair of point (x0, y0) \n x0 = arr[0][0]\n y0 = arr[0][1]\n\n # Second pair of point (x1, y1) \n x1 = arr[len(arr) - 1][0]\n y1 = arr[len(arr) - 1][1]\n\n dx = x1 - x0\n dy = y1 - y0\n\n # Loop to iterate over the points \n for i in range(len(arr)):\n x = arr[i][0]\n y = arr[i][1]\n\n if (dx * (y - y1) - dy * (x - x1)) > self.movement_tolerance:\n return False\n\n return True", "def R_will_change_direction(point0, point1, point2):\n\n x0, y0 = point0[0], point0[1]\n x1, y1 = point1[0], point1[1]\n x2, y2 = point2[0], point2[1]\n\n try:\n m1 = (x1 - x2) / (y2 - y1)\n m2 = (y2 - y1) / (x2 - x1)\n x3 = ((m2 * x1) - (m1 * x0) - y1 + y0) / (m2 - m1)\n y3 = m1 * (x3 - x0) + y0\n except ZeroDivisionError:\n (x3, y3) = (x0, y1) if y1 == y2 else (x1, y0)\n\n return ((min(x1, x2) <= x3 <= max(x1, x2)) and (min(y1, y2) <= y3 <= max(y1, y2))), (x3, y3)", "def is_on_board(self, r, c):\r\n return 0 <= r <= 7 and 0 <= c <= 7", "def is_ok_line(line):\n card1 = line[0]\n card2 = line[1]\n card3 = line[2]\n\n if not is_coupled(card1.east, card2.west):\n return False\n if not is_coupled(card2.east, card3.west):\n return False\n return True", "def is_in_collision_line(self, a, b):\n return abs((b[0]-a[0])*self.x + (a[1]-b[1])*self.y + (a[0]-b[0])*b[1] + (b[1]-a[1])*a[0]) /\\\n sqrt((b[0]-b[1])**2 + (a[1]-b[1])**2 + 0.0000001)< self.r", "def is_colinear(self, b: PointType, c: PointType) -> bool:\n return self.ccw(b, c) == 0", "def isEquilateral(self):\n\t\treturn self.a == self.b == self.c", "def can_move(self, next_x, next_y):\n\t\tif self.battery == 0:\n\t\t\tif self.planet.tiles[next_y][next_x].is_shaded():\n\t\t\t\treturn 
False\n\t\tif self.planet.tiles[next_y][next_x].elevation(self) == \"+\":\n\t\t\treturn False\n\t\tif self.planet.tiles[next_y][next_x].elevation(self) == \"-\":\n\t\t\treturn False\n\t\treturn True", "def is_left(self, line):\n return line.angle() < 0", "def is_ok_three_lines(line1, line2, line3):\n card1 = line1[0]\n card2 = line1[1]\n card3 = line1[2]\n card4 = line2[0]\n card5 = line2[1]\n card6 = line2[2]\n\n card7 = line3[0]\n card8 = line3[1]\n card9 = line3[2]\n idents1 = [card.ident for card in line1]\n idents2 = [card.ident for card in line2]\n idents3 = [card.ident for card in line3]\n\n intersection = list(set(idents1) & set(idents2))\n if intersection:\n dprint(\"intersection 12\")\n return False\n\n intersection = list(set(idents1) & set(idents3))\n if intersection:\n return False\n\n intersection = list(set(idents2) & set(idents3))\n if intersection:\n return False\n\n print(\"??????????????\")\n show_triple(line1, line2, line3)\n print(\"??????????????\")\n\n if not is_ok_two_lines(line1, line2):\n return False\n if not is_ok_two_lines(line2, line3):\n return False\n\n return True", "def is_ate(self, snake_x, snake_y):\n if snake_x == self.x and snake_y == self.y:\n return True", "def checkStraightLine(coordinates: List[List[int]]) -> bool:\n\t# initializing our comparison slope value\n\tnum = coordinates[1][1] - coordinates[0][1]\n\tden = coordinates[1][0] - coordinates[0][0]\n\tif den == 0:\n\t\tslope = math.inf\n\telse:\n\t\tslope = num / den\n\n\t# checking the initial slope against all other slopes\n\tslope_check = 0\n\tfor i in range(2, len(coordinates)):\n\t\tnum = coordinates[i][1] - coordinates[i-1][1]\n\t\tden = coordinates[i][0] - coordinates[i-1][0]\n\t\tif den == 0:\n\t\t\tslope_check = math.inf\n\t\telse:\n\t\t\tslope_check = num/den\n\n\t\tif slope_check != slope:\n\t\t\treturn False\n\n\treturn True", "def is_solved(self):\n colors = ['green', 'blue', 'red', 'orange', 'white', 'yellow']\n for row in range(3):\n for column in range(3):\n if self.front[row][column] != colors[0]:\n return False\n for row in range(3):\n for column in range(3):\n if self.back[row][column] != colors[1]:\n return False\n for row in range(3):\n for column in range(3):\n if self.right[row][column] != colors[2]:\n return False\n for row in range(3):\n for column in range(3):\n if self.left[row][column] != colors[3]:\n return False\n for row in range(3):\n for column in range(3):\n if self.up[row][column] != colors[4]:\n return False\n for row in range(3):\n for column in range(3):\n if self.down[row][column] != colors[5]:\n return False\n return True", "def line_ccw(a, b, p):\n return (p[1] - a[1]) * (b[0] - a[0]) > (b[1] - a[1]) * (p[0] - a[0])", "def exsist_triangle(a: int, b: int, c: int):\n if a + b > c and a + c > b and b + c > a:\n print('Триугольник существует')\n if a == b == c:\n print('Триугольник равносторонний')\n elif a == b or a == c or b == c:\n print('Триугольник равнобедренный')\n else:\n print('Триугольник разносторонний')\n else:\n print('Триугольник не существует')", "def is_ok_two_lines(line1, line2):\n card1 = line1[0]\n card2 = line1[1]\n card3 = line1[2]\n card4 = line2[0]\n card5 = line2[1]\n card6 = line2[2]\n idents1 = [card.ident for card in line1]\n idents2 = [card.ident for card in line2]\n intersection = list(set(idents1) & set(idents2))\n if intersection:\n return False\n if not is_coupled(card1.south, card4.north):\n return False\n if not is_coupled(card2.south, card5.north):\n return False\n if not is_coupled(card3.south, card6.north):\n return 
False\n return True", "def sanity_check(left_line, right_line):\n\n # check horizontal separation distance\n if abs(right_line.line_base_pos - left_line.line_base_pos) > 4.0:\n #print(\"Line base positions too far from each other\")\n return False\n\n # check lines are roughly parallel\n # if base pos and raduius of both lines are ok, it should be enough\n # to check the X distances of a few points with respect to their y positions\n # so slice the Y points into chunks and check\n chunksize = 200\n length = min(len(left_line.ally), len(right_line.ally))\n\n # TODO: error handling\n if (right_line.allx is not None) and (left_line.allx is not None):\n bias = None\n for i in range(0, length, chunksize):\n\n # take x at car as bias\n if bias is None:\n bias = abs(right_line.allx[i] - left_line.allx[i]) * left_line.xm_per_pix\n else:\n if abs(bias - abs(right_line.allx[i] - left_line.allx[i])*left_line.xm_per_pix) > 1.0:\n #print(\"Lines are not parallel\")\n return False\n else:\n return False\n\n # check curvatures -- the curvatures for left and right should be roughly\n # in the same magitude -- check for error\n if abs(left_line.radius_of_curvature - right_line.radius_of_curvature) > 200:\n #print(\"Line radius of curvature too different\")\n return False\n\n return True", "def isclockwise(self):\n s = sum((seg[1][0] - seg[0][0]) * (seg[1][1] + seg[0][1])\n for seg in self.segment_tuples)\n return s > 0", "def is_legal_move(self, start_pos, end_pos, start_piece, end_piece_player_id, board):\r\n parsed_positions = self.parse_positions(start_pos, end_pos)\r\n start_row = parsed_positions[0]\r\n start_col = parsed_positions[1]\r\n end_row = parsed_positions[2]\r\n end_col = parsed_positions[3]\r\n\r\n # Case for Red's side\r\n if start_piece.get_player_id() == 'r':\r\n if not (3 <= end_col <= 5 and 0 <= end_row <= 2): # Returns False when is to move outside the palace\r\n return False\r\n else:\r\n if abs(start_col - end_col) == 1 and abs(start_row - end_row) == 1: # Checks if end_pos forces a move diagonally\r\n return True\r\n else:\r\n return False\r\n\r\n # Case for Black's side\r\n else:\r\n if not (3 <= end_col <= 5 and 7 <= end_row <= 9): # Returns False when is to move outside the palace\r\n return False\r\n else:\r\n if abs(start_col - end_col) == 1 and abs(start_row - end_row) == 1: # Checks if end_pos forces a move diagonally\r\n return True\r\n else:\r\n return False", "def check(self,a,x,y):\r\n return not self.exitsinrow(self.rows,x,a) and not self.existsincol(self.rows,y,a) and \\\r\n not self.exitsinblock(self.rows, x - x % 3, y - y % 3,a)", "def coplanar_points_are_on_same_side_of_line(a, b, p1, p2):\n check_shape_any(a, (3,), (-1, 3), name=\"a\")\n vg.shape.check(locals(), \"b\", a.shape)\n vg.shape.check(locals(), \"p1\", a.shape)\n vg.shape.check(locals(), \"p2\", a.shape)\n\n # Uses \"same-side technique\" from http://blackpawn.com/texts/pointinpoly/default.html\n along_line = b - a\n return vg.dot(vg.cross(along_line, p1 - a), vg.cross(along_line, p2 - a)) >= 0", "def line_score(direction: str, index: int = 0) -> bool:\n\n # to keep from searching nonexistant rows and columns:\n if index >= n+1:\n return False\n\n # we skip the header row and column with (1,n)\n for i in range(1, n):\n if direction == 'row':\n if 'cringo' in player.card[index][i] and 'cringo' in player.card[index][i+1]:\n pass\n else:\n return False\n if direction == 'column':\n if 'cringo' in player.card[i][index] and 'cringo' in player.card[i+1][index]:\n pass\n else:\n return False\n if direction == 
'LR':\n if 'cringo' in player.card[i][i] and 'cringo' in player.card[i+1][i+1]:\n pass\n else:\n return False\n if direction == 'RL':\n if 'cringo' in player.card[i][(n+1)-i] and 'cringo' in player.card[i+1][(n+1)-(i+1)]:\n pass\n else:\n return False\n\n return True", "def triangle_point_intersects(a, b, c, s):\n return (\n line_ccw(a, b, c) == line_ccw(a, b, s) and\n line_ccw(b, c, a) == line_ccw(b, c, s) and\n line_ccw(c, a, b) == line_ccw(c, a, s)\n )", "def verify_legal_rotation(self, direction):\n test_figure = None\n if direction == \"CW\":\n test_figure = self.get_block_positions(self.active_piece.get_cw_rotation())\n elif direction == \"CCW\":\n test_figure = self.get_block_positions(self.active_piece.get_ccw_rotation())\n\n for b_x, b_y in test_figure:\n if b_x < 0 or b_x >= self.WIDTH:\n return False\n\n if b_y < 0 or b_y >= self.HEIGHT:\n return False\n\n if self.board[b_y][b_x] != 0:\n return False\n return True", "def finished(self) -> bool:\n p1_count = 0\n p2_count = 0\n ley_line_total = (self.side_length + 1) * 3\n for itype in self.current_ley_lines:\n for line in itype:\n if line[0] == '1':\n p1_count += 1\n if line[0] == '2':\n p2_count += 1\n return p1_count >= ley_line_total / 2 or p2_count >= ley_line_total / 2", "def castling_valid(self, turn, direction):\n \n opposite_colour = next_turn(turn)\n\n \n if self.board[direction[0]] and self.board[direction[-1]] != self.empty:\n if ((self.board[direction[0]].graphic) == piece_class.PIECEDICT[turn][piece_class.King] and \n (self.board[direction[-1]].graphic) == piece_class.PIECEDICT[turn][piece_class.Rook]):\n if self.board[direction[0]].move_track == False and self.board[direction[-1]].move_track == False:\n for i in self.path_dict[opposite_colour]:\n if i in self.coords:\n if self.coords.index(i) == direction[0]:\n \n return False\n \n if self.coords.index(i) == direction[1]:\n \n return False\n \n if self.coords.index(i) == direction[2]:\n \n return False\n \n if len(direction) == 4:\n if self.board[direction[1]] == self.empty:\n if self.board[direction[2]] == self.empty:\n \n return True\n \n if len(direction) == 5:\n if self.board[direction[1]] == self.empty:\n if self.board[direction[2]] == self.empty:\n if self.board[direction[3]] == self.empty:\n \n return True\n \n return False", "def isLineAt(self, x, y, dx, dy):\n\n initialValue = self.board[x][y]\n #checks a cell to see if there is a piece there\n if initialValue != 0:\n #loops though 3 times in a certain direction to see\n #if there is a winning configuration\n for i in range(3):\n xIndex = x + (dx * (i+1))\n yIndex = y + (dy * (i+1))\n if (-1 < xIndex < self.h) and (-1 < yIndex < self.w):\n if initialValue == self.board[xIndex][yIndex]:\n continue\n else:\n return False\n else:\n return False\n return True\n else:\n return False", "def can_rotate(self) -> (bool, list, list):\n arms, axis = self.get_arms()\n rotation = self.get_rotation()\n if rotation[1][0] == 0:\n return False\n coord_axis = np.array([[axis.x_obj], [axis.y_obj]])\n coord_arms = [np.array([[arm.x_obj], [arm.y_obj]])\n for arm in arms]\n coord_new_arms = []\n # Collecting arm coordinates in the situation there turnstile rotates\n for i in range(len(arms)):\n coord_arm = coord_arms[i]\n coord_new_arms.append(\n np.dot(rotation, coord_arm - coord_axis) + coord_axis)\n can_rotate = True\n for i in range(len(arms)):\n coord_arm = coord_arms[i]\n coord_new_arm = coord_new_arms[i]\n # Object turnstile should push\n coord_front = coord_arm + coord_new_arm - coord_axis\n coord_character = np.array(\n 
[[self.moving_character.x_obj], [self.moving_character.y_obj]])\n obj_front = self.grid.obj_list[\n coord_front[0][0], coord_front[1][0]]\n if not (isinstance(obj_front, ob.Void) or (coord_front == coord_character).all()):\n can_rotate = False\n # Object being at the destination of the arm\n obj_target = self.grid.obj_list[\n coord_new_arm[0][0], coord_new_arm[1][0]]\n if not isinstance(obj_target, (ob.Void, ob.TurnstileBloc)):\n can_rotate = False\n return can_rotate, coord_arms, coord_new_arms", "def _isLine(self):\n return (self.width == 0 and self.height > 1) or (self.height == 0 and self.width > 1)", "def _isLine(self):\n return (self.width == 0 and self.height > 1) or (self.height == 0 and self.width > 1)", "def sameDirection(cls, *vectors, e=10e-10):\n l = len(vectors)\n if l == 2:\n v1 = vectors[0]\n v2 = vectors[1]\n return (abs(v1.angle - v2.angle) % (2 * math.pi)) < e\n else:\n for i in range(l):\n for j in range(i + 1, l):\n if not cls.sameDirection(vectors[i], vectors[j]):\n return False\n return True", "def _isLineTanToArc(self, x1, y1, x2, y2, cx, cy, d):\n p = QPointF(x2, y2)\n # line start -> end\n v1 = QVector2D(p - QPointF(x1, y1)).normalized()\n # arc center -> arc start\n v2 = QVector2D(p - QPointF(cx, cy)).normalized()\n if abs(v1.dotProduct(v1, v2)) <= 1e-6:\n # TODO: handle case where arc turns back into the line\n return True\n else:\n return False", "def collinear(a:tuple, b:tuple, c:tuple)->bool:\n return ((b[1] - c[1]) * (a[0] - b[0])) == ((a[1] - b[1]) * (b[0] - c[0]))", "def is_cross(self, row, col):\n return self.field[row, col] == 'O'", "def dans_cercle(self, r, x, y):\r\n self.r_num(r)\r\n valid = (isinstance(x, int) or isinstance(x, float)) and \\\r\n (isinstance(y, int) or isinstance(y, float))\r\n if valid:\r\n if sqrt(x**2+y**2)<self.r:\r\n return True\r\n else:\r\n return False\r\n else:\r\n raise TypeError", "def is_clockwise(vertices):\n v = vertices\n area = ((v[1][0] - v[0][0]) * (v[1][1] + v[0][1]) +\n (v[2][0] - v[1][0]) * (v[2][1] + v[1][1]) +\n (v[0][0] - v[2][0]) * (v[0][1] + v[2][1])) / 2\n return (area > 0)", "def collison(direction):\n if direction == 3 and screen.inch(head[0]-1,head[1]) !=ord(' '):\n return True\n elif direction == 2 and screen.inch(head[0]+1,head[1]) !=ord(' '):\n return True\n elif direction == 1 and screen.inch(head[0],head[1]-1) !=ord(' '):\n return True\n elif direction == 0 and screen.inch(head[0],head[1]+1) !=ord(' '):\n return True \n else:\n return False", "def correct_player_turn(self, coords):\n x, y = coords\n if coords == None:\n return False\n elif len(coords) != 2:\n print(\"x and y, that's exactly 2 coordinates, can't be that hard to understand....\")\n return False\n elif self.board.out_of_bounds(x - 1, y - 1): # if coords out of bounds\n print('Position out of bounds!')\n return False\n elif not self.board.free_pos(x - 1, y - 1): # if coords already taken\n print('Position already taken!')\n return False\n return True", "def is_interfered(r, c, s1, s2):\n return r == 0 and c < s1 + s2 or r < s2 and c < s1", "def isInlineOfFire(self, shooter, target):\n if self == shooter or self == target or target == shooter:\n raise Exception(\n 'Must provide 3 distinct points to check line of fire'\n )\n\n tmpLeft = (shooter.y - self.y)*(target.x - self.x)\n tmpRight = (target.y - self.y)*(shooter.x - self.x)\n\n if tmpLeft == tmpRight:\n min_x = min(target.x, shooter.x)\n max_x = max(target.x, shooter.x)\n min_y = min(target.y, shooter.y)\n max_y = max(target.y, shooter.y)\n x_condition = min_x <= self.x <= max_x\n 
y_condition = min_y <= self.y <= max_y\n if x_condition and y_condition:\n return True\n return False", "def can_turnover(self):\n return True if len(self._turn_chars) != 0 else False", "def is_one_turn_ahead(point_a, point_b, distance):\n _, route_initial = interpolate_trajectory(world, [point_a.location, point_b.location])\n if estimate_route_distance(route_initial) < distance or \\\n estimate_route_distance(route_initial) > 3*distance:\n print (\"Rejected because it is too small\")\n return False\n route = clean_route(route_initial)\n\n print ( \" One curve test \")\n if len(route) != 1:\n print (\" reject because of size\")\n return False\n for point in route:\n # Check if there are any curve\n if point[2] == RoadOption.STRAIGHT:\n print (\" reject due to straight\")\n return False\n\n\n return True", "def crosses(self, other): # -> bool:\n ...", "def positive_slope(line:tuple)->bool:\n return line[0][1] < line[1][1] == line[0][0] < line[1][0]", "def one_step_right(self):\n if (self.column+ 1 >=len(self.maze[0])):\n return False\n elif (self.battery == 0):\n return False\n elif (self.maze[self.row][self.column+1] == False):\n return False\n else:\n self.column += 1\n self.battery -= 1\n return True", "def check_victory(button):\n # check if previous move caused a win on vertical line\n global buttons\n x, y = get_coordinates(button)\n tt = button['text']\n if buttons[0][y]['text'] == buttons[1][y]['text'] == buttons[2][y]['text'] != \" \":\n buttons[0][y].config(text=\"|\" + tt + \"|\")\n buttons[1][y].config(text=\"|\" + tt + \"|\")\n buttons[2][y].config(text=\"|\" + tt + \"|\")\n return True\n\n # check if previous move caused a win on horizontal line\n if buttons[x][0]['text'] == buttons[x][1]['text'] == buttons[x][2]['text'] != \" \":\n buttons[x][0].config(text=\"--\" + tt + \"--\")\n buttons[x][1].config(text=\"--\" + tt + \"--\")\n buttons[x][2].config(text=\"--\" + tt + \"--\")\n return True\n\n # check if previous move was on the main diagonal and caused a win\n if x == y and buttons[0][0]['text'] == buttons[1][1]['text'] == buttons[2][2]['text'] != \" \":\n buttons[0][0].config(text=\"\\\\\" + tt + \"\\\\\")\n buttons[1][1].config(text=\"\\\\\" + tt + \"\\\\\")\n buttons[2][2].config(text=\"\\\\\" + tt + \"\\\\\")\n return True\n\n # check if previous move was on the secondary diagonal and caused a win\n if x + y \\\n == 2 and buttons[0][2]['text'] == buttons[1][1]['text'] == buttons[2][0]['text'] != \" \":\n buttons[0][2].config(text=\"/\" + tt + \"/\")\n buttons[1][1].config(text=\"/\" + tt + \"/\")\n buttons[2][0].config(text=\"/\" + tt + \"/\")\n return True\n\n return False", "def valid(a,b,x,y):\n\t# Size of the square grid that encases rectagle x,y\n\tsquare = x + y - 2\n\t# Taxi cab distance (no diagonals) from (p_a, p_b) to (a,b)\n\tsteps = lambda p_a, p_b: abs(p_a - a) + abs(p_b - b)\n\t# Top/Bottom/Left/Right bound\n\tif min(a,b) < 0 or max(a,b) >= square: return False\n\t# Upper left/Lower right corner check\n\tif steps(0,0) < (x - 2) or steps(square - 1, square - 1) < (x - 2): return False \n\t# Lower left/Upper right corner check\n\telif steps(square - 1, 0) < (y - 2) or steps( 0, square - 1) < (y - 2): return False\n\treturn True", "def is_on(a, b, c):\r\n return(isCollinear(a, b, c) and (within(a[0], c[0], b[0]) if a[0] != b[0] else\r\n within(a[1], c[1], b[1])))", "def is_cue_line(point1, point2, image):\n if point1[0] <= point2[0]:\n pointL, pointR = point1, point2\n else:\n pointL, pointR = point2, point1\n deltaY = pointR[1] - pointL[1]\n deltaX = 
pointR[0] - pointL[0]\n if deltaX != 0:\n for x in range(pointL[0], pointR[0] + 1):\n dx = x - pointL[0]\n dy = dx * deltaY/deltaX\n y = pointL[1] + dy\n if not is_cue_color(image.getpixel((x,y))):\n return False\n else:\n up = min(point1[1], point2[1])\n down = max(point1[1], point2[1])\n x = point1[0]\n for y in range(up, down + 1):\n if not is_cue_color(image.getpixel((x, y))):\n return False\n\n return True", "def check_quadline(self, row: int, col: int, drow: int, dcol: int) -> bool:\n count = 1\n token = self.get_token(row, col)\n count_token = 1\n while self.check_bounds(row+drow, col+dcol) and count <= 3:\n if self.grid[row+drow][col+dcol] == token:\n row += drow\n col += dcol\n count_token += 1\n if count_token == 4:\n return True\n count += 1\n return False", "def has_win(self, r, c, mark):\n row = r - 1\n col = COL_MAP[c]\n cnt = 0\n board_range = xrange(-1 * PADDING, PADDING + 1)\n\n # check vertical\n for dr in board_range:\n cnt = cnt + 1 if self.piece_at(row + dr, col) == mark else cnt\n if cnt == WIDTH:\n return True\n else:\n cnt = 0\n\n # check horizontal\n for dc in board_range:\n cnt = cnt + 1 if self.piece_at(row, col + dc) == mark else cnt\n if cnt == WIDTH:\n return True\n else:\n cnt = 0\n\n # check diagonal rightdown\n for dd in board_range:\n cnt = cnt + 1 if self.piece_at(row + dd, col + dd) == mark else cnt\n if cnt == WIDTH:\n return True\n else:\n cnt = 0\n\n # check diagonal rightup\n for dd in board_range:\n cnt = cnt + 1 if self.piece_at(row - dd, col + dd) == mark else cnt\n if cnt == WIDTH:\n return True\n\n return False", "def check_point_right(nodeL, nodeR, city):\n A = get_city_points(city)\n B = get_node_points(nodeL)\n C = get_node_points(nodeR)\n slope = _slope(A, B)\n (F, G) = calibrator(A, B, slope)\n sign = math.copysign(1, ((G[0] - F[0]) * (C[1] - F[1]) - (G[1] - F[1]) * (C[0] - F[0])))\n\n if slope == \"horizontal\":\n if sign == 1:\n if A[0] > B[0]:\n return True\n else:\n return False\n else:\n if A[0] < B[0]:\n return True\n else:\n return False\n\n if slope == \"vertical\":\n if sign == 1:\n if A[1] < B[1]:\n return True\n else:\n return False\n else:\n if A[1] > B[1]:\n return True\n else:\n return False\n\n if slope == \"inclined\":\n if sign == 1:\n if A[1] < B[1]:\n return True\n else:\n return False\n else:\n if A[1] > B[1]:\n return True\n else:\n return False\n\n if slope == \"declined\":\n if sign == 1:\n if A[1] < B[1]:\n return True\n else:\n return False\n else:\n if A[1] > B[1]:\n return True\n else:\n return False", "def is_on_curve(self):\n if self.infinity:\n return True\n left = self.y * self.y\n right = self.x * self.x * self.x + self.ec.a * self.x + self.ec.b\n\n return left == right", "def game_won(self):\n\n # Makes sure every tile is colored,\n for column in self.board:\n for tile in column:\n if not tile.color:\n return False\n\n # Makes sure each color has a line.\n colors = set()\n for dot in self.dots:\n dot_tile = self.board[dot.x][dot.y]\n colors.add(dot.color)\n for dot in self.dots:\n dot_tile = self.board[dot.x][dot.y]\n # If we've already found a line for this color.\n if dot.color not in colors:\n continue\n # If this dot starts a line and ends at the other dot.\n if dot_tile.next and not dot_tile.line_end().is_dot:\n return False\n elif dot_tile.next:\n colors.remove(dot.color)\n # If colors isn't empty, not all colors have lines.\n return not colors", "def is_quadline(self, col: int) -> bool:\n row = self.depth(col)\n for i in range(-1, 2):\n for j in range(-1, 2):\n if not (i == 0 and j == 0) and 
self.check_quadline(row, col,\n i, j):\n return True\n return False", "def continues_to_right(self):\n if self.col_num == len(self.master_grid.matrix[0])-1:\n return False\n return (self.master_grid.matrix[self.row_num][self.col_num+1] \n == self.character)", "def has_arc(self, a, b):\n return self.matrix[a][b] != 0", "def check_left_side():\n maze.turn_right()\n #print 'checked left side'\n if maze.go():\n maze.turn_right()\n maze.turn_right()\n maze.go()\n maze.turn_right()\n # print 'i can go left'\n return True\n else:\n #print \"i can't go left\"\n maze.turn_left()\n return False", "def is_winning(self):\n\n current_board = self.current_board\n\n # check rows\n for row in current_board:\n row = set(row)\n if (\"X\" not in row and \"-\" not in row) or (\"O\" not in row and \"-\" not in row):\n return True\n\n # check columns\n for i in range(len(current_board)):\n column_to_check = set()\n \n for j in range(len(current_board)):\n column_to_check.add(current_board[j][i])\n\n if (\"X\" not in column_to_check and \"-\" not in column_to_check) or (\"O\" not in column_to_check and \"-\" not in column_to_check):\n return True\n \n # check diagonals\n forward_diagonal_check = set()\n backward_diagonal_check = set()\n \n for i in range(len(current_board)):\n forward_diagonal_check.add(current_board[i][i])\n backward_diagonal_check.add(current_board[i][len(current_board)-1-i])\n\n if forward_diagonal_check == {\"X\"} or forward_diagonal_check == {\"O\"}:\n return True\n\n if backward_diagonal_check == {\"X\"} or backward_diagonal_check == {\"O\"}:\n return True", "def is_legal_move(self, start_pos, end_pos, start_piece, end_piece_player_id, board):\r\n parsed_positions = self.parse_positions(start_pos, end_pos)\r\n start_row = parsed_positions[0]\r\n start_col = parsed_positions[1]\r\n end_row = parsed_positions[2]\r\n end_col = parsed_positions[3]\r\n\r\n # Case for Red\r\n if start_piece.get_player_id() == 'r':\r\n\r\n # Red soldier hasn't crossed river\r\n if 3 <= start_row <= 4:\r\n if end_row - start_row == 1 and start_col == end_col:\r\n return True\r\n\r\n # Red solider has crossed river\r\n else:\r\n # Checks if movement forces a diagonal\r\n if end_row - start_row == 1 and abs(end_col - start_col) == 1:\r\n return False\r\n # Rules out the diagonal and checks if movement is valid\r\n elif end_row - start_row == 1 or abs(end_col - start_col) == 1:\r\n return True\r\n else:\r\n return False\r\n\r\n # Case for Black\r\n else:\r\n # Black soldier hasn't crossed river\r\n if 5 <= start_row <= 6:\r\n if end_row - start_row == -1 and start_col == end_col:\r\n return True\r\n else:\r\n return False\r\n\r\n # Black soldier has crossed the river\r\n else:\r\n # Checks if movement forces a diagonal\r\n if end_row - start_row == -1 and abs(end_col - start_col) == 1:\r\n return False\r\n # Rules out the diagonal and checks if movement is valid\r\n elif end_row - start_row == -1 or abs(end_col - start_col) == 1:\r\n return True\r\n\r\n else:\r\n return False", "def protect_lone(self, board):\n res = False\n if (self.player):\n if (reduce(lambda x, y: x if (y > 1) else x + 1, board.p1vec) < \\\n reduce(lambda x, y: x if (y > 1) else x + 1, self.board.p1vec)):\n res = True\n else:\n if (reduce(lambda x, y: x if (y > 1) else x + 1, board.p2vec) < \\\n reduce(lambda x, y: x if (y > 1) else x + 1, self.board.p2vec)):\n res = True\n return res", "def check_correctness(turns, correct_digits_and_position):\n\n #global correct\n\n if correct_digits_and_position == 4:\n print('Congratulations! 
You are a codebreaker!')\n return True\n else:\n print('Turns left: ' + str(12 - turns))\n \n return False", "def isIsosceles(self):\n\t\treturn self.a == self.b or self.a == self.c or self.b == self.c", "def has_right_rauzy_move(self, winner):\n winner = interval_conversion(winner)\n loser = self._labels[1-winner][-1]\n\n # the same letter at the right-end (False)\n if self._labels[0][-1] == self._labels[1][-1] :\n return False\n\n # the winner (or loser) letter is repeated on the other interval (True)\n if self._labels[0][-1] in self._labels[1]: return True\n if self._labels[1][-1] in self._labels[0]: return True\n\n # the loser letters is the only letter repeated in the loser\n # interval (False)\n for i,c in enumerate((self._labels[1-winner])):\n if c != loser and c in self._labels[1-winner][i+1:]:\n return True\n\n return False", "def is_valid_cord(x, y, w, h):\n return x >=0 and x < w and y >= 0 and y < h;", "def is_cyclic(self):\n return self._.b[0] == 2 and self._.c[-1] in [1, 2] and \\\n all(x == 1 for x in self._.b[1:-1] + self._.c[1:-1])", "def terminal(self):\n # Horizontal check\n for i in range(3):\n b_ = True\n for j in range(2):\n if self.board[i][j] == None or self.board[i][j] != self.board[i][j + 1]:\n b_ = False\n \n if b_:\n self.winner = self.board[i][0]\n return True\n \n # Vertical check\n for j in range(3):\n b_ = True\n for i in range(2):\n if self.board[i][j] == None or self.board[i][j] != self.board[i + 1][j]:\n b_ = False\n \n if b_:\n self.winner = self.board[0][j]\n return True\n \n # Diagonal check\n if self.board[1][1] != None:\n if self.board[0][0] == self.board[1][1] == self.board[2][2]:\n self.winner = self.board[1][1]\n return True\n\n if self.board[2][0] == self.board[1][1] == self.board[0][2]:\n self.winner = self.board[1][1]\n return True\n\n # Draw check\n if sum([row.count(None) for row in self.board]) == 0:\n self.winner = None\n return True\n \n return False", "def is_knight_move_valid(self, from_row, from_col, to_row, to_col):\n # check for valid move\n if ((abs(from_row - to_row) == 1 and abs(from_col - to_col) == 2) or\n (abs(from_row - to_row) == 2 and abs(from_col - to_col) == 1)):\n return True\n return False", "def intersect(self, line):\n c = line.cross_z\n d = self.v.dot(c)\n if d == 0:\n return False, 0, 0\n t = c.dot(line.p - self.p) / d\n return True, self.lerp(t), t", "def check_diagonals(self):\n\t\tdiags = [[(0,0), (1,1), (2,2)], [(0,2), (1,1), (2,0)]]\n\n\t\tfor diag in diags:\n\t\t\tpts = 0\n\t\t\tfor loc in diag:\n\t\t\t\tif self.board[loc[0]][loc[1]] == self.marker:\n\t\t\t\t\tpts+=1\n\t\t\tif pts == 3:\n\t\t\t\tprint('WE WON')\n\t\t\t\treturn True", "def willcollide(self, p, c, r, v=None):\n return (p.step(dt).vec(c)).len() > r", "def is_tr(self, y, t):\n return t != 0 and y != 0", "def on_turnover(self):\n return True if self.rotor_setting in self.turnover_characters else False", "def can_move(self, side, number_of_turns):\n return True", "def check_shot_direction(self, shot):\n return Vector.v_from_a(shot.angle) * self.dir < 0", "def __check_direction(self, vector, coordinate):\n inverse_vector = -vector[0], -vector[1]\n # Calculate hits to direction\n hits = self.__direction(vector,1,coordinate)\n if hits == 5:\n return True\n # After reaching the end, add hits towards the opposite direction\n hits = self.__direction(inverse_vector,hits,coordinate)\n if hits == 5:\n return True", "def is_rook_move_valid(self, from_row, from_col, to_row, to_col):\n # if not on same column or row\n if ((from_row != to_row and from_col != 
to_col) or\n (from_row == to_row and from_col == to_col)):\n return False\n\n # check if any pieces are in the way of destination\n if from_row != to_row:\n dc = 0\n dr = 1 if to_row - from_row > 0 else -1\n if from_col != to_col:\n dr = 0\n dc = 1 if to_col - from_col > 0 else -1\n dm = abs(to_row - from_row)\n\n retVal = self._any_piece_in_way(from_row, from_col, dr, dc, dm, toRow=to_row, toCol=to_col)\n\n # Casting: Rook invalidation\n if retVal and (from_row == 0 or from_row == 7):\n piece = self.board.squares[from_row][from_col]\n piece_color = self.piece_color(piece)\n if piece_color == \"white\":\n if from_col == 0:\n self.whiteCanCastleQside = False\n elif from_col == 7:\n self.whiteCanCastleKside = False\n else:\n if from_col == 0:\n self.blackCanCastleQside = False\n elif from_col == 7:\n self.blackCanCastleKside = False\n\n return retVal", "def colisiona(self, r, p):\n # Esta en el eje de las x?\n if p[0] >= r[0] and p[0] <= r[0] + 10:\n # Esta en el eje de las y?\n if p[1] >= r[1] and p[1] <= r[1] + 5:\n return True\n else:\n return False\n else:\n return False", "def line_collision(self, x1, y1, x2, y2, step=5):\n theta = math.atan2(y2-y1, x2-x1)\n length = math.sqrt((x1-x2)**2+(y1-y2)**2)\n for i in range(int(length/step)):\n new_x = x1+step*i*math.cos(theta)\n new_y = y1+step*i*math.sin(theta)\n if self.has_collision(new_x, new_y):\n return True\n return False", "def is_straight(hand):\n\ti = 0\n\twhile i < 8:\n\t\tif hand[i] == 1 and hand[i+1] == 1 and hand[i+2] == 1 and hand[i+3] == 1 and hand[i+4] == 1:\n\t\t\treturn True, i + 4\n\t\ti += 1\n\treturn False" ]
[ "0.7412801", "0.7080334", "0.7044733", "0.68451196", "0.6785564", "0.6677456", "0.63376695", "0.63277566", "0.63084525", "0.6250756", "0.62460744", "0.6239518", "0.6237629", "0.6189446", "0.6157471", "0.6096383", "0.6095791", "0.6095791", "0.6009904", "0.5992941", "0.59842193", "0.59549165", "0.59448564", "0.59335124", "0.5909104", "0.5897223", "0.5894332", "0.5875472", "0.5873982", "0.5869658", "0.5867264", "0.58486634", "0.5847261", "0.5841068", "0.58258224", "0.58031124", "0.5787829", "0.57793415", "0.57784295", "0.57689416", "0.57525575", "0.57312286", "0.5726833", "0.57239544", "0.56845146", "0.5674761", "0.5671379", "0.5659283", "0.56489307", "0.56486535", "0.56486535", "0.5644927", "0.5640076", "0.5631288", "0.56217974", "0.56019294", "0.55962396", "0.559404", "0.5592898", "0.5582357", "0.55818987", "0.55812275", "0.557992", "0.55715746", "0.5570595", "0.5570407", "0.5569841", "0.5563944", "0.5562107", "0.5551768", "0.5551086", "0.5546653", "0.55412775", "0.55361706", "0.55295634", "0.5528169", "0.5526371", "0.5517552", "0.5515524", "0.55152756", "0.5503935", "0.5502442", "0.5499917", "0.5499355", "0.5498101", "0.5483307", "0.5472684", "0.54720503", "0.5471338", "0.546135", "0.54579335", "0.54428", "0.5440477", "0.54398227", "0.5439214", "0.5437655", "0.54363483", "0.54350996", "0.5427362", "0.54248285", "0.5421994" ]
0.0
-1
Function to check if a number is very small.
def isSmall(number): return abs(number) < epsilon
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def is_small(a:int, b:int) -> bool:\n return a <= b", "def dangerouslySmall(c, e):\n return e < -limit and e < (-integerLog10(abs(c))) - 1", "def is_large(a:int, b:int) -> bool:\n return a>=b", "def has_small_digits(n,maxdigit):\n digits = [int(num) for num in str(n)]\n return all([num <= maxdigit for num in digits])", "def verySmall(a, delta=1e-30):\n # used when we really want the values to be 0, but this\n # can't be guaranteed due to floating errors.\n return np.average(a*a) <= delta", "def exceeds_min(value, min_):\n\n if isinstance(value, (float, int)):\n val_ = value\n else:\n try:\n val_ = int(value)\n except:\n val_ = value\n if isinstance(min_, (float, int)):\n return (val_ < min_)\n else:\n if min_.isalnum():\n try:\n imin = int(min_)\n return (val_ < imin)\n except:\n pass\n \n return False", "def smallest_factor(number):\n for i in xrange(2, int(sqrt(number)+1)):\n if number % i == 0:\n return i\n return False", "def largest_scaled_float_not_above(val, scale):\n assert val >= 0\n assert scale >= 0\n float_val = float(val) / 10**scale\n if float_val * 10**scale > val:\n # Take the float just below... it *should* satisfy\n float_val = np.nextafter(float_val, 0.0)\n if float_val * 10**scale > val:\n float_val = np.nextafter(float_val, 0.0)\n assert float_val * 10**scale <= val\n return float_val", "def isSmallInt(op):\n return op == opcode.OP_0 or (op >= opcode.OP_1 and op <= opcode.OP_16)", "def validate_low_integer(number):\n if number < 2:\n raise MaxLimitTooLowError()", "def is_scientific(number):\n if convert_to_scientific_notation(float(number)) == number:\n return True\n return False", "def test_large_values(self):\n eq_(0, smart_int('1' * 1000))", "def lessThan(self, t):\n if t is None:\n return False\n if isinstance(t, (float, int)):\n return self._micros < long(t * 1000000)\n else:\n return self._micros < t._micros", "def is_deficient_number(x):\n return sum(proper_divisors(x)) < x", "def ensure_size(value):\n return int(round(value * 1.0 / base)) * base", "def test_constructed_is_small(self):\n self.assertTrue(all(elt<10 for elt in goodwinsheaf.checkradii()))#check all entries have small radii", "def min_size(v=(0, 0)):\n return _check_two_scalars('min_size', v)", "def check_is_less_than(number1, number2):\n if number1<number2:\n return True\n else:\n return False\n pass", "def is_valid_significant_digits(\n value: Decimal,\n max_significant_digits: int\n) -> bool:\n return round(value, max_significant_digits) == value", "def is_simple_number(x: int):\n divisor = 2\n while divisor < x:\n if x % divisor == 0:\n return False\n divisor += 1\n return True", "def _is_pos_int(number: int) -> bool:\n return type(number) == int and number >= 0", "def test_size_too_small(self):\n min_size = min(settings.MISAGO_AVATARS_SIZES)\n too_small = min_size / 2\n\n self.assertEqual(clean_size(too_small), min_size)", "def arecloseenough(x1, x2):\n\n if abs(x1 - x2) <= VERYSMALL:\n return True\n \n return False", "def n_sanity_check(number):\n #number = min(99,number)\n #number = max(1,number)\n #return number\n if number > 99: # This is alot clearer no?\n return 99\n elif number < 1:\n return 1\n else:\n return number", "def find_bigger_sqrt_number(num):\n\n tmpPos = num\n while np.sqrt(tmpPos) % 1 != 0:\n tmpPos += 1\n return int(np.sqrt(tmpPos))", "def is_acceptable_multiplier(m):\n return 1 < m < (2 ** 61 - 1)", "def is_integer(number: float):\n\tif number.is_integer():\n\t\treturn int(number)\n\treturn number", "def big_or_small(n):\n\n if n > 100:\n print('This is a big number 
({0})'.format(n))\n else:\n print('This is a small number ({0})'.format(n))", "def _is_safe_size(n):\n n = int(n)\n\n if n == 0:\n return True\n\n # Divide by 3 until you can't, then by 5 until you can't\n for c in (3, 5):\n while n % c == 0:\n n //= c\n\n # Return True if the remainder is a power of 2\n return not n & (n-1)", "def floor_with_tolerance(num):\n if isclose(num, ceil(num)):\n return ceil(num)\n return floor(num)", "def is_number(num):\n try:\n float(num)\n return True\n except ValueError:\n return False", "def is_number(number):\n try:\n float(number)\n return True\n except ValueError:\n return False", "def near_hundred(n):\n if 90 <= n <= 110 or 190 <= n <= 210:\n return True\n else:\n return False", "def is_valid_volsize(self,volsize):\n \n if type(volsize) is int:\n size_temp = divmod(volsize, 8192)\n if size_temp[1] > 0: # If not on 8GB boundary\n return int((1 + size_temp[0]) * 8192) # Always round to next 8GB increment\n else:\n return int(volsize)", "def is_number(n):\n\ttry:\n\t\tfloat(n)\n\t\treturn True\n\texcept ValueError:\n\t\treturn False", "def isnum(value):\n\n try:\n return bool(isinstance(value, (float, int)))\n except RuntimeError:\n return False", "def _single_value_min(data, threshold):\r\n amin = np.min(data)\r\n amax = np.max(data)\r\n limit = amin + (amax - amin) * threshold\r\n return data < limit", "def isNumber(num):\n try:\n abs(num)\n return True\n except:\n return False", "def is_int(num):\n return int(num) == num", "def hasLow(self):\n\t\treturn self.toLow().exists", "def safe_calc(exponent):\n\n if exponent > 700:\n return sys.float_info.max\n else:\n return math.exp(exponent)", "def is_armstrong_number(number: int) -> bool:\n\n str_number = f\"{number}\"\n return sum(pow(int(x), len(str_number)) for x in str_number) == number", "def isgoodnum(n):\n return (not isinstance(n,bool)) and isinstance(n,(int,float))", "def large_difference(benchmark_value, candidate_value, significant_digits):\n diff = abs(benchmark_value - candidate_value)\n return diff > 0 and significant_digits + math.log10(diff) > 0", "def is_number(n):\n return isinstance(n, (int, float))", "def check_number(number):\n digits = str(number)\n if len(digits) != 6:\n return False\n\n double = False\n last = '0'\n for digit in digits:\n if digit < last:\n return False\n\n if digit == last:\n double = True\n\n last = digit\n\n return double", "def is_armstrong_number(number: int) -> bool:\n result = 0\n num_str = str(number)\n for i in num_str:\n result += int(i) ** len(num_str)\n return result == number", "def isHappy(n):\n def check_num(n):\n res = 0\n while n > 0:\n n, digits = divmod(n, 10)\n res += digits ** 2\n return res\n \n \n seen = set()\n while n != 1 and n not in seen:\n seen.add(n)\n n = check_num(n)\n \n return n == 1", "def __check_bit_size(self, value, num_bits):\n is_fit = False\n if value <= 2 ** num_bits - 1:\n is_fit = True\n return is_fit", "def is_lower_limit(self):\n is_lower = self.get_raw_status() & self.STATUS_LLIM\n return bool(is_lower)", "def isLow(self):\n\t\treturn self.resolution == 'LOW'", "def _validate_train_size(train_size):\n assert isinstance(train_size, float) and (0. 
< train_size < 1.), \\\n \"train_size should be a float between 0 and 1\"", "def is_truncatable(number: int):\n\n str_number = str(number)\n index = 0\n\n # Left shift:\n while index < len(str_number):\n if not is_prime(int(str_number[index:])):\n return False\n\n index += 1\n\n # Right shift:\n index = len(str_number)\n while index > 0:\n if not is_prime(int(str_number[:index])):\n return False\n\n index -= 1\n\n return True", "def is_number(x):\n if isinstance(x, (int, float)):\n return True\n else:\n return False", "def lessThanEqualTo(self, t):\n if t is None:\n return False\n if isinstance(t, (float, int)):\n return self._micros <= long(t * 1000000)\n else:\n return self._micros <= t._micros", "def isnumber(x):\n try:\n float(x)\n return True\n except ValueError:\n return False", "def min_signed_type(x, min_size=8):\n for int_dtype in np.sctypes[\"int\"]:\n if (cudf.dtype(int_dtype).itemsize * 8) >= min_size:\n if np.iinfo(int_dtype).min <= x <= np.iinfo(int_dtype).max:\n return int_dtype\n # resort to using `int64` and let numpy raise appropriate exception:\n return np.int64(x).dtype", "def check_size(s):\n\n s = check_1d(s, \"size\")\n if any(map(lambda d: d <= 0, s)):\n raise Exception('Size cannot be 0 or negative')\n\n return s", "def isprime(number: int) -> bool:\n for i in range(2, int(number ** 0.5) + 1):\n if number % i == 0:\n return False\n return True", "def raise_not_number(x: int) -> None:\n try:\n float(x)\n except ValueError:\n raise SizeError('Must pass a number, received {}'.format(x))", "def is_superprime(x: int) -> bool:\n if x <= 0:\n return False\n\n while x:\n if is_prime(x) == False:\n return False\n x //= 10\n return True", "def valid_house_num(cls, new_num):\n if new_num > cls.POSITIVE_MIN:\n return True\n # else\n return False", "def _image_is_large_enough(im):\n return (im.shape[0] >= MIN_DIM) and (im.shape[1] >= MIN_DIM)", "def check_mountain_number(n):\n def helper(x, is_incresing):\n if x // 10 == 0:\n return True\n if is_incresing and (x % 10) < ((x // 10) % 10):\n return helper(x // 10, is_incresing)\n return (x % 10) > ((x // 10) % 10) and helper(x // 10, False)\n return helper(n, True)", "def isValidTeamSize(size, minimum, maximum) :\n\n return isInteger(size) and int(size) >= minimum and int(size) <= maximum", "def check_for_integer(number):\r\n \r\n try:\r\n int(number) \r\n return True\r\n except ValueError:\r\n return False", "def fastfloor(x):\n return int(x) if x > 0 else int(x) - 1", "def blood_pressure_systolic_validation(blood_pressure_systolic: int) -> bool:\n\n if not str(blood_pressure_systolic).isnumeric() or isinstance(blood_pressure_systolic, str):\n return False\n\n return int(blood_pressure_systolic) < 251 and int(blood_pressure_systolic) >= 0", "def isInteger(number) :\n\n try:\n int(number)\n return True\n except ValueError:\n return False", "def test_signed_assert_min_exceeded(self):\n with pytest.raises(ConversionError):\n DPT2ByteSigned.to_knx(-32769)", "def _limit_fill():\n z = random.randint(0, 10)\n if z/10.0 < LIMIT_FILL_PROBABILITY:\n return True\n else:\n return False", "def magnitude_too_small(mag, lon, lat, config):\n pt = Point((lon, lat))\n for boxname in sorted(config['boxes']):\n boxdict = config['boxes'][boxname]\n if pt.within(boxdict['poly']):\n if mag >= boxdict['mag']:\n return False\n else:\n return True\n #\n # Not in any boxes\n #\n if mag >= config['minmag']:\n return False\n\n return True", "def check_cpu_usage():\n usage = psutil.cpu_percent(1)\n print(\"DEBUG:usage:{}\".format(usage))\n return usage < 
75", "def find_smallest(num_vars):\n for x in range(10):\n if num_vars <= 2**x:\n return x", "def is_hilbert_square(n):\n return ((-1 + math.sqrt(n)) / 4).is_integer()", "def isInteger(number) :\n\n try:\n int(number)\n return True \n except ValueError:\n return False", "def check_cpu_usage():\n usage = psutil.cpu_percent(1)\n return usage < 75", "def is_number(value):\n try:\n float(value)\n return True\n except ValueError:\n return False", "def check_cpu_constrained():\n return psutil.cpu_percent(1) > 75", "def floor(x):\n # if x is within MACHINE_EPS of an integer, return that integer\n if abs(x - round(x)) < MACHINE_EPS:\n return round(x)\n # otherwise, return the floor of x\n return math.floor(x)", "def longdouble_lte_float64():\n return np.longdouble(2**53) == np.longdouble(2**53) + 1", "def tiny(x):\n\n # Make sure we have an array view\n x = np.asarray(x)\n\n # Only floating types generate a tiny\n if np.issubdtype(x.dtype, np.floating) or np.issubdtype(\n x.dtype, np.complexfloating\n ):\n dtype = x.dtype\n else:\n dtype = np.float32\n\n return np.finfo(dtype).tiny", "def checkPerfectNumber(self, num: int) -> bool:\n if num <= 0:\n return False\n s = 0\n for i in range(1, int(math.sqrt(num) + 1)):\n if i != num:\n res = num % i\n if res == 0:\n s += i\n divisor = num // i\n if divisor != num:\n s += divisor\n if s > num:\n return False\n return s == num", "def near_hundred_abs(n):\n if abs(100-n) <= 10 or abs(200-n):\n return True\n else:\n return False", "def is_num(var):\n try:\n int(var)\n return True\n except ValueError:\n return False", "def check_hole_number(n):\n if n // 10 == 0:\n return True\n\n # The \\ symbol just allows me to continue this line of code on a new line.\n # It's only included to make sure all the code stays on the page\n return ((n // 10) % 10) < (n % 10) and ((n // 10) % 10) < ((n // 100) % 10) \\\n and check_hole_number(n // 100)", "def is_number(number):\n if type(number) == type(1) or type(number) == type(0.1) or type(number) == type('') or type(u''):\n try:\n float(number)\n return True\n except ValueError:\n return False\n except TypeError:\n return False\n else:\n return False", "def threshold(self, value):\r\n threshold = 0.5\r\n if value >= threshold:\r\n return 1\r\n else:\r\n return 0", "def is_armstrong_number(number: int) -> bool:\n return get_armstrong_value(number) == number", "def test_small_chunk(self):\n chunksize = 7 * (1024 ** 2)\n size = 8 * (1024 ** 2)\n self.assertEqual(find_chunksize(size, chunksize), chunksize)", "def blood_pressure_diastolic_validation(blood_pressure_diastolic: int) -> bool:\n\n if not str(blood_pressure_diastolic).isnumeric():\n return False\n\n return int(blood_pressure_diastolic) < 141 and int(blood_pressure_diastolic) >= 0", "def isprime(number):\n\n if number == 1:\n return False\n for i in range(2, int(number**0.5) + 1):\n if number % i == 0:\n return False\n return True", "def min_digit(x):\n \"\"\" GRAPSTE TON KWDIKA SAS APO KATW \"\"\"\n s = 10\n while(x>0):\n if(s>(x%10)):\n s = x%10\n x = x//10\n return s", "def secure_filesize(filepath):\n return os.path.getsize(filepath) <= MAX_FILESIZE", "def check_cpu_usage():\n usage = psutil.cpu_percent(1)\n return usage < 73", "def are_sizes_valid(sizes):\n return all(isinstance(size, int) and size >= 16 and size <= 28 for size in sizes)", "def bbox_is_small(bbox, h_min, w_min, min_area=None):\n [ymin, xmin, ymax, xmax] = list(bbox)\n width = (xmax - xmin + 1)\n if width < w_min:\n return True\n height = (ymax - ymin + 1)\n if height < h_min:\n return True\n 
area = width * height\n\n if min_area is not None:\n if area < min_area:\n return True\n return False", "def uInt32Compatible(integer):\n return integer < 0x100000000 and integer >= 0", "def approximate_size(size, a_kilobyte_is_1024_bytes=True):\n if size < 0:\n raise ValueError('number must be non-negative')\n\n multiple = 1024 if a_kilobyte_is_1024_bytes else 1000\n for suffix in SUFFIXES[multiple]:\n if size < multiple:\n return '{0:.1f} {1}'.format(size, suffix)\n size = 1.0 * size / multiple\n\n raise ValueError('number too large')", "def is_number(value):\n try:\n int(value)\n return True\n except (ValueError, TypeError):\n return False" ]
[ "0.71358377", "0.70233357", "0.6894309", "0.64070696", "0.6313465", "0.623198", "0.62296844", "0.6060151", "0.605466", "0.6032642", "0.5905205", "0.58516663", "0.58045554", "0.571476", "0.5698064", "0.56735736", "0.5664453", "0.5655646", "0.5649527", "0.5642045", "0.5599978", "0.5578391", "0.5576619", "0.5572189", "0.55665046", "0.5549886", "0.5548237", "0.5534949", "0.55279547", "0.55171806", "0.55117714", "0.55117697", "0.5506724", "0.54996645", "0.54978824", "0.5477659", "0.54634833", "0.5460191", "0.54452014", "0.543967", "0.54395545", "0.54386634", "0.5434749", "0.5427698", "0.5418038", "0.5408011", "0.5399605", "0.5388605", "0.53878146", "0.53784996", "0.53660095", "0.53526205", "0.5345634", "0.5324321", "0.53240275", "0.5320971", "0.5315125", "0.5314113", "0.5310695", "0.53098243", "0.5307607", "0.5305623", "0.52993053", "0.5290503", "0.52902937", "0.5276785", "0.5264881", "0.526334", "0.5260478", "0.52544475", "0.5253148", "0.5250768", "0.5245221", "0.5241142", "0.5233757", "0.522517", "0.52234304", "0.5218444", "0.52142227", "0.52133244", "0.5208783", "0.5200722", "0.51988465", "0.51974726", "0.51821446", "0.5176731", "0.51753306", "0.5172597", "0.5168079", "0.51663727", "0.5155266", "0.5154591", "0.5154503", "0.5153927", "0.5149386", "0.5145275", "0.51401424", "0.5137585", "0.513436", "0.51250833" ]
0.81616944
0
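Note: the gold `document` in the record above uses `epsilon` without defining it. A minimal self-contained sketch of the same near-zero test follows; the tolerance value is an assumption for illustration, not taken from the record.

epsilon = 1e-9  # hypothetical tolerance; the record leaves epsilon undefined

def is_small(number: float) -> bool:
    # True when number is within epsilon of zero, matching the record's abs(number) < epsilon test
    return abs(number) < epsilon

assert is_small(1e-12)
assert not is_small(0.1)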
Compute the crossProduct of the vectors p2 - p1 and p3 - p1.
def crossProduct(p1, p2, p3): return ( -(p1[1]*p2[0]) + p1[0]*p2[1] + p1[1]*p3[0] - p2[1]*p3[0] - p1[0]*p3[1] + p2[0]*p3[1] )
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def cross_product(p0,p1,p2):\n\treturn (((p1[0]-p0[0])*(p2[1]-p0[1]))-((p2[0]-p0[0])*(p1[1]-p0[1])))", "def cross_product(v1, v2):\n return cg3d_vector.CG3dVector(\n v1[1] * v2[2] - v2[1] * v1[2],\n v1[2] * v2[0] - v2[2] * v1[0],\n v1[0] * v2[1] - v2[0] * v1[1]\n )", "def crossProduct(self, p1, p2):\n return (p1.x * p2.y - p1.y * p2.x)", "def cross_pts_triangle(p1, p2, p3):\n return (p1[:, 0] - p3[0]) * (p2[1] - p3[1]) - (p2[0] - p3[0]) * (p1[:, 1] - p3[1])", "def cross3(self, left, right):\n return np.array([left[1] * right[2] - left[2] * right[1],\n left[2] * right[0] - left[0] * right[2],\n left[0] * right[1] - left[1] * right[0]])", "def cross(p, q):\n xyz = np.zeros(3)\n xyz[0] = p[1] * q[2] - p[2] * q[1]\n xyz[1] = p[2] * q[0] - p[0] * q[2]\n xyz[2] = p[0] * q[1] - p[1] * q[0]\n return xyz", "def cross_product(a, b):\n a1, a2, a3 = a\n b1, b2, b3 = b\n return (a2 * b3 - a3 * b2, a3 * b1 - a1 * b3, a1 * b2 - a2 * b1)", "def cross(v1, v2):\n return np.cross(v1, v2)", "def cross(vec1, vec2):\n result = np.zeros(3)\n return cross_(vec1, vec2, result)", "def crossProduct( set1, set2):\n set1 = asarray( set1, _aformat(set1))\n set1 = reshape( set1, (-1, 3))\n set2 = asarray( set2, _aformat(set2))\n set2 = reshape( set2, (-1, 3))\n return cross( set1, set2 )", "def cross(self, v):\n if (len(self.mV) != 3) or (len(v) != 3):\n raise IndexError('Cross product is only for 2 3-vectors.')\n\n (x1, y1, z1) = (self.mV[0], self.mV[1], self.mV[2])\n (x2, y2, z2) = (v[0], v[1], v[2])\n x = y1 * z2 - y2 * z1\n y = z1 * x2 - z2 * x1\n z = x1 * y2 - x2 * y1\n return Vector(x, y, z)", "def cross(v1: Vector, v2: Vector) -> Vector: # Function is fucked TODO\n if len(v1.coords) != 3 or len(v2.coords) != 3:\n raise ValueError(\"Vectors have to be 3 fucking D, nøøb\")\n x = v1.y * v2.z - v1.z * v2.y\n y = v1.z * v2.x - v1.x * v2.z\n z = v1.x * v2.y - v1.y * v2.x\n return Vector(x, y, z)", "def cross_(vec1, vec2, result):\n a1, a2, a3 = double(vec1[0]), double(vec1[1]), double(vec1[2])\n b1, b2, b3 = double(vec2[0]), double(vec2[1]), double(vec2[2])\n result[0] = a2 * b3 - a3 * b2\n result[1] = a3 * b1 - a1 * b3\n result[2] = a1 * b2 - a2 * b1\n return result", "def cross(a, b):\n c1 = a[1]*b[2] - a[2]*b[1]\n c2 = a[2]*b[0] - a[0]*b[2]\n c3 = a[0]*b[1] - a[1]*b[0]\n return sp.array([c1,c2,c3])", "def cross(a, b):\n #return np.cross(a,b)\n\n return vector(a[1] * b[2] - a[2] * b[1],\n a[2] * b[0] - a[0] * b[2],\n a[0] * b[1] - a[1] * b[0])", "def cross( self, vector3 ):\n product = cross( self.array, vector3.array )\n self._coords[:3] = matrix( product ).transpose()\n return self", "def cross_product(a, b):\n return (a[1]*b[2] - a[2]*b[0],\n a[2]*b[0] - a[0]*b[2],\n a[0]*b[1] - a[1]*b[0])", "def cross_multiply(x):\n return (x[0][0] * x[1][1]) - (x[0][1] * x[1][0])", "def cross(u,v):\n u1, u2, u3 = u\n v1, v2, v3 = v\n return np.array([u2*v3 - u3*v2,\n u3*v1 - u1*v3,\n u1*v2 - u2*v1], dtype=u.dtype)", "def cross_product(a,b):\n return [a[1]*b[2]-a[2]*b[1], a[2]*b[0]-a[0]*b[2], a[0]*b[1]-a[1]*b[0]]", "def crossProduct4( set1, set2 ):\n set1 = asarray( set1, _aformat(set1))\n set1 = reshape( set1, (-1, 4))\n set2 = asarray( set2, _aformat(set1))\n set2 = reshape( set2, (-1, 4))\n result = zeros( (len(set1),4), _aformat(set1))\n result[:,:3] = cross( set1[:,:3],set2[:,:3])\n result[:,3] = 1.0\n return result", "def xCrossProd(self, other):\n return other.y * self.z - other.z * self.y", "def test_cross_v3(self):\n\n vec1 = Vec3(1, 0, 0)\n vec2 = Vec3(0, 1, 0)\n cross = vec1.cross(vec2)\n\n expected = Vec3(0, 0, 
1)\n\n self.assertEqual(cross, expected)", "def vec_cross(a,b):\r\n return [a[1] * b[2] - a[2] * b[1],\r\n a[2] * b[0] - a[0] * b[2],\r\n a[0] * b[1] - a[1] * b[0]]", "def cross(a,b):\n \n return [ a[1]*b[2] - a[2]*b[1],\n a[2]*b[0] - a[0]*b[2],\n a[0]*b[1] - a[1]*b[0],\n 1.0 ]", "def cross(a, b):\n return np.array([a[1]*b[2] - a[2]*b[1],\n a[2]*b[0] - a[0]*b[2],\n a[0]*b[1] - a[1]*b[0]])", "def cross(v1: Vec2, v2: Vec2) -> float:\n return v1.x * v2.x + v1.y * v2.y", "def cross(x, y):\n x = x.reshape(3)\n y = y.reshape(3)\n z = np.cross(x, y)\n z = z.reshape((3, 1))\n return z", "def d_cross(a, b):\n d_cross = np.zeros((3, 3), dtype=float)\n for i in range(3):\n ei = np.zeros(3, dtype=float)\n ei[i] = 1.0\n d_cross[i] = np.cross(ei, b)\n return d_cross", "def uVectNorm(x1,y1,z1, # P\n x2,y2,z2, # Q\n x3,y3,z3): # R\n p1 = np.array([x1,y1,z1])\n p2 = np.array([x2,y2,z2])\n p3 = np.array([x3,y3,z3])\n\n v1 = p3-p1\n v2 = p2-p1\n\n cp = np.cross(v1,v2)\n a,b,c = cp\n\n d = np.dot(cp, p3)\n\n print(a,b,c)", "def crossproduct(first, other=FreeCAD.Vector(0,0,1)):\n if isinstance(first,FreeCAD.Vector) and isinstance(other,FreeCAD.Vector):\n return FreeCAD.Vector(first.y*other.z - first.z*other.y, first.z*other.x - first.x*other.z, first.x*other.y - first.y*other.x)", "def ccw(p1, p2, p3):\n return (p2[0] - p1[0])*(p3[1] - p1[1]) - (p2[1] - p1[1])*(p3[0] - p1[0])", "def inv_cross_ration(z1, z2, z3, a):\n return ((1-a)*z1*z3 + a*z2*z3 - z1*z2)/((a-1)*z2 - a*z1 + z3)", "def test_cross():\n assert_equal(cross(Vector(1, 0, 0), Vector(0, 1, 0)), Vector(0, 0, 1))\n assert_equal(cross(Vector(1, 3, 2), Vector(-1, 1, 0)), Vector(-2, -2, 4))", "def vector_cross(x, y):\n\n if(len(x) != len(y)):\n raise ValueError(\"vector lengths differ\")\n elif(len(x) > 3):\n raise ValueError(\"vector is more than 3D\")\n else:\n s = [x[1] * y[2] - x[2] * y[1],\n x[2] * y[0] - x[0] * y[2],\n x[0] * y[1] - x[1] * y[0]]\n return s", "def normal_vector_3p(a: Vector, b: Vector, c: Vector) -> Vector:\n return (b - a).cross(c - a).normalize()", "def testCross(self):\n v1 = Vector(1, 0, 0)\n v2 = Vector(0, 1, 0)\n assert v1.cross(v2) == [0, 0, 1]\n assert v1.cross([0, 1, 0]) == Vector(0, 0, 1)\n\n v3 = Vector(-1, 0, 0)\n assert v2.cross(v3) == [0, 0, 1]\n\n assert Vector(0, 0, 1).cross(Vector(1, 0, 0)) == Vector(0, 1, 0)\n c = 0.707106781 # Cos 45\n assert Vector(0, 0, 3).cross(Vector(2*c, 0, 2*c)) == Vector(\n 0, 6*c, 0)\n\n c = 0.5 # cos 60deg\n s = 0.866025404 # sin 60deg\n assert Vector(0, 0, 3).cross(Vector(s, 0, c)) == Vector(0, 3*s, 0)\n assert Vector(0, 0, 3).cross([s, 0, c]) == [0, 3*s, 0]\n\n hitException = False\n try:\n v1 = Vector(1, 2, 3, 4)\n v2 = Vector(5, 6, 7, 8)\n v3 = v1.cross(v2)\n except IndexError:\n hitException = True\n assert hitException", "def test_cross_v3(self):\n\n from pedemath.vec3 import cross_v3\n\n cross = cross_v3(Vec3(0, -1, 0), Vec3(0, 0, -1))\n expected = Vec3(1, 0, 0)\n\n self.assertEqual(cross, expected)", "def crossProduct(self, factor):\n components = np.cross(self.components(), factor.components())\n return Vector.initializeFromComponents(components)", "def cross(self):\n return self.v.cross(self.z_axis)", "def cross(self, vec):\n if not isinstance(vec, self.__class__):\n raise TypeError('Cross product operand must be a vector')\n return Vector3(0, 0, np.asscalar(np.cross(self, vec)))", "def cross(triangles):\n vectors = np.diff(triangles, axis=1)\n crosses = np.cross(vectors[:, 0], vectors[:, 1])\n return crosses", "def cross_z(self):\n return self.v.cross(Vector((0, 0, 1)))", "def 
get_cross2d(v1, v2):\n return v1[0]*v2[1] - v1[1]*v2[0]", "def vector_cross(v, w):\n res = np.cross(v, w)\n\n if len(v) == 3:\n return Vector(*res)\n else:\n return res", "def cross_vectors(u, v):\n return [u[1] * v[2] - u[2] * v[1],\n u[2] * v[0] - u[0] * v[2],\n u[0] * v[1] - u[1] * v[0]]", "def counterclockwise(self, p1, p2, p3):\n return self.cross(Point(p2.x - p1.x, p2.y - p1.y), Point(p3.x - p1.x, p3.y - p1.y))", "def cross(a, b):\n # since the data can be n-dimensional, reshape\n # to a 2-d (3, N) array\n xyz_a, xyz_b = a.xyz, b.xyz\n orig_shape_a = xyz_a.shape\n orig_shape_b = xyz_b.shape\n xyz_a = xyz_a.reshape((3, xyz_a.size // 3))\n xyz_b = xyz_b.reshape((3, xyz_b.size // 3))\n\n # take the cross product\n cross_product = np.cross(xyz_a[:, :, np.newaxis], xyz_b,\n axisa=0, axisb=0, axisc=0)\n cross_product_unit = xyz_a.unit * xyz_b.unit\n cross_product = u.Quantity(cross_product, unit=cross_product_unit)\n\n cartrep = CartesianRepresentation(cross_product)\n return cartrep.reshape(orig_shape_a[1:] + orig_shape_b[1:])", "def cross(self, other):\n\t\treturn Vector3(\n\t\t\tself.y * other.z - self.z * other.y,\n\t\t\tself.z * other.x - self.x * other.z,\n\t\t\tself.x * other.y - self.y * other.x,\n\t\t)", "def TripleProduct(self, j1, m1, s1, j2, m2, s2, j3, m3, s3):\n return math.sqrt((2*j1+1)*(2*j2+1)*(2*j3+1) / (4*math.pi)) * self.threej((j1, m1, j2, m2, j3, m3)) * self.threej((j1, -1*s1, j2, -1*s2, j3, -1*s3))", "def find_p_cross(self):\n\n # initial values\n a = 0.\n b = 1.\n err = 1.\n\n while err > 1e-3:\n p = 0.5 * (a + b)\n self.compute_clusters(p)\n if self.is_crossed():\n b = p\n else:\n a = p\n err = abs(a - b)\n\n return p", "def ncross2(u, v):\n return sq2(u) * sq2(v) - dot2(u, v) ** 2", "def cross(self, vec):\n if not isinstance(vec, Vector3Array):\n raise TypeError('Cross product operand must be a Vector3Array')\n if self.nV != 1 and vec.nV != 1 and self.nV != vec.nV:\n raise ValueError('Cross product operands must have the same '\n 'number of elements.')\n return Vector3Array(np.cross(self, vec))", "def cross_product(qa, qb):\n qa_0 = qa[:, :, 0]\n qa_1 = qa[:, :, 1]\n qa_2 = qa[:, :, 2]\n\n qb_0 = qb[:, :, 0]\n qb_1 = qb[:, :, 1]\n qb_2 = qb[:, :, 2]\n\n # See https://en.wikipedia.org/wiki/Cross_product\n q_mult_0 = qa_1 * qb_2 - qa_2 * qb_1\n q_mult_1 = qa_2 * qb_0 - qa_0 * qb_2\n q_mult_2 = qa_0 * qb_1 - qa_1 * qb_0\n\n return torch.stack([q_mult_0, q_mult_1, q_mult_2], dim=-1)", "def get_cross_prod(self):\n ret = 1\n for dec in self.decisions:\n ret *= self.get_num_alt(dec)\n return ret", "def c1(adp1, adp2):\n\n def get_axis(adp):\n \"\"\"\n Returns ADP as its three principle axis representation.\n :param adp: List/Array type of length 6.\n :returns: List of three arrays of length 3.\n \"\"\"\n adp = np.matrix([[float(adp[0]), float(adp[3]), float(adp[4])],\n [float(adp[3]), float(adp[1]), float(adp[5])],\n [float(adp[4]), float(adp[5]), float(adp[2])]])\n w, v = np.linalg.eig(adp)\n return [np.array((w[j] * v[:, j]).flatten().tolist()[0]) for j \\\n in xrange(3)]\n\n adp1_axis = get_axis(adp1)\n adp2_axis = get_axis(adp2)\n\n val = 0\n for i in xrange(3):\n addval = abs(norm(adp1_axis[i] - adp2_axis[i]))\n addval = addval * abs((1 - abs(np.dot(adp1_axis[i], adp2_axis[i]))))\n val += addval\n return val", "def cross(o, a, b):\r\n xo, yo = o\r\n xa, ya = a\r\n xb, yb = b\r\n return (xa - xo)*(yb - yo) - (ya - yo)*(xb - xo)", "def euclidean_distance_3(P1, P2):\r\n return (P1[0]-P2[0])**2+(P1[1]-P2[1])**2+(P1[2]-P2[2])**2", "def cross(self, vec):\n if not 
isinstance(vec, self.__class__):\n raise TypeError('Cross product operand must be a vector')\n return self.__class__(np.cross(self, vec))", "def ucross(a, b):\n ev = a / np.linalg.norm(a)\n return np.cross(ev, b)", "def cross(self, vector):\n\n return Vector((self.y * vector.z - self.z * vector.y),\n (self.z * vector.x - self.x * vector.z),\n (self.x * vector.y - self.y * vector.x))", "def spm_cross(X, x=None, *args):\n\n if len(args) == 0 and x is None:\n if X.dtype == \"object\":\n Y = spm_cross(*list(X))\n\n elif np.issubdtype(X.dtype, np.number):\n Y = X\n\n return Y\n\n if X.dtype == \"object\":\n X = spm_cross(*list(X))\n\n if x is not None and x.dtype == \"object\":\n x = spm_cross(*list(x))\n\n reshape_dims = tuple(list(X.shape) + list(np.ones(x.ndim, dtype=int)))\n A = X.reshape(reshape_dims)\n\n reshape_dims = tuple(list(np.ones(X.ndim, dtype=int)) + list(x.shape))\n B = x.reshape(reshape_dims)\n\n Y = np.squeeze(A * B)\n\n for x in args:\n Y = spm_cross(Y, x)\n\n return Y", "def cross(self, vec2):\n if type(vec2) != Vector:\n raise TypeError(\"Not a vector\")\n\n if (len(self) or len(vec2)) != 3:\n raise Exception(\"Incorrect vector lengths. Must be two 3 length vectors.\")\n\n return Vector(self[1]*vec2[2] - self[2]*vec2[1],\n self[2]*vec2[0] - self[0]*vec2[2],\n self[0]*vec2[1] - self[1]*vec2[0])", "def cross_product(self, B:'Matrix') -> 'Matrix':\n assert self.shape() == B.shape(), f\"For cross product, shapes should be (1x3) - these are {self.shape()} and {B.shape()}.\"\n if self.shape()[1] == 1: # checking for a (3 x 1) matrix, in which case, we'll use the transposes.\n assert self.shape()[0] > 2 , f\"self must be at least 3 in one direction. This is {self.shape()}\"\n return self.transpose().cross(B.transpose())\n assert self.shape()[1] > 2, f\"self must be at least 3 in one direction. This is {self.shape()}\"\n\n return Matrix(((self.mat[0][1] * B.mat[0][2] - self.mat[0][2] * B.mat[0][1],\n self.mat[0][2] * B.mat[0][0] - self.mat[0][0] * B.mat[0][2],\n self.mat[0][0] * B.mat[0][1] - self.mat[0][1] * B.mat[0][0]),))", "def cross_product(col_set1, col_set2=None):\n if col_set2 is None:\n col_set2 = col_set1\n table = []\n for col1 in col_set1:\n row = []\n for col2 in col_set2:\n cov = covariance(col1.data, col2.data, col1.mask, col2.mask)\n # if abs(cov) > 0.1 and cov != 1.00000:\n print(col1.name, ' v.s. 
', col2.name, '\\t', '{:+.5f}'.format(cov))\n row.append(cov)\n table.append(row)\n return table", "def cross(self, other: PointOrIterable) -> float:\n try:\n return (self.x * other.y) + (self.y * other.x)\n except AttributeError:\n pass\n return (self.x * other[1]) + (self.y * other[0])", "def mult(p, q):\n if p.ndim == 1 and q.ndim > 1:\n p = np.tile(p,(q.shape[0],1))\n if q.ndim == 1 and p.ndim > 1:\n q = np.tile(q,(p.shape[0],1))\n if q.ndim == 1 and p.ndim == 1:\n p = p.reshape((1,4))\n q = q.reshape((1,4))\n\n ps = p[:,3]\n qs = q[:,3]\n pv = p[:,:3]\n qv = q[:,:3]\n\n pq = np.empty_like(p)\n pq[:,3] = ps * qs \n pq[:,3] -= arraylist_dot(pv, qv).flatten()\n pq[:,:3] = ps[:,np.newaxis] * qv \n pq[:,:3] += pv * qs[:,np.newaxis] \n pq[:,:3] += np.cross(pv , qv)\n\n #opposite sign due to different convention on the basis vectors\n #pq *= -1\n return pq", "def find_plane_eq(p1, p2, p3):\n\n p1 = np.asarray(p1)\n p2 = np.asarray(p2)\n p3 = np.asarray(p3)\n\n # These two vectors are in the plane\n v1 = p3 - p1\n v2 = p2 - p1\n\n # the cross product is a vector normal to the plane\n cp = np.cross(v1, v2)\n a, b, c = cp\n\n # This evaluates a * x3 + b * y3 + c * z3 which equals d\n d = np.dot(cp, p3)\n\n plane_eq = np.array([a, b, c, d])\n\n return plane_eq", "def liner_cross_point(a1, b1, c1, a2, b2, c2):\n if a1 == 0 or a2 == 0:\n if a2 == 0:\n a1, b1, c1, a2, b2, c2 = a2, b2, c2, a1, b1, c1\n y = - c1 / b1\n x = - (b2 * y + c2) / a2\n elif b1 == 0 or b2 == 0:\n if b2 == 0:\n a1, b1, c1, a2, b2, c2 = a2, b2, c2, a1, b1, c1\n x = - c1 / a1\n y = - (a2 * x + c2) / b2\n else:\n a1, b1, c1 = a1 / b1, b1 / b1, c1 / b1\n a2, b2, c2 = a2 / b2, b2 / b2, c2 / b2\n x = - (c1 - c2) / (a1 - a2)\n y = - a1 * x - c1\n return x, y", "def plane_point_side_v3(p: np.ndarray, v: np.ndarray) -> Any:\n return p[:3].dot(v) + p[3]", "def cross2(u, v, w):\n return dot2(u, w) * v - dot2(u, v) * w", "def get_normal_vector_of_plane(p1, p2, p3):\n v12 = np.array(p1) - np.array(p2)\n v13 = np.array(p1) - np.array(p3)\n nvec = np.cross(v12, v13)\n ## print 'norm: '+str(np.linalg.norm(nvec))\n return nvec / np.linalg.norm(nvec)", "def rotation(self, p1, p2, p3):\n return (p2[0] - p1[0]) * (p3[1] - p1[1]) - (p2[1] - p1[1]) * (p3[0] - p1[0])", "def quaterion_product(q, p):\n q0 = q[3]\n p0 = p[3]\n\n return [q0*p[0:3] + p0*q[0:3] + mtimes(skew(q[0:3]), p[0:3]), q0*p0 - mtimes(q[0:3].T, p[0:3])]", "def cross(self, other):\n return type(self)(\n self.y * other.z - self.z * other.y,\n -self.x * other.z + self.z * other.x,\n self.x * other.y - self.y * other.x,\n )", "def outer_product(u: Vector3D, v: Vector3D):\n cx = u.y * v.z - u.z * v.y\n cy = u.z * v.x - u.x * v.z\n cz = u.x * v.y - u.y * v.x\n return Vector3D(cx, cy, cz, coordinate_system='cartesian')", "def crossform(a):\n return np.array([[0, -a[2], a[1]],\n [a[2], 0, -a[0]],\n [-a[1], a[0], 0]])", "def product1(a, b, c) :\n return a * b * c", "def cross(self, other):\n if self.x == other.x:\n if self.x == 0:\n return other\n else:\n cross = getcopy(self)\n for row in other.a:\n cross.newrow(row)\n cross.newrow([self.prepare(1.0)]*cross.x)\n out = cross.new(1)\n for x in xrange(0, out.x):\n out.store(0,x, cross.minor(cross.y-1, x).det())\n return out\n else:\n raise IndexError(\"Matrix cross product invalid for dimensions \"+str(self.y)+\"x\"+str(self.x)+\" and \"+str(other.y)+\"x\"+str(other.x))", "def cross(self, other):\n \n return self.x * other[1] - self.y * other[0]", "def vector_dot(v1,v2):\n return (v1.x * v2.x) + (v1.y * v2.y) + (v1.z * v2.z)", "def 
cross_z(self):\n return Vector((self.v.y, -self.v.x))", "def crossover(p1, p2, gamma=0.1):\n c1 = p1.deepcopy()\n c2 = p2.deepcopy()\n alpha = np.random.uniform(0, gamma, 1)\n c1.position = alpha * p1.position + (1 - alpha) * p2.position\n c2.position = alpha * p2.position + (1 - alpha) * p1.position\n return c1, c2", "def product(value1, value2, value3):\n prod = value1 * value2\n prod = prod * value3\n return prod", "def product(value1, value2, value3):\n prod = value1 * value2\n prod = prod * value3\n return prod", "def same_side_product(p, q, a, b):\n return line_ccw(a, b, p) * line_ccw(a, b, q)", "def dotProduct(self, v):\n return self.x * v.x + self.y * v.y + self.z * v.z", "def dot_product(a, b):\n a1, a2, a3 = a\n b1, b2, b3 = b\n return a1 * b1 + a2 * b2 + a3 * b3", "def pair_product(x1, x2):\n return np.multiply(x1, x2)", "def cross(a: conlist(item_type=confloat(),\n min_items=3,\n max_items=3),\n b: conlist(item_type=confloat(),\n min_items=3,\n max_items=3)) -> Dict[str, float]:\n\n i = a[1] * b[2] - a[2] * b[1]\n j = a[2] * b[0] - a[0] * b[2]\n k = a[0] * b[1] - a[1] * b[0]\n\n return {\n 'x': i,\n 'y': j,\n 'z': k\n }", "def cross(self, other):\n\n Vector = sympy.vector.Vector\n if other == Vector.zero:\n return Dyadic.zero\n elif isinstance(other, Vector):\n outdyad = Dyadic.zero\n for k, v in self.components.items():\n cross_product = k.args[1].cross(other)\n outer = k.args[0].outer(cross_product)\n outdyad += v * outer\n return outdyad\n else:\n raise TypeError(str(type(other)) + \" not supported for \" +\n \"cross with dyadics\")", "def cartesian(self) -> Tuple[np.number, np.number, np.number, np.number]:\n if self.dimension > 3:\n raise ValueError(\"The plane dimension must be <= 3.\")\n\n # The normal must be 3D to extract the coefficients.\n a, b, c = self.normal.set_dimension(3)\n\n d = -self.normal.dot(self.point)\n\n return a, b, c, d", "def fit(self, p1, p2, p3):\n a = ((p2[1]-p1[1])*(p1[0]-p3[0]) + (p3[1]-p1[1])*(p2[0]-p1[0])) / \\\n ((p1[0]-p3[0])*(p2[0]**2-p1[0]**2) + (p2[0]-p1[0])*(p3[0]**2-p1[0]**2))\n b = ((p2[1]-p1[1]) - a*(p2[0]**2 - p1[0]**2)) / (p2[0]-p1[0])\n c = p1[1] - a*p1[0]**2 - b*p1[0]\n return lambda x: a*x**2 + b*x + c", "def from_3p(cls, a: Vector, b: Vector, c: Vector) -> 'Plane':\n n = (b - a).cross(c - a).normalize()\n return Plane(n, n.dot(a))", "def intersects(p1, p2):\n if p1^p2:\n return -(np.dot(p1.v, p2.w) * np.eye(3, 3) + \\\n p1.w.reshape((3,1)) @ p2.v.reshape((1,3)) - \n p2.w.reshape((3,1)) @ p1.v.reshape((1,3))) * sm.unitvec(np.cross(p1.w, p2.w))\n else:\n return None", "def ccw(p1: np.ndarray, p2: np.ndarray, p3: np.ndarray) -> int:\n dx1 = p2[0] - p1[0]\n dy1 = p2[1] - p1[1]\n dx2 = p3[0] - p1[0]\n dy2 = p3[1] - p1[1]\n\n dx1dy2 = dx1 * dy2\n dy1dx2 = dy1 * dx2\n\n if dx1dy2 > dy1dx2:\n return 1\n if dx1dy2 < dy1dx2:\n return -1\n if dx1 * dx2 < 0 or dy1 * dy2 < 0:\n return -1\n if dx1 * dx1 + dy1 * dy1 < dx2 * dx2 + dy2 * dy2:\n return 1\n\n return 0", "def dotproduct(v1, v2):\n\treturn sum(imap(operator.mul, v1, v2))", "def cross(self, other):\n if isinstance(other, float):\n return Vector(other*self.y, -other*self.x)\n\n if isinstance(other, Vector):\n return self.x*other.y - self.y*other.x", "def cross_covariance(y, z):\n return CrossCovariance()(y, z)", "def proVec(*args):\r\n resultado = []\r\n i,j,k = (args[0][1] * args[1][2]) - (args[0][2] * args[1][1]) , ((args[0][0] * args[1][2]) - (args[0][2] * args[1][0])) * (-1) , (args[0][0] * args[1][1]) - (args[0][1] * args[1][0])\r\n resultado.append(i)\r\n resultado.append(j)\r\n 
resultado.append(k)\r\n return resultado" ]
[ "0.7840646", "0.7627517", "0.7564835", "0.75022256", "0.7371499", "0.716658", "0.7165488", "0.71437544", "0.7071827", "0.69996214", "0.6967165", "0.69446385", "0.6910858", "0.6805146", "0.67996776", "0.67702717", "0.67530876", "0.6737045", "0.6727149", "0.6688273", "0.66100967", "0.65714574", "0.6571141", "0.655815", "0.6545461", "0.65289557", "0.6527682", "0.6517802", "0.64971024", "0.6418191", "0.6403177", "0.63746303", "0.63191706", "0.6305024", "0.63009834", "0.62964976", "0.6295711", "0.62867427", "0.62817425", "0.62771595", "0.6245885", "0.6227525", "0.614918", "0.6148046", "0.6135433", "0.60973", "0.60839534", "0.60702837", "0.6058816", "0.60385466", "0.60128874", "0.6008357", "0.5990883", "0.59898835", "0.5975903", "0.5961346", "0.59135705", "0.59133804", "0.591207", "0.5888788", "0.5885148", "0.58749354", "0.58749056", "0.5864775", "0.5842458", "0.5808323", "0.5802841", "0.5797754", "0.57901144", "0.57645965", "0.5757501", "0.5736145", "0.5733094", "0.5725109", "0.5720479", "0.5703096", "0.5694883", "0.56859547", "0.5685461", "0.5630397", "0.56244373", "0.5623307", "0.5605206", "0.56025", "0.56025", "0.55951303", "0.5585885", "0.55552393", "0.55101067", "0.5509476", "0.55076283", "0.5480992", "0.5474871", "0.5472534", "0.5464957", "0.5461841", "0.5446487", "0.5440593", "0.5423468", "0.54205686" ]
0.92136556
0
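For reference, the expanded formula in the gold `document` above is the z-component of the 2D cross product (p2 - p1) x (p3 - p1). A short sketch checking that the two forms agree; the sample points are illustrative only.

def cross_product(p1, p2, p3):
    # z-component of (p2 - p1) x (p3 - p1)
    return (p2[0] - p1[0]) * (p3[1] - p1[1]) - (p2[1] - p1[1]) * (p3[0] - p1[0])

def cross_product_expanded(p1, p2, p3):
    # same expression, expanded as in the record's document field
    return (-(p1[1] * p2[0]) + p1[0] * p2[1] + p1[1] * p3[0]
            - p2[1] * p3[0] - p1[0] * p3[1] + p2[0] * p3[1])

p1, p2, p3 = (0, 0), (2, 1), (1, 3)
assert cross_product(p1, p2, p3) == cross_product_expanded(p1, p2, p3) == 5

A positive value means p1, p2, p3 turn counterclockwise, which is how several of the negatives in this record (e.g. `ccw`, `cross_pts_triangle`) use the same test.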
Calculate the determinant of the matrix. 1 a[0] a[1] 1 b[0] b[1] 1 c[0] c[1]
def det(a, b, c):\n d = (b[0]*c[1]-c[0]*b[1])+(c[0]*a[1]-a[0]*c[1])+(a[0]*b[1]-a[1]*b[0])\n return d
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def determinant(self):\n if not self.is_square():\n raise(ValueError, \"Cannot calculate determinant of non-square matrix.\")\n if self.h > 2:\n raise(NotImplementedError, \"Calculating determinant not implemented for matrices largerer than 2x2.\")\n\n # TODO - your code here\n if self.h == 1:\n return self.g[0][0] # a 1x1 matrix\n else:\n return ((self.g[0][0] * self.g[1][1]) - (self.g[0][1] * self.g[1][0])) # a 2x2 matrix\n # TODO - your code here", "def determinant(self) -> float:\n num_R, num_C = self.shape()\n assert num_R == num_C, f\"Determinant must be for a square matrix; this one is {self.shape()}.\"\n # -------------------------------------------------------\n # TODO: You write this one.\n # Note: this one should be recursive....\n if num_R == 1:\n return self.mat[0][0]\n det =0\n for i in range(num_R):\n det += self.mat[0][i] * self.get_minor(0,i).determinant() * (-1)**i\n return det\n pass # remove this when you add your code.\n # -------------------------------------------------------", "def determinant(self):\n if self.n_rows != self.n_cols:\n raise Exception('Matrix is not square')\n if self.n_rows == 2:\n return (self.data[0][0] * self.data[1][1]) - (self.data[1][0] * self.data[0][1])\n else:\n echelon, ops = reduce_to_echelon(self.data.copy(), True)\n swaps = sum([1 if row[0] == 'swap' else 0 for row in ops])\n return math.prod([echelon[i][i] for i in range(len(echelon))]) * (-1) ** swaps", "def determinant(self):\n if self.m != self.n:\n raise exc.LinearAlgebraError(\"cannot calculate the determinant of\"\n \"a non-square matrix\")\n if self.m == 1:\n return self[0, 0]\n # TODO: can we choose a better row/column to improve efficiency\n return sum([self[0, j] * (-1 if j % 2 else 1) *\n self.subset([i for i in range(1, self.m)],\n [k for k in range(self.n) if k != j]).determinant\n for j in range(self.n)])", "def determinant(self):\n if self.m != self.n:\n raise exc.LinearAlgebraError(\"cannot calculate the determinant of\"\n \"a non-square matrix\")\n if self.m == 1:\n return self[0, 0]\n # TODO: can we choose a better row/column to improve efficiency\n return functools.reduce(\n lambda x, y: x ^ y,\n [self[0, j] and\n self.subset([i for i in range(1, self.m)],\n [k for k in range(self.n) if k != j]).determinant\n for j in range(self.n)],\n )", "def determinant(self):\n d1 = self._row_1[0] * (self._row_2[1] * self._row_3[2] - self._row_2[2] * self._row_3[1])\n d2 = self._row_1[1] * (self._row_2[0] * self._row_3[2] - self._row_2[2] * self._row_3[0])\n d3 = self._row_1[2] * (self._row_2[0] * self._row_3[1] - self._row_2[1] * self._row_3[0])\n return d1 - d2 + d3", "def determinant(self):\n if self.cols != self.rows:\n raise Exception ('Matrix is not square!')\n for i in range(self.rows):\n if self.values[i][i] == 0:\n raise Exception ('There is zero on the main diagonal')\n #TODO: Rearrange the lines, that the main diagonal don't have a zero values \n\n arr = self.values[:]\n for i in range(self.rows):\n for j in range(self.cols):\n diag = [arr[l][p] for p in range(self.cols) for l in range(self.rows) if l == p ]\n if i > j :\n arr2 = arr[i][j]/diag[j]\n arr1 = [round(x * arr2, 4) for x in arr[i-i+j]]\n arr[i] = map(lambda x,y: round(x - y, 4) , arr[i], arr1 )\n\n diag = [arr[l][p] for p in range(self.cols) for l in range(self.rows) if l == p ]\n det = 1\n for i in range(len(diag)):\n det *= diag[i]\n if det != 0 :\n return True\n else:\n return False", "def determinant (self):\n if self.is_square:\n det = 1\n for idx, row in enumerate(echelon_form(self).rows()):\n det 
*= row[idx]\n return det\n else:\n raise NotImplementedError(\n \"Determinant only defined for square matrices.\")", "def determinant(self):\n if not self.is_square():\n raise(ValueError, \"Cannot calculate determinant of non-square matrix.\")\n if self.h > 2:\n raise(NotImplementedError, \"Calculating determinant not implemented for matrices largerer than 2x2.\")\n \n # TODO - your code here\n if self.h == 1:\n return self.g[0][0];\n else:\n return self.g[0][0]*self.g[1][1]-self.g[0][1]*self.g[1][0];", "def det(self,mat):\n if(len(mat[0])==len(mat)):\n result = np.linalg.det(mat)\n self.determinant = result\n return self.determinant\n else:\n print(\"Not a square Matrix\")", "def det(self):\n self.matrix() # forces the update of the matrix in the module's default\n # basis, to make sure that the dictionary self._matrices\n # is not empty\n return self._matrices.values()[0].det() # pick a random value in the\n # dictionary self._matrices\n # and compute the determinant", "def det(self):\n\n if self.rows != self.columns:\n raise ValueError(\"Matrix must be square\")\n\n if self.rows == 1:\n return self.row(1)[0]\n\n if self.rows == 2:\n return self.entry(1,1) * self.entry(2,2) - self.entry(1,2) * self.entry(2,1)\n\n det = 0\n row_to_expand = 1\n\n for i in range(1, self.columns + 1):\n det += self.entry(row_to_expand, i) * self._cofactor(row_to_expand, i)\n\n return det", "def det_matrix(self):\n return np.linalg.det(self.take_matrix())", "def determinant(self):\n det = 0\n # Check if is square\n # 检验其是否是方形矩阵\n if not self.is_square():\n raise(ValueError, \"Cannot calculate determinant of non-square matrix.\")\n if self.h > 2:\n raise(ValueError, \"Calculating determinant not implement for matrices largerer than 2x2\")\n\n # TODO - your code here\n\n # 这里仅实现了获取1x1 2x2 矩阵的det值\n # For Matrix 1x1\n if (self.h * self.w) == 1:\n det = self.grid[0][0]\n # For Matrix 2x2\n elif self.h == 2 & self.w == 2:\n det = self.g[1][1] * self.g[0][0] - self.g[0][1] * self.g[1][0]\n # In the future could implement determinant for matrix bigger\n else:\n raise(NotImplementedError, \"Calculating determinant not implement for matrices largerer than 2x2.\")\n return det", "def _det(mat):\n\n return (mat[0][0] * (mat[1][1] * mat[2][2] - mat[1][2] * mat[2][1])\n + mat[0][1] * (mat[1][2] * mat[2][0] - mat[1][0] *\n mat[2][2]) + mat[0][2] * (mat[1][0] * mat[2][1] -\n mat[1][1] * mat[2][0]))", "def determinant(self):\n if not self.isSquare():\n raise ValueError(\"Determinant is not defined for non-square matrix\")\n if (self._height == 1 and self._width == 1):\n return self._value[0][0]\n returnvalue = 0\n for i in range(self._width):\n returnvalue += self._value[0][i] * self.cofactor(0, i)\n return returnvalue", "def determinant(self):\n return np.linalg.det(self._data)", "def matrix_det(A):\n\tx = A[0,0]*A[1,1]*A[2,2] + A[0,1]*A[1,2]*A[2,0] + A[0,2]*A[1,0]*A[2,1]\n\ty = A[0,0]*A[1,2]*A[2,1] + A[0,1]*A[1,0]*A[2,2] + A[0,2]*A[1,1]*A[2,0]\n\treturn x - y", "def compute_determinant(matrix):\n det = np.linalg.det(matrix)\n #if det == 0.:\n # The det = 0 could be related to the third feature\n # det = np.linalg.det(matrix[:2, :2])\n if det == 0.:\n # Singular covariance matrix, should not be taken into account\n det = np.nan\n if np.isclose(det, 0):\n det = np.abs(det)\n return det", "def determinant(self):\n if self.L is None or self.U is None:\n self.decomposeLU()\n\n retval = 1.0\n for i in range(self.rows):\n retval *= self.L[i, i] * self.U[i, i]\n return retval", "def determinant(matrix):\n if matrix == [[]]:\n 
return 1\n if type(matrix) is not list or len(matrix) < 1 or\\\n not all(isinstance(x, list) for x in matrix):\n raise TypeError(\"matrix must be a list of lists\")\n if not all(len(matrix) == len(x) for x in matrix):\n raise ValueError(\"matrix must be a square matrix\")\n copy = list(map(list, matrix))\n dim = len(matrix)\n if dim == 1:\n return matrix[0][0]\n elif dim == 2:\n return matrix[0][0] * matrix[1][1] - matrix[1][0] * matrix[0][1]\n else:\n for cur in range(dim):\n for i in range(cur + 1, dim):\n if copy[cur][cur] == 0:\n copy[cur][cur] = 1.0e-10\n curScaler = copy[i][cur] / copy[cur][cur]\n for j in range(dim):\n copy[i][j] = copy[i][j] - curScaler * copy[cur][j]\n det = 1\n for i in range(dim):\n det *= copy[i][i]\n return round(det)", "def determinant(matrix):\n if type(matrix) is not list or len(matrix) == 0:\n raise TypeError(\"matrix must be a list of lists\")\n\n if len(matrix) == 1 and len(matrix[0]) == 0:\n return 1\n\n for i in matrix:\n if type(i) is not list:\n raise TypeError(\"matrix must be a list of lists\")\n\n if len(i) != len(matrix):\n raise ValueError(\"matrix must be a square matrix\")\n\n if len(matrix) == 1:\n return matrix[0][0]\n\n if len(matrix) == 2:\n return (matrix[0][0] * matrix[1][1]) - (matrix[0][1]\n * matrix[1][0])\n deter = 0\n\n for j, k in enumerate(matrix[0]):\n rows = [r for r in matrix[1:]]\n sub = []\n for r in rows:\n sub.append([r[a] for a in range(len(matrix)) if a != j])\n deter += k * (-1) ** j * determinant(sub)\n return deter", "def det(self):\n if self.x == 0 or self.y == 0:\n return None\n elif self.x == 1 or self.y == 1:\n return self.retrieve(0,0)\n else:\n out = 0.0\n for x in xrange(0, self.x):\n out += self.retrieve(0,x)*self.C(0,x)\n return out", "def det(A):\n # Section 1: Establish n parameter and copy A\n n = len(A)\n AM = A[:]\n\n # Section 2: Row manipulate A into an upper triangle matrix\n for fd in range(n): # fd stands for focus diagonal\n if AM[fd][fd] == 0:\n AM[fd][fd] = 1.0e-18 # Cheating by adding zero + ~zero\n for i in range(fd+1, n): # skip row with fd in it.\n crScaler = AM[i][fd] / AM[fd][fd] # cr stands for \"current row\".\n for j in range(n): # cr - crScaler * fdRow, one element at a time.\n AM[i][j] = AM[i][j] - crScaler * AM[fd][j]\n\n # Section 3: Once AM is in upper triangle form ...\n product = 1.0\n for i in range(n):\n product *= AM[i][i] # ... 
product of diagonals is determinant\n\n return product", "def det2(m):\n\t(a,b), (c,d) = m\n\treturn a*d - b*c", "def determinant(x):\n if len(x) == len(x[0]):\n if len(x) == 2:\n return cross_multiply(x)\n else:\n val = 0\n alt = False\n for i in range(len(x)):\n tmp = x[1:]\n t1, t2 = tmp[0][:], tmp[1][:]\n _ = t1.pop(i), t2.pop(i)\n new_t = [t1, t2]\n print(new_t)\n x_multiply = cross_multiply(new_t)\n if val == 0:\n val = x[0][i] * x_multiply\n else:\n if alt:\n val = val + (x[0][i] * x_multiply)\n alt = False\n else:\n val = val - (x[0][i] * x_multiply)\n alt = True\n return val\n else:\n return 'matrix is not a square matrix.'", "def det(a):\n a = copy.deepcopy(a)\n n = len(a)\n det = 1\n com_k = 1\n for k in range(n-1):\n step = 1\n\n while a[k][k] == 0:\n a[k+step], a[k] = a[k], a[k+step]\n det = -det\n step += 1\n mul = a[k][k]\n\n for i in range(k+1, n):\n for j in range(k+1, n):\n a[i][j] *= mul\n a[i][j] -= a[i][k] * a[k][j]\n a[i][j] /= com_k\n\n com_k = mul\n\n det = det * a[-1][-1]\n\n return det", "def determinant(A):\n \n total = 0\n\n if len(A) == 1:\n return A[0][0]\n\n for col in range(len(A)):\n Asub = A[1:]\n for j in range(len(A)-1):\n Asub[j] = Asub[j][:col] + Asub[j][col+1:]\n subdet = determinant(Asub)\n sign = (-1) ** (col % 2)\n total += sign * A[0][col] * subdet\n\n return total", "def Determinant(matrix, mul):\r\n width = len(matrix)\r\n # Stop Conditions\r\n if width == 1:\r\n return mul * matrix[0][0]\r\n else:\r\n sign = -1\r\n det = 0\r\n for i in range(width):\r\n m = []\r\n for j in range(1, width):\r\n buff = []\r\n for k in range(width):\r\n if k != i:\r\n buff.append(matrix[j][k])\r\n m.append(buff)\r\n # Change the sign of the multiply number\r\n sign *= -1\r\n # Recursive call for determinant calculation\r\n det = det + mul * Determinant(m, sign * matrix[0][i])\r\n return det", "def calc_det(m):\n det = m[0][0] * m [1][1] - (m[0][1] * m[1][0])\n return det", "def determinant_fast(A):\n # Section 1: Establish n parameter and copy A\n n = len(A)\n AM = copy_matrix(A)\n\n # Section 2: Row manipulate A into an upper triangle matrix\n for fd in range(n): # fd stands for focus diagonal\n if AM[fd][fd] == 0: \n AM[fd][fd] = 1.0e-18 # Cheating by adding zero + ~zero\n for i in range(fd+1,n): # skip row with fd in it.\n crScaler = AM[i][fd] / AM[fd][fd] # cr stands for \"current row\".\n for j in range(n): # cr - crScaler * fdRow, but one element at a time.\n AM[i][j] = AM[i][j] - crScaler * AM[fd][j]\n \n # Section 3: Once AM is in upper triangle form ...\n product = 1.0\n for i in range(n):\n product *= AM[i][i] # ... 
product of diagonals is determinant\n\n return product", "def det(mtx):\n if not is_square(mtx):\n raise ValueError(\"Matrix should be square\")\n if len(mtx) == 2:\n return mtx[0][0] * mtx[1][1] - mtx[0][1] * mtx[1][0]\n\n result = 0\n sign = 1\n for inx in range(len(mtx)):\n next_mtx = get_minor_mtx(mtx, 0, inx)\n result += sign * (mtx[0][inx] * det(next_mtx))\n sign *= -1\n return result", "def det_matrix_2x2(m: list):\n return m[0][0]*m[1][1] - m[0][1]*m[1][0]", "def det(self):\n\t\t\n\t\trows = self._rows\n\t\tsign = +1\n\t\tsumm = 0\n\n\t\tfor perm in permutations(range(rows), rows):\n\t\t\tmul = 1\n\t\t\tsign = SquareMatrix.__parity_of_permutation(perm)\n\n\t\t\tfor i in range(rows):\n\t\t\t\tmul *= self[i][perm[i]]\n\n\t\t\tsumm += sign * mul\n\t\treturn summ", "def determinant(self):\r\n m11,m12,m13,m14,m21,m22,m23,m24,m31,m32,m33,m34,m41,m42,m43,m44 = self.mlist\r\n\r\n return m11*m22*m33*m44 \\\r\n -m11*m22*m34*m43 \\\r\n +m11*m23*m34*m42 \\\r\n -m11*m23*m32*m44 \\\r\n +m11*m24*m32*m43 \\\r\n -m11*m24*m33*m42 \\\r\n -m12*m23*m34*m41 \\\r\n +m12*m23*m31*m44 \\\r\n -m12*m24*m31*m43 \\\r\n +m12*m24*m33*m41 \\\r\n -m12*m21*m33*m44 \\\r\n +m12*m21*m34*m43 \\\r\n +m13*m24*m31*m42 \\\r\n -m13*m24*m32*m41 \\\r\n +m13*m21*m32*m44 \\\r\n -m13*m21*m34*m42 \\\r\n +m13*m22*m34*m41 \\\r\n -m13*m22*m31*m44 \\\r\n -m14*m21*m32*m43 \\\r\n +m14*m21*m33*m42 \\\r\n -m14*m22*m33*m41 \\\r\n +m14*m22*m31*m43 \\\r\n -m14*m23*m31*m42 \\\r\n +m14*m23*m32*m41", "def Determinant_3x3(A, step_by_step=True ,row=True, n=1):\n \n if A.shape!=(3,3):\n raise ValueError('Dimension of matrix A should be 3x3. The input A must be a sp.Matrix of shape (3,3).')\n if n<1 or n>3 or not isinstance(n, int):\n raise ValueError('n should be an integer between 1 and 3.')\n \n # Construct string for determinant of matrix A\n detA_s = sp.latex(A).replace('[','|').replace(']','|')\n \n # To print all the steps\n if step_by_step:\n\n # If we compute the determinant with row n \n if row:\n # Matrix with row i and col j removed (red_matrix(A, i, j))\n A1 = red_matrix(A, n, 1)\n A2 = red_matrix(A, n, 2)\n A3 = red_matrix(A, n, 3)\n detA1_s = sp.latex(A1).replace('[','|').replace(']','|')\n\n detA2_s = sp.latex(A2).replace('[','|').replace(']','|')\n detA3_s = sp.latex(A3).replace('[','|').replace(']','|')\n\n line1 = \"$\" + detA_s + ' = ' + pl_mi(n,1, True) + sp.latex(A[n-1, 0]) + detA1_s + pl_mi(n,2) + \\\n sp.latex(A[n-1, 1]) + detA2_s + pl_mi(n,3) + sp.latex(A[n-1, 2]) + detA3_s + '$'\n\n line2 = '$' + detA_s + ' = ' + pl_mi(n,1, True) + sp.latex(A[n-1, 0]) + \"\\cdot (\" + sp.latex(sp.det(A1)) \\\n +\")\" + pl_mi(n,2) + sp.latex(A[n-1, 1]) + \"\\cdot (\" + sp.latex(sp.det(A2)) + \")\"+ \\\n pl_mi(n,3) + sp.latex(A[n-1, 2]) + \"\\cdot (\" + sp.latex(sp.det(A3)) + ')$'\n line3 = '$' + detA_s + ' = ' + sp.latex(sp.simplify(sp.det(A))) + '$'\n\n # If we compute the determinant with col n \n else:\n # Matrix with row i and col j removed (red_matrix(A, i, j))\n A1 = red_matrix(A, 1, n)\n A2 = red_matrix(A, 2, n)\n A3 = red_matrix(A, 3, n)\n detA1_s = sp.latex(A1).replace('[','|').replace(']','|')\n detA2_s = sp.latex(A2).replace('[','|').replace(']','|')\n detA3_s = sp.latex(A3).replace('[','|').replace(']','|')\n\n line1 = \"$\" + detA_s + ' = ' + pl_mi(n,1, True) + brackets(A[0, n-1]) + detA1_s + pl_mi(n,2) + \\\n brackets(A[1, n-1]) + detA2_s + pl_mi(n,3) + brackets(A[2, n-1]) + detA3_s + '$'\n\n line2 = '$' + detA_s + ' = ' + pl_mi(n,1, True) + brackets(A[0, n-1]) + \"\\cdot (\" + sp.latex(sp.det(A1))\\\n +\")\" + pl_mi(n,2) + 
brackets(A[1, n-1]) + \"\\cdot (\" + sp.latex(sp.det(A2)) + \")\"+ \\\n pl_mi(n,3) + brackets(A[2, n-1]) + \"\\cdot (\" + sp.latex(sp.det(A3)) + ')$'\n\n line3 = '$' + detA_s + ' = ' + sp.latex(sp.simplify(sp.det(A))) + '$'\n\n # Display step by step computation of determinant\n display(Latex(line1))\n display(Latex(line2))\n display(Latex(line3))\n # Only print the determinant without any step\n else:\n display(Latex(\"$\" + detA_s + \"=\" + sp.latex(sp.det(A)) + \"$\"))", "def det(self):\n if not self.is_endomorphism():\n raise ArithmeticError(\"Matrix morphism must be an endomorphism.\")\n return self.matrix().determinant()", "def determinant(self: Qs) -> Qs:\n\n if self.dim == 1:\n q_det = self.qs[0]\n\n elif self.dim == 4:\n ad = product(self.qs[0], self.qs[3])\n bc = product(self.qs[1], self.qs[2])\n q_det = dif(ad, bc)\n\n elif self.dim == 9:\n aei = product(product(self.qs[0], self.qs[4]), self.qs[8])\n bfg = product(product(self.qs[3], self.qs[7]), self.qs[2])\n cdh = product(product(self.qs[6], self.qs[1]), self.qs[5])\n ceg = product(product(self.qs[6], self.qs[4]), self.qs[2])\n bdi = product(product(self.qs[3], self.qs[1]), self.qs[8])\n afh = product(product(self.qs[0], self.qs[7]), self.qs[5])\n\n sum_pos = add(aei, add(bfg, cdh))\n sum_neg = add(ceg, add(bdi, afh))\n\n q_det = dif(sum_pos, sum_neg)\n\n else:\n raise ValueError(\"Oops, don't know how to calculate the determinant of this one.\")\n\n return Qs(\n [q_det], qs_type=self.qs_type, rows=1, columns=1\n )", "def det(input):\n is_input_dparray = isinstance(input, dparray)\n\n if not use_origin_backend(input) and is_input_dparray:\n if input.shape[-1] == input.shape[-2]:\n result = dpnp_det(input)\n\n # scalar returned\n if result.shape == (1,):\n return result.dtype.type(result[0])\n\n return result\n\n return call_origin(numpy.linalg.det, input)", "def det(self):\n # XXX: At least the first three algorithms described above should also\n # be implemented in the pure Python DDM and SDM classes which at the\n # time of writing just use Bareiss for all matrices and domains.\n # Probably in Python the thresholds would be different though.\n return self.rep.det()", "def ensemble_determinant(self):\n return np.linalg.det(self.ensemble_transition_matrix)", "def determinant(v,w):\n return v[0] * w[1] - v[1] * w[0]", "def det(v_i, v_j):\n return (v_i[0] * v_j[1]) - (v_j[0] * v_i[1])", "def det(v_i, v_j):\n return (v_i[0] * v_j[1]) - (v_j[0] * v_i[1])", "def det3(m):\n\ta, b, c = m[0]\n\tda = det2([ m[1][1:] , m[2][1:]])\n\tdb = det2([[m[1][0],m[1][2]],[m[2][0],m[2][2]]])\n\tdc = det2([ m[1][:2] , m[2][:2]])\n\treturn a*da - b*db + c*dc", "def test_det(self, a, dete):\n detc = det(a)\n assert np.isclose(detc, dete)", "def _safe_det_3x3(t: torch.Tensor):\n\n det = (\n t[..., 0, 0] * (t[..., 1, 1] * t[..., 2, 2] - t[..., 1, 2] * t[..., 2, 1])\n - t[..., 0, 1] * (t[..., 1, 0] * t[..., 2, 2] - t[..., 2, 0] * t[..., 1, 2])\n + t[..., 0, 2] * (t[..., 1, 0] * t[..., 2, 1] - t[..., 2, 0] * t[..., 1, 1])\n )\n\n return det", "def __det3x3__(a):\r\n # val = +a[0,0] * ( a[1,1] * a[2,2] - a[2,1] * a[1,2] )\r\n # val += -a[0,1] * ( a[1,0] * a[2,2] - a[2,0] * a[1,2] )\r\n # val += +a[0,2] * ( a[1,0] * a[2,1] - a[2,0] * a[1,1] )\r\n val = +a[0] * (a[4] * a[8] - a[7] * a[5])\r\n val += -a[1] * (a[3] * a[8] - a[6] * a[5])\r\n val += +a[2] * (a[3] * a[7] - a[6] * a[4])\r\n return val", "def testAlgn(x,y):\n\n A = numpy.ones((3,3))\n A[:,1] = x\n A[:,2] = y\n return numpy.linalg.det(A)", "def compute_det(self, log_progress=False):\n if not
self.is_square():\n raise Exception(u\"Not a square matrix\")\n\n mat = clone_matrix(self.coefficients)\n size = self.get_size()[0]\n\n for i in range(size - 1):\n for j in range(i + 1, size):\n for k in range(i + 1, size):\n mat[j][k] = (mat[j][k] * mat[i][i]) - (mat[j][i] * mat[i][k])\n if i > 0:\n mat[j][k] //= mat[i - 1][i - 1]\n if log_progress:\n print(i)\n if i > 0:\n for j in range(size):\n mat[j][i - 1] = 0\n mat[i - 1][j] = 0\n\n return mat[size - 1][size - 1]", "def det(self, colBy = 0):\n try:\n if not 0 <= colBy < self.getColCount(): raise self.matrixBadDimension('An invalid matrix column number was given.\nGiven: %s' % (colBy,))\n if self.getColCount() != self.getRowCount() or not self.matrix: return None\n if self.getColCount() == 1: return self[0,0]\n except self.matrixException as e:\n print \"Exception in A.det(colBy = %d)!\nA = \n%s\n\" % (colBy, indent(self))\n return None\n else:\n return reduce(lambda x,y: x+y, [(-1)**(i+colBy) * self[i,colBy] * self.minor(i,colBy).det() for i in range(self.getColCount())])", "def det(v1, v2):\n\treturn v1[0] * v2[1] - v1[1] * v2[0]", "def det_2x2(matrix: FieldMatrix) -> FlowFieldVal:\n _validate_matrix_shape(matrix, (2, 2))\n\n det = lambda a, b, c, d: a * d - b * c\n\n a, b = matrix[0]\n c, d = matrix[1]\n\n return tf.nest.map_structure(det, a, b, c, d)", "def adj_poly_det(self, cp=None):\n\n # Cayley-Hamilton says that a matrix satisfies its own minimal\n # polynomial\n #\n # p[0]*A^n + p[1]*A^(n-1) + ... + p[n]*I = 0\n #\n # with p[0]=1 and p[n]=(-1)^n*det(A) or\n #\n # det(A)*I = -(-1)^n*(p[0]*A^(n-1) + p[1]*A^(n-2) + ... + p[n-1]*A).\n #\n # Define a new polynomial f with f[i] = -(-1)^n*p[i] for i=0..n-1. Then\n #\n # det(A)*I = f[0]*A^n + f[1]*A^(n-1) + ... + f[n-1]*A.\n #\n # Multiplying on the right by inv(A) gives\n #\n # det(A)*inv(A) = f[0]*A^(n-1) + f[1]*A^(n-2) + ...
+ f[n-1].\n #\n # So adj(A) = det(A)*inv(A) = f(A)\n\n A = self\n m, n = self.shape\n\n if m != n:\n raise DMNonSquareMatrixError(\"Matrix must be square\")\n\n if cp is None:\n cp = A.charpoly()\n\n if len(cp) % 2:\n # n is even\n detA = cp[-1]\n f = [-cpi for cpi in cp[:-1]]\n else:\n # n is odd\n detA = -cp[-1]\n f = cp[:-1]\n\n return f, detA", "def pddet(A):\r\n L = jitchol(A)\r\n logdetA = 2*sum(np.log(np.diag(L)))\r\n return logdetA", "def ar_trid_det(A):\n\n if T < 5:\n return(np.linalg.det(A))\n else:\n gamma = A[0,0]\n alpha = A[1,1]\n beta = A[1,2]\n\n # Get determinant of \"middle matrices\"\n d1 = np.exp(td_const_ldet(alpha, beta, T-2))\n print(np.abs(np.linalg.det(A[1:-1,1:-1]) - d1))\n d2 = np.exp(td_const_ldet(alpha, A[1,2], T-3))\n print(np.abs(np.linalg.det(A[2:-1,2:-1]) - d2))\n d3 = np.exp(td_const_ldet(alpha, A[1,2], T-4))\n print(np.abs(np.linalg.det(A[3:-1,3:-1]) - d3))\n\n # Get determinant of all except last col/row\n dJ1 = gamma * d1 - pow(beta,2)*d2\n print(np.abs(np.linalg.det(A[:-1,:-1]) - dJ1))\n dJ2 = gamma * d2 - pow(beta,2)*d3\n print(np.abs(np.linalg.det(A[:-2,:-2]) - dJ2))\n\n # Get determinant of the whole enchilada.\n detA = gamma * dJ1 - pow(beta,2)*dJ2\n print(detA - np.linalg.det(A))", "def find_determinant(self, leftmost: Point, rightmost: Point, point: Point):\n x1, y1 = leftmost\n x2, y2 = rightmost\n x3, y3 = point\n\n result = (x1 * y2) + (x3 * y1) + (x2 * y3) - (x3 * y2) - (x2 * y1) - (x1 * y3)\n return result", "def det_3x3(matrix: FieldMatrix) -> FlowFieldVal:\n _validate_matrix_shape(matrix, (3, 3))\n\n det2 = lambda a, b, c, d: a * d - b * c\n\n def det3(a, b, c, d, e, f, g, h, i):\n return a * det2(e, f, h, i) - b * det2(d, f, g, i) + c * det2(d, e, g, h)\n\n a, b, c = matrix[0]\n d, e, f = matrix[1]\n g, h, i = matrix[2]\n\n return tf.nest.map_structure(det3, a, b, c, d, e, f, g, h, i)", "def cayley_menger_det_no_linalg(x2, y2, z2, xb2, yb2, zb2):\n xs = x2 + xb2\n ys = y2 + yb2\n zs = z2 + zb2\n buf1 = ys + zs\n buf1 -= xs\n buf2 = x2 * xb2\n buf1 *= buf2 # buf1 has first term, halved\n np.multiply(y2, yb2, out=buf2)\n buf3 = xs + zs\n buf3 -= ys\n buf2 *= buf3 # buf2 has second term\n buf1 += buf2 # buf1 is sum of two terms, halved\n np.multiply(z2, zb2, out=buf3)\n np.add(xs, ys, out=buf2) # reuse buf2\n buf2 -= zs\n buf3 *= buf2 # buf3 has third term\n buf1 += buf3 # buf1 is sum of 3 first terms, halved\n buf1 *= 2\n np.subtract(x2, xb2, out=buf2)\n np.subtract(y2, yb2, out=buf3)\n buf2 *= buf3\n np.subtract(z2, zb2, out=buf3)\n buf2 *= buf3\n buf1 += buf2 # buf1 is sum of 4 first terms\n np.multiply(xs, ys, out=buf3)\n buf3 *= zs\n buf1 -= buf3\n return buf1", "def tridiag_det(A):\n if (type(A) is not np.ndarray):\n raise ValueError(\"A must be a np.ndarray\")\n if (A.shape[0] is not A.shape[1]):\n raise ValueError(\"A must be square.\")\n N = A.shape[0]\n f = np.empty(N+2)\n f[0] = 0\n f[1] = 1\n f[2] = A[1,1]\n for n in range(3,N+2):\n f[n] = A[n-2,n-2] * f[n-1] - A[n-2,n-3] * A[n-3,n-2] * f[n-2]\n\n return(f[-1])", "def determinant_recursive(A, total=0):\n # Section 1: store indices in list for flexible row referencing\n indices = list(range(len(A)))\n \n # Section 2: when at 2x2 submatrices recursive calls end\n if len(A) == 2 and len(A[0]) == 2:\n val = A[0][0] * A[1][1] - A[1][0] * A[0][1]\n return val\n\n # Section 3: define submatrix for focus column and call this function\n for fc in indices: # for each focus column, find the submatrix ...\n As = copy_matrix(A) # make a copy, and ...\n As = As[1:] # ... 
remove the first row\n height = len(As)\n\n for i in range(height): # for each remaining row of submatrix ...\n As[i] = As[i][0:fc] + As[i][fc+1:] # remove the focus column elements\n\n sign = (-1) ** (fc % 2) # alternate signs for submatrix multiplier\n sub_det = determinant_recursive(As) # pass submatrix recursively\n total += sign * A[0][fc] * sub_det # total all returns from recursion\n\n return total", "def inverse_3by3_double(M):\n if len(M.shape) > 1:\n M = M.flatten()\n\n M = np.array(M, 'float')\n\n determinant = 0.\n adj_M = np.zeros((9,), 'float')\n\n # First row of adjugate matrix\n adj_M[0] = (M[4] * M[8] - M[7] * M[5]) # Det #0\n adj_M[1] = -(M[1] * M[8] - M[7] * M[2])\n adj_M[2] = (M[1] * M[5] - M[4] * M[2])\n\n # Second row of adjugate matrix\n adj_M[3] = -(M[3] * M[8] - M[6] * M[5]) # Det #1\n adj_M[4] = (M[0] * M[8] - M[6] * M[2])\n adj_M[5] = -(M[0] * M[5] - M[3] * M[2])\n\n # Third row of adjugate matrix\n adj_M[6] = (M[3] * M[7] - M[6] * M[4]) # Det #2\n adj_M[7] = -(M[0] * M[7] - M[6] * M[1])\n adj_M[8] = (M[0] * M[4] - M[3] * M[1])\n\n determinant += M[0] * adj_M[0]\n determinant += M[1] * adj_M[3] # Using addition since minus is integrated in adjugate matrix.\n determinant += M[2] * adj_M[6]\n\n return (adj_M / determinant)", "def calc_metric3(K_tilda):\n trace = np.trace(K_tilda)\n # determinant = np.linalg.det(K_tilda)\n _, log_determinant = np.linalg.slogdet(K_tilda)\n diff = trace - log_determinant\n print(trace, log_determinant, diff)\n return diff", "def inverseN(self):\r\n result = Matrix(self.rows, self.columns)\r\n for r in range(self.rows):\r\n for c in range(self.columns):\r\n result.mat[r][c] = self.cofactor(r, c)\r\n result.out()\r\n result = result.transpose()\r\n det = self.determinant()\r\n print(\"1/(\" + str(det) + \")\")\r\n result.out()\r\n return result", "def local_det_chol(node):\r\n if node.op == det:\r\n x, = node.inputs\r\n for (cl, xpos) in x.clients:\r\n if isinstance(cl.op, Cholesky):\r\n L = cl.outputs[0]\r\n return [tensor.prod(extract_diag(L) ** 2)]", "def det_nth_root(X, method='lu'):\n N = float(X.shape[0])\n if method == 'lu':\n P, L, U = scipy.linalg.lu(X)\n diags = (np.diag(L) ** (1 / N) * (np.diag(U) ** (1 / N)))\n determinant = np.product(diags)\n elif method == 'eig':\n L = np.linalg.eigvalsh(X)\n determinant = np.product(L ** (1 / float(L.size)))\n elif method == 'qr':\n (R,) = scipy.linalg.qr(X, mode='r')\n determinant = np.product(np.abs(np.diag(R)) ** (1 / N))\n else:\n raise Exception('method not understood')\n\n return np.nan_to_num(determinant)", "def test_xdet(self):\n sol = Mader(p_cj=3.0e11, d_cj=8.0e5, gamma=3.0, u_piston=0.0)\n r = np.array([0.7, 0.8])\n t = 6.25e-6\n solrt = sol(r, t)\n np.testing.assert_allclose(solrt.xdet[0], 4.3)", "def inverse_3by3_int64(M, return_determinant=True):\n if len(M.shape) > 1:\n M = M.flatten()\n\n determinant = np.int64(0)\n adj_M = np.zeros((9,), 'int64')\n\n # First row of adjugate matrix\n adj_M[0] = (M[4] * M[8] - M[7] * M[5]) # Det #0\n adj_M[1] = -(M[1] * M[8] - M[7] * M[2])\n adj_M[2] = (M[1] * M[5] - M[4] * M[2])\n\n # Second row of adjugate matrix\n adj_M[3] = -(M[3] * M[8] - M[6] * M[5]) # Det #1\n adj_M[4] = (M[0] * M[8] - M[6] * M[2])\n adj_M[5] = -(M[0] * M[5] - M[3] * M[2])\n\n # Third row of adjugate matrix\n adj_M[6] = (M[3] * M[7] - M[6] * M[4]) # Det #2\n adj_M[7] = -(M[0] * M[7] - M[6] * M[1])\n adj_M[8] = (M[0] * M[4] - M[3] * M[1])\n\n if return_determinant:\n if ((np.log2(np.abs(M[0])) + np.log2(np.abs(adj_M[0]))) > 63 or\n (np.log2(np.abs(M[1])) + 
np.log2(np.abs(adj_M[1]))) > 63 or\n (np.log2(np.abs(M[2])) + np.log2(np.abs(adj_M[6]))) > 63):\n print(\"inverse_3by3_int64: Overflow in determinant calculation!\")\n determinant += int(M[0]) * int(adj_M[0])\n determinant += int(M[1]) * int(adj_M[3]) # Using addition since minus is integrated in adjugate matrix.\n determinant += int(M[2]) * int(adj_M[6])\n else:\n determinant += np.int64(M[0]) * np.int64(adj_M[0])\n determinant += np.int64(M[1]) * np.int64(adj_M[3]) # Using addition since minus is integrated in adjugate matrix.\n determinant += np.int64(M[2]) * np.int64(adj_M[6])\n return adj_M, determinant\n else:\n return adj_M", "def decomposeLU(self):\n self.check_square()\n\n N = self.rows\n L = make_matrix(N, N)\n U = make_matrix(N, N)\n A = self #for more math friendly notation\n\n\n for j in range(N):\n L[j, j] = 1.0 #Doolittle factorization\n\n #e.g., if you are in column = 5, you go down 6 rows\n for i in range(j+1):\n U[i, j] = A[i, j] - sum(L[i, k] * U[k, j] for k in range(i))\n #e.g., if you are in column = 5,\n # you start at row 5 and go down for the lower triangular matrix\n for i in range(j, N):\n L[i, j] = (A[i, j] - sum(L[i, k] * U[k, j] for k in range(j))) / U[j, j]\n\n self.L = L\n self.U = U\n return L, U", "def inverse_symmetric_3by3_double(M):\n\n determinant = 0\n adj_M = np.zeros((9,), dtype='float')\n\n # First row of adjugate matrix\n adj_M[0] = (M[3] * M[5] - (M[4] ** 2)) # Det #0\n adj_M[1] = -(M[1] * M[5] - M[4] * M[2]) # Det #1\n adj_M[2] = (M[1] * M[4] - M[3] * M[2]) # Det #2\n\n # Second row of adjugate matrix\n adj_M[3] = adj_M[1]\n adj_M[4] = (M[0] * M[5] - (M[2] ** 2))\n adj_M[5] = -(M[0] * M[4] - M[1] * M[2])\n\n # Third row of adjugate matrix\n adj_M[6] = adj_M[2]\n adj_M[7] = adj_M[5]\n adj_M[8] = (M[0] * M[3] - (M[1] ** 2))\n\n determinant += M[0] * adj_M[0]\n determinant += M[1] * adj_M[1] # Using addition since minus is integrated in adjugate matrix.\n determinant += M[2] * adj_M[2]\n\n return adj_M / determinant", "def log_abs_det_jacobian(self, z):\n pre_u = self.u_ + self.u\n pre_w = self.w_ + self.w\n a = F.softplus(self.a + self.inv)\n w = F.softmax(pre_w, dim=3)\n u = F.softmax(pre_u, dim=3)\n # Perform computation\n pre_sigm = torch.sum(u * a * z, 3) + self.b\n sigm = torch.sigmoid(pre_sigm)\n x_pre = torch.sum(w * sigm, dim=3)\n x_pre_clipped = x_pre * (1 - self.eps) + self.eps * 0.5\n logj = F.log_softmax(pre_w, dim=3) + logsigmoid(pre_sigm) + logsigmoid(-pre_sigm) + torch.log(a)\n # n, d, d2, dh\n logj = logj + F.log_softmax(pre_u, dim=3)\n # n, d, d2, dh, d1\n logj = torch.log(torch.sum(torch.exp(logj),3))\n # n, d, d2, d1\n logdet_ = logj + np.log(1 - self.eps) - (torch.log(x_pre_clipped) + torch.log(-x_pre_clipped + 1))\n return logdet_", "def matrix_inv(mat):\n\ta = mat[0,0]\n\tb = mat[0,1]\n\tc = mat[0,2]\n\td = mat[1,0]\n\te = mat[1,1]\n\tf = mat[1,2]\n\tg = mat[2,0]\n\th = mat[2,1]\n\ti = mat[2,2]\n\n\tdet = b*f*g + c*d*h + a*e*i - a*f*h - b*d*i - c*e*g\n\n\tinvmat = np.zeros((3,3))\n\tinvmat[0,0] = (e*i - f*h) / det\n\tinvmat[0,1] = (c*h - b*i) / det\n\tinvmat[0,2] = (b*f - c*e) / det\n\tinvmat[1,0] = (f*g - d*i) / det\n\tinvmat[1,1] = (a*i - c*g) / det\n\tinvmat[1,2] = (c*d - a*f) / det\n\tinvmat[2,0] = (d*h - e*g) / det\n\tinvmat[2,1] = (b*g - a*h) / det\n\tinvmat[2,2] = (a*e - b*d) / det\n\treturn invmat", "def eigen_decomp(matrix):\n w = None\n v = None\n ### YOUR CODE HERE\n w,v=np.linalg.eig(matrix)\n ### END YOUR CODE\n return w, v", "def flow_det(z, params):\n lparams, mparams = np.split(params, 2)\n diag = 
(1-mask)*lfun(mask*z,lparams)\n if len(z.shape) > 1:\n return np.sum(diag, axis=1)\n else:\n return np.sum(diag)", "def resultant(P, Q):\n return np.linalg.det(P.sylvester(Q))", "def tanp_to_det(self, x, y):\n return x, y", "def test_mantel(self):\r\n a = reshape(arange(25), (5, 5))\r\n a = tril(a) + tril(a).T\r\n fill_diagonal(a, 0)\r\n b = a.copy()\r\n # closely related -- should be significant\r\n self.assertCorrectPValue(0.0, 0.049, mantel, (a, b, 1000))\r\n\r\n c = reshape(ones(25), (5, 5))\r\n c[0, 1] = 3.0\r\n c[1, 0] = 3.0\r\n fill_diagonal(c, 0)\r\n # not related -- should not be significant\r\n self.assertCorrectPValue(0.06, 1.0, mantel, (a, c, 1000))", "def crammer(self):\n \n matrix = [] # create a list for the matrices\n determinant = [] # create a list for the determinants\n \n # create the matrix for the determinant\n d = np.zeros(shape=(self.grado + 1, self.grado + 1))\n \n # Create the matrices (determinant + n matrices)\n for k in range(0, self.grado + 1) :\n # Create an empty matrix\n a = np.zeros(shape=(self.grado + 1, self.grado + 1))\n \n if k == 0 :\n # Fill the matrix\n for i in range(self.init + 1, self.init + self.grado + 2) :\n # Create the n matrices \n aux = []\n # fill each column\n for j in range(0, self.grado + 1):\n aux.append(mt.pow(sp.Float(self.dato2[i].get()), j))\n \n \n d[i - (self.init +1)] = aux\n \n \n if np.linalg.det(d) == 0: return False\n \n #print d\n #print np.linalg.det(d)\n matrix.append(d) # Store the matrix\n determinant.append(np.linalg.det(d)) # Store the determinant\n \n # Fill the matrix\n for i in range(self.init + 1, self.init + self.grado + 2) :\n # Create the n matrices \n aux = []\n # fill each column\n for j in range(0, self.grado + 1):\n if k == j :\n aux.append(sp.Float(self.dato1[i].get()))\n else :\n aux.append(mt.pow(sp.Float(self.dato2[i].get()), j))\n \n \n a[i - (self.init +1)] = aux\n \n \n #print a\n #print np.linalg.det(a)\n matrix.append(a) # Store the matrix\n determinant.append(np.linalg.det(a)) # Store the determinant\n \n\n # Save the information\n self.matrix = matrix\n self.determinant = determinant\n \n return True", "def plotDeterminant2D(A):\n # See: https://stackoverflow.com/questions/44881885/python-draw-parallelepiped\n \n # Will only execute if it is 2x2\n if (np.shape(A) != (2,2)):\n print('Matrix A must be 2x2.')\n return\n \n # Define vertices for a cube to multiply with input matrix A to get parallelepiped\n rect = np.array([[1, -1],\n [1, 1],\n [-1, 1],\n [-1, -1]])\n \n vertices = np.zeros((5,2))\n \n for i in range(4): vertices[i,:] = np.dot(rect[i,:], A)\n vertices[4,0] = vertices[0,0]\n vertices[4,1] = vertices[0,1]\n \n # Create figure / grid to plot\n fig = plt.figure(figsize=(10,5))\n ax = fig.add_subplot(111)\n \n # Plot vertices\n ax.plot(vertices[:,0], vertices[:,1])\n vol = np.abs(np.linalg.det(A)) # absolute value of the determinant\n vol = np.round(vol, decimals = 3)\n print(\"The area is:\", vol)\n\n coll = PolyCollection([vertices])\n ax.add_collection(coll)\n ax.autoscale_view()\n plt.grid()\n plt.show()", "def lu_decom(A,b):\n # init\n n = len(b)\n L = np.eye(n)\n U = np.zeros((n,n))\n x = np.zeros(n)\n y = np.zeros(n)\n\n # decomposition A = LU\n\n U[0,:] = A[0,:]\n L[1:,0] = A[1:,0] / U[0,0]\n\n for i in range(1,n):\n for j in range(i,n):\n\n U[i,j] = A[i,j] - np.dot(L[i,:i],U[:i,j])\n\n if j != n-1:\n L[j+1,i] = (A[j+1,i] - np.dot(L[j+1,:i],U[:i,i])) / U[i,i]\n\n # solve Ly=b\n y[0] = b[0]\n\n for k in range(1,n):\n y[k]
= b[k] - np.dot(L[k,:k],y[:k])\n\n # solve Ux=y\n x[-1] = y[-1] / U[-1,-1]\n\n for k in range(n-2,-1,-1):\n x[k] = (y[k] - np.dot(U[k,k+1:],x[k+1:])) / U[k,k]\n\n return x,L,U", "def discriminant(self):\r\n return self.__b**2 - (4 * self.__a * self.__c)", "def eigen_decomp(matrix):\n w = None\n v = None\n ### YOUR CODE HERE\n pass\n ### END YOUR CODE\n return w, v", "def calc_det_dzh(theta):\n return 919.49 - 27.018 * theta + 0.26209 * theta ** 2 - 0.00083803 * theta ** 3", "def test_suite():\n test(calc_det([[2, 1],[3, 4]]), 5)", "def fast_logdet(matrix):\n sign, ld = np.linalg.slogdet(matrix)\n if not sign > 0:\n return -np.inf\n return ld", "def test_decomposition_reconsturction(args):\n mat, = args # unpack\n if mat.shape[-1] > 0: # make sure matrix is diagonalizable\n assume(np.all(np.linalg.cond(mat) < 1e8))\n dec = gt.matrix.decompose_gf(mat)\n assert_allclose(dec.reconstruct(kind='full'), mat, atol=1e-10)\n assert_allclose(dec.reconstruct(kind='diag'),\n np.diagonal(mat, axis1=-2, axis2=-1), atol=1e-10)\n\n # symmetric\n sym_mat = mat + gt.matrix.transpose(mat)\n if sym_mat.shape[-1] > 0: # make sure matrix is diagonalizable\n assume(np.all(np.linalg.cond(sym_mat) < 1e8))\n dec = gt.matrix.decompose_sym(sym_mat)\n assert_allclose(dec.reconstruct(kind='full'), sym_mat, atol=1e-10)\n assert_allclose(dec.reconstruct(kind='diag'),\n np.diagonal(sym_mat, axis1=-2, axis2=-1), atol=1e-10)\n\n # Hermitian\n her_mat = mat + gt.matrix.transpose(mat).conj()\n if her_mat.shape[-1] > 0: # make sure matrix is diagonalizable\n assume(np.all(np.linalg.cond(her_mat) < 1e8))\n dec = gt.matrix.decompose_hamiltonian(her_mat)\n assert_allclose(dec.reconstruct(kind='full'), her_mat, atol=1e-10)\n assert_allclose(dec.reconstruct(kind='diag'),\n np.diagonal(her_mat, axis1=-2, axis2=-1), atol=1e-10)", "def test_decomposition_inverse(args):\n mat, = args # unpack\n # make sure `mat` is reasonable\n if mat.shape[-1] > 0: # make sure matrix is diagonalizable\n assume(np.all(np.linalg.cond(mat) < 1e8))\n inverse = np.linalg.inv(mat)\n dec = gt.matrix.Decomposition.from_gf(mat)\n assert_allclose(dec.reconstruct(1./dec.eig, kind='full'), inverse, atol=1e-10)\n assert_allclose(dec.reconstruct(1./dec.eig, kind='diag'),\n np.diagonal(inverse, axis1=-2, axis2=-1), atol=1e-10)\n\n # symmetric\n sym_mat = mat + gt.matrix.transpose(mat)\n if sym_mat.shape[-1] > 0: # make sure matrix is diagonalizable\n assume(np.all(np.linalg.cond(sym_mat) < 1e8))\n inverse = np.linalg.inv(sym_mat)\n dec = gt.matrix.decompose_sym(sym_mat)\n assert_allclose(dec.reconstruct(1./dec.eig, kind='full'), inverse, atol=1e-10)\n assert_allclose(dec.reconstruct(1./dec.eig, kind='diag'),\n np.diagonal(inverse, axis1=-2, axis2=-1), atol=1e-10)\n\n # Hermitian\n her_mat = mat + gt.matrix.transpose(mat).conj()\n if her_mat.shape[-1] > 0: # make sure matrix is diagonalizable\n assume(np.all(np.linalg.cond(her_mat) < 1e8))\n inverse = np.linalg.inv(her_mat)\n dec = gt.matrix.Decomposition.from_hamiltonian(her_mat)\n assert_allclose(dec.reconstruct(1./dec.eig, kind='full'), inverse, atol=1e-10)\n assert_allclose(dec.reconstruct(1./dec.eig, kind='diag'),\n np.diagonal(inverse, axis1=-2, axis2=-1), atol=1e-10)", "def singular_decomp(A):\n # Initialization\n n, m = A.shape\n U = np.zeros((n, m), dtype='float64')\n\n # Diagonalization of A^T * A\n rot, e, V = eigen.diag(np.dot(np.transpose(A), A))\n\n # Calculate U\n U = np.dot(A, V)\n for i in range(m):\n e[i] = np.sqrt(e[i])\n U[:, i] /= e[i]\n\n return U, e, V", "def log_abs_det_jacobian(self, z):\n 
self.a = F.softplus(self.a)\n self.w = F.softmax(self.w, dim=1)\n pre_sigm = self.a * z + self.b\n sigm = torch.sigmoid(pre_sigm)\n x_pre = self.w * sigm\n if (len(z.shape) > 2):\n x_pre = torch.sum(self.w * sigm, dim=1)\n x_pre_clipped = x_pre * (1 - self.eps) + self.eps * 0.5\n logj = F.log_softmax(self.w, dim=1) + logsigmoid(pre_sigm) + logsigmoid(-pre_sigm) + torch.log(self.a)\n logj = torch.log(torch.sum(torch.exp(logj)))#,2).sum(2)\n logdet = logj + np.log(1 - self.eps) - (torch.log(x_pre_clipped) + torch.log(-x_pre_clipped + 1))\n return sum_dims(logdet)", "def decomposition_method(matrix):\n x, y, z = 0, 1, 2 # indices\n K = np.array([\n [R[x, x]-R[y, y]-R[z, z], R[y, x]+R[x, y], R[z, x]+R[x, z], R[y, z]-R[z, y]],\n [R[y, x]+R[x, y], R[y, y]-R[x, x]-R[z, z], R[z, y]+R[y, z], R[z, x]-R[x, z]],\n [R[z, x]+R[x, z], R[z, y]+R[y, z], R[z, z]-R[x, x]-R[y, y], R[x, y]-R[y, x]],\n [R[y, z]-R[z, y], R[z, x]-R[x, z], R[x, y]-R[y, x], R[x, x]+R[y, y]+R[z, z]]\n ])\n K = K / 3.0\n\n e_vals, e_vecs = np.linalg.eig(K)\n print('Eigenvalues:', e_vals)\n print('Eigenvectors:', e_vecs)\n max_index = np.argmax(e_vals)\n principal_component = e_vecs[max_index]\n return principal_component", "def test_call_decompose(self):\n dec = TwoQubitDecomposeUpToDiagonal()\n u4 = scipy.stats.unitary_group.rvs(4, random_state=47)\n dmat, circ2cx = dec(u4)\n dec_diag = dmat @ Operator(circ2cx).data\n self.assertTrue(Operator(u4) == Operator(dec_diag))", "def calc_det_dz(theta):\n return abs(calc_det_dzh(theta)) + abs(calc_det_dzb(theta))", "def _dmatrix(kn_u, kn_d):\n d = np.zeros((kn_u.size, 4, 4), np.complex128)\n d_inv = np.zeros_like(d)\n\n d[:, 0, 0] = 1\n d[:, 0, 1] = 1\n d[:, 1, 0] = kn_u\n d[:, 1, 1] = -kn_u\n\n d[:, 2, 2] = 1\n d[:, 2, 3] = 1\n d[:, 3, 2] = kn_d\n d[:, 3, 3] = -kn_d\n\n # an analytic matrix inverse saves time\n inv_kn_u = 0.5 / kn_u\n inv_kn_d = 0.5 / kn_d\n\n d_inv[:, 0, 0] = 0.5\n d_inv[:, 0, 1] = inv_kn_u\n d_inv[:, 1, 0] = 0.5\n d_inv[:, 1, 1] = -inv_kn_u\n\n d_inv[:, 2, 2] = 0.5\n d_inv[:, 2, 3] = inv_kn_d\n d_inv[:, 3, 2] = 0.5\n d_inv[:, 3, 3] = -inv_kn_d\n\n return d, d_inv", "def disagreement(self):\n return 0.5*(np.dot(np.dot(np.transpose(self.x),self.L),self.x)).item(0)", "def test_LU(self):\n A = np.random.rand(10, 10)\n MA = to_matrix(A)\n ML, MU = MA.decomposeLU()\n self.assertEqual(ML*MU, MA)\n self.assertTrue(ML.is_lower_triangular())\n self.assertTrue(MU.is_upper_triangular())", "def modalDiffMatrix(n):\n k = np.arange(n)\n a = (-1)**k\n A = sp.triu(1-np.outer(a,a))\n D = np.dot(A,np.diag(k))\n D[0,:] = D[0,:]/2\n return D", "def inverse(self):\n myMatrix = np.array(self.Matrix)\n if np.linalg.det(myMatrix) == 0:\n print(\"This matrix has a determinant of 0, meaning it has no inverse\")\n else:\n self.Inverse = np.linalg.inv(myMatrix)\n print(\"This is the inverse to your matrix: \", self.Inverse)", "def detr(self, Am, total=0):\r\n # Section 1: store indices in list for flexible row referencing\r\n indices = list(range(len(Am)))\r\n \r\n # Section 2: when at 2x2 submatrices recursive calls end\r\n if len(Am) == 2 and len(Am[0]) == 2:\r\n val = Am[0][0] * Am[1][1] - Am[1][0] * Am[0][1]\r\n return val\r\n \r\n # Section 3: define submatrix for focus column and call this function\r\n for fc in indices: # for each focus column, find the submatrix ...\r\n As = self.copy_matrix(Am) # make a copy, and ...\r\n As = As[1:] # ... 
remove the first row\r\n height = len(As)\r\n \r\n for i in range(height): # for each remaining row of submatrix ...\r\n As[i] = As[i][0:fc] + As[i][fc+1:] # zero focus column elements\r\n \r\n sign = (-1) ** (fc % 2) # alternate signs for submatrix multiplier\r\n sub_det = self.detr(As) # pass submatrix recursively\r\n total += sign * Am[0][fc] * sub_det # total all returns from recursion\r\n \r\n return total", "def check_non_singular(self, Am):\r\n det = self.detf(Am)\r\n if det != 0:\r\n return det\r\n else:\r\n raise ArithmeticError(\"Singular Matrix!\")", "def inverse(self):\n # find the determinant of the matrix\n determinant = self.determinant()\n # find the matrix of minors of the matrix\n matrix_of_minors = self.matrix_of_minors()\n # find the cofactor of the matrix of minors\n cofactor_matrix = self.cofactor_matrix(matrix_of_minors)\n # find the transpose of the cofactor matrix\n transpose_cofactor_matrix = self.transpose(cofactor_matrix)\n # find the adjugate (inverse) matrix\n inverse_matrix = self.adjugate_matrix(determinant, transpose_cofactor_matrix)\n\n return inverse_matrix" ]
[ "0.83127224", "0.820862", "0.81752694", "0.81515414", "0.8125667", "0.8122804", "0.8108574", "0.8040401", "0.7995659", "0.79841894", "0.79394454", "0.78940946", "0.78786963", "0.7800863", "0.77844036", "0.77780765", "0.7775752", "0.77719444", "0.7747811", "0.7714111", "0.76515764", "0.7616039", "0.7608849", "0.7571796", "0.74697924", "0.7447279", "0.7428655", "0.7410307", "0.7398362", "0.73868865", "0.73373884", "0.7257629", "0.72372955", "0.7222028", "0.71645117", "0.7127997", "0.70594627", "0.69907147", "0.6957366", "0.68874997", "0.68849736", "0.6878263", "0.6859686", "0.6859686", "0.6827958", "0.6674012", "0.66071016", "0.66053593", "0.66034055", "0.65900385", "0.6539863", "0.64894956", "0.64745843", "0.6378487", "0.634063", "0.6328731", "0.6317221", "0.62066674", "0.6124167", "0.60841495", "0.60830134", "0.598877", "0.59613883", "0.58746934", "0.5841483", "0.57599795", "0.5719658", "0.5671255", "0.5663616", "0.5647514", "0.5643762", "0.56103945", "0.5592065", "0.5575654", "0.55652404", "0.55424106", "0.5536913", "0.55114424", "0.5507731", "0.55062705", "0.54946995", "0.5494274", "0.54870105", "0.54860383", "0.54683053", "0.54369444", "0.54250056", "0.5403005", "0.54013926", "0.5396052", "0.53946614", "0.5394332", "0.5370367", "0.5358962", "0.5357049", "0.5352792", "0.5348647", "0.53211534", "0.5276462", "0.52741736" ]
0.73150444
31
Read SQL table from database and return as dataframe.
def get_db_table(table: str, index_col='id'): # Read url from secret environment variable. Set this in your CI environment. url = os.getenv('DATABASE_URL') if url is None: logging.error("Environment variable DATABASE_URL not set.") return pd.DataFrame() # Create an engine instance. engine = create_engine(url, pool_recycle=3600) # Connect to PostgreSQL server. conn = engine.connect() # Read data from PostgreSQL database table and load into a DataFrame instance. dataFrame = pd.read_sql(f"select * from \"{table}\"", conn, index_col=index_col) # Close the database connection. conn.close() return dataFrame
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_data_from_database(query, db_connection):\n\n dataframe = pandas.read_sql(query, con=db_connection)\n print(\"Data from database: \", dataframe.head(5))\n print(\"Size of dataframe from database: \", dataframe.shape)\n\n return dataframe", "def open_data(table):\n engine = create_engine(myDB, encoding='latin1') \n conn = engine.connect()\n select = conn.execute('select * from ' + table)\n\n df = pd.DataFrame(select.fetchall()) \n df.columns = select.keys()\n\n conn.close()\n return df", "def read_table(self, db, table_name):\n engine = self.connect_to_database(db=db)\n df = pd.read_sql_table(table_name=table_name, con=engine)\n engine.connect().connection.close()\n return df", "def fetch_db_dataframe(self, table_name):\n try:\n df = pd.read_sql(\"SELECT * from \" + table_name, self.engine)\n print(\"-I- Completed read of DataFrame from \" + table_name)\n return df\n except Exception as e:\n print(\"-W- \" + str(e))", "def sqlite_to_df(db_name: str, table_name: str):\n df = pd.read_sql(\"SELECT * FROM \" + table_name, __engine)\n # also see read_sql_query() and read_sql_table()\n\n return df", "def get_sql_table_as_df(conn, table, columns=['*'], db=IEDC_pass.IEDC_database,\n index='id', addSQL=''):\n # Don't show this to anybody, please. SQL injections are a big nono...\n # https://www.w3schools.com/sql/sql_injection.asp\n columns = ', '.join(c for c in columns if c not in \"'[]\")\n df = pd.read_sql(\"SELECT %s FROM %s.%s %s;\" % (columns, db, table, addSQL),\n conn, index_col=index)\n return df", "def read_sql_query(sql: str, con: RdsDataApi, database: Optional[str] = None) -> pd.DataFrame:\n return con.execute(sql, database=database)", "def load_table_as_pd(conn, tablename: str):\n # get table as a pandas dataframe\n statement = f\"\"\"\n SELECT *\n FROM '{tablename}';\n \"\"\"\n df = pd.read_sql_query(statement, conn)\n return df", "def read(name, db):\n \n # Make connection with the database\n\tconn = sqlite3.connect(db)\n\tdf = pd.read_sql_query(\"select * from \" + name + ';', conn)\n \n # Print loaded data table name and return DataFrame\n\tprint(name + ': loaded')\n\treturn df", "def query(self, sql):\n df = pd.read_sql(sql, self.conn)\n return df", "def get_df_from_db(self, query):\n cursor = self.conn.cursor()\n cursor.execute(\"set hive.execution.engine = tez\")\n cursor.execute(\"set tez.queue.name = sephora_internal\")\n cursor.execute(query)\n data = cursor.fetchall()\n col_des = cursor.description\n col_des = [tuple([x[0].split('.')[1] if '.' 
in x[0] else x[0]] + list(x[1:])) for x in col_des]\n col_name = [col_des[i][0] for i in range(len(col_des))]\n df = pd.DataFrame([list(i) for i in data], columns=col_name)\n return df", "def query_to_df(db, sql):\n conn_string = return_connection(db)\n with pg2.connect(conn_string) as conn:\n return psql.read_sql(sql, conn)", "def query(statement, con=None, params=None):\n if con is None:\n con = get_connection()\n table = pd.io.sql.read_sql(statement, con, params=params)\n return table", "def get_dataframe(q):\n cnx = create_engine(postgres_str)\n query = q\n return pd.read_sql_query(query, cnx)", "def read_sql(self, sql_query: str, **kwargs) -> pd.DataFrame:\n connection_infos = {\n param: getattr(self, param) for param in [\"host\", \"port\", \"dbname\", \"user\"]\n }\n connection_infos[\"password\"] = pgpasslib.getpass(**connection_infos)\n connection = pg.connect(**connection_infos)\n if self.schema:\n connection.cursor().execute(f\"SET SCHEMA '{self.schema}'\")\n\n df = pd.read_sql(sql_query, con=connection, **kwargs)\n\n connection.close()\n return df", "def get_db_data(self, sql_string):\n connection_string = f\"\"\"\n host='{self.host}' \n dbname='{self.db_name}' \n user='{self.user}' \n password='{self.password}' \n port='{self.port}'\n \"\"\"\n\n with psycopg2.connect(connection_string) as connection:\n cursor = connection.cursor()\n cursor.execute(sql_string)\n\n dataframe = pd.DataFrame(cursor.fetchall())\n dataframe.columns = [desc[0] for desc in cursor.description]\n\n return dataframe", "def db_to_df(query):\n conn = loader.database._connection\n return sql.read_frame(query, conn)", "def read_table(self, table_name):\n return pd.read_sql_table(table_name=table_name, con=self.engine)", "def get_df_from_db(self, query):\n cursor = self.conn.cursor()\n cursor.execute(query)\n data = cursor.fetchall()\n col_des = cursor.description\n col_des = [tuple([x[0].split('.')[1] if '.' in x[0] else x[0]] + list(x[1:])) for x in col_des]\n col_name = [col_des[i][0] for i in range(len(col_des))]\n ret_df = pd.DataFrame([list(i) for i in data], columns=col_name)\n return ret_df", "def read_sql(qu, conn):\n df = pd.read_sql(qu, conn)\n df.columns = [col.lower() for col in df.columns] \n \n return df", "def load_table(conn, table_name):\n return pd.read_sql_query(\"SELECT * FROM \" + table_name, conn)", "def get_data(self):\n\n return pd.read_sql_query(\"Select * from {table}\".format(table=self.table_name), con=self.con)", "def readDBtable(self, tablename, limit=None, selectOptions=None,\n filterOptions=None, orderOptions=None):\n\n try:\n\n sqlQuery = 'SELECT '\n if selectOptions:\n sqlQuery = sqlQuery + selectOptions\n else:\n sqlQuery = sqlQuery + '*'\n\n sqlQuery = sqlQuery + ' FROM ' + tablename + ' '\n\n if filterOptions:\n sqlQuery = sqlQuery + ' WHERE ' + filterOptions\n\n if orderOptions:\n sqlQuery = sqlQuery + ' ORDER BY ' + orderOptions\n\n if limit:\n sqlQuery = sqlQuery + ' LIMIT ' + str(limit)\n\n # This is to update the connection to changes by other\n # processes.\n self._conn.commit()\n\n # Return the pandas dataframe. 
Note that numbers in text format\n # are not converted to\n return pd.read_sql(sqlQuery, con=self._conn,\n coerce_float=False)\n\n except Exception as E:\n print(str(E))\n print('Error in query:', sqlQuery)\n return", "def table_to_df(db_name, table_name):\n return sqlContext.table(\"{0}.{1}\".format(db_name, table_name))", "def load_data(database_filepath, table_name):\r\n # instance to the database engine\r\n engine = create_engine('sqlite:///{}'.format(database_filepath))\r\n\r\n # read form the database table\r\n df = pd.read_sql_table(table_name, con=engine)\r\n\r\n return df # return our df\r", "def acquire_only(db,query):\n url = get_connection(db)\n df = pd.read_sql(query, url)\n return df", "def load_renter_data():\n return pd.read_sql_query(_sql_query, _con)", "def df_from_table(query, carto_sql_client, index=None):\n resp = carto_sql_client.send(query)\n schema = transform_schema(resp['fields'])\n if index:\n return pd.DataFrame(resp['rows']).set_index('cartodb_id').astype(schema)\n else:\n return pd.DataFrame(resp['rows']).astype(schema)", "def load_sql(df):\n engine = create_engine(f'postgres://{user}:{user}@{host}:{port}/{db}')\n df = df.to_sql(table, engine, if_exists='append')\n logging.debug(str(df)) # logging\n # return df\n #print(df.shape)", "def sql_query_fetch_df(self,sql,primary_key = None):\n\t\tif not self.connected:\n\t\t\tprint ('db not connected yet. Do connect first')\n\t\t\treturn\n\t\tresults = pd.read_sql_query(sql,self.__engine,index_col = primary_key)\n\t\treturn results", "def _get_df_from_db(self, tab_name: str, cols: list or str = \"*\",\n condition: str or None = None, limit: int or None = None):\n cols = ', '.join(cols) if cols != '*' else cols\n sql_query = \"\"\"SELECT {cols} FROM {tab} \"\"\".format(cols=cols, tab=tab_name)\n if condition:\n sql_query += \"\"\"WHERE {cond} \"\"\".format(cond=condition)\n if limit:\n sql_query += \"\"\"LIMIT {l}\"\"\".format(l=limit)\n df = pd.read_sql(sql_query, self.engine)\n return df", "def load_table(self, db_name, table_name, **kwargs):\n\n # Create Connection\n engine, connection = self.create_connection(db_name)\n\n # Check if table exists and read\n if engine.dialect.has_table(engine, table_name):\n sql = 'SELECT * FROM %s' % table_name\n\n # Prevent duplicate keys\n kwargs.pop(\"sql\", None)\n kwargs.pop(\"con\", None)\n kwargs.pop(\"coerce_float\", None)\n\n result = pd.read_sql(sql=sql, con=connection, coerce_float=True, **kwargs)\n else:\n print(table_name, \"does not exist\")\n result = None\n\n # Close connection\n connection.close()\n\n return result", "def create_dataframe(connection: sqlite3.Connection) -> pd.DataFrame:\n dataframe = pd.read_sql_query(f\"\"\"\n SELECT\n combined_jobs.id, combined_jobs.company, combined_jobs.link, combined_jobs.location,\n combined_jobs.date, combined_jobs.content, combined_jobs.title, location_cache.location,\n location_cache.latitude, location_cache.longitude\n FROM\n combined_jobs\n LEFT OUTER JOIN\n location_cache on (combined_jobs.location = location_cache.location)\"\"\",\n connection)\n print(dataframe)\n return dataframe", "def GetDataFromSqlToDF(con: Union[Dict[str, str], pyodbc.Connection], sql_string: str, win_auth: bool=False) -> pd.DataFrame:\r\n if type(con) == dict:\r\n con = StartConnection(con['driver'], con['server'], con['db'], con['user'], con['pw'], win_auth)\r\n try:\r\n df = pd.read_sql_query(sql_string, con)\r\n col_name_list = list(df.columns.get_values())\r\n \r\n print(\"Finish Execution\")\r\n print(\"{} records are 
found.\".format(len(df)))\r\n msg = \"Fields of data: \\n\"\r\n for field in enumerate(col_name_list):\r\n msg = msg + str(field[0]) + \": \" + str(field[1]) + \"\\n\"\r\n print(msg, \"\\n\")\r\n\r\n con.close()\r\n return df\r\n\r\n except pyodbc.ProgrammingError:\r\n data, col = ExecQuery(con, sql_string, True)\r\n df = FetchQueryResultToDF(data, col)\r\n con.close()\r\n return df", "def OSW2df(osw_file, table_name):\n conn = connOSW(osw_file)\n df = pd.read_sql_query(\"SELECT * FROM \" + table_name, conn)\n conn.close()\n return df", "def sql_return_df(query, params, date_cols):\n conn = sqlite3.connect(db_filepath)\n df = pd.read_sql(query, conn, params=params, parse_dates=date_cols)\n conn.close()\n return df", "def sql_execute(sql_query, create_con_obj=None, n_row=0):\r\n\r\n if create_con_obj is None:\r\n create_con_obj = create_connection()\r\n print (sql_query)\r\n df = pd.read_sql(sql_query, create_con_obj)\r\n print (df.head(2))\r\n\r\n return df", "def read_as_pandas_dataframe(self, sql_query, params=None):\n return pandas.read_sql_query(sql_query, self._conn, params=params)", "def get_df_mysql(tablename, columns=\"*\", engine=None):\n\n if engine is None:\n engine = _get_mysql_engine()\n\n query = \"SELECT {} FROM {}\".format(columns, tablename)\n\n with engine.connect() as connection:\n return pd.read_sql_query(query, connection)", "def fetch_dataset(query: str):\n\n credentials = _parse_credentials(query)\n prepared_query = _prepare_query(query)\n\n connection = connect(**credentials)\n df = pd.io.sql.read_sql_query(prepared_query, connection)\n connection.close()\n\n return df", "def select_from_table(self, table_name):\n sql_str = \"SELECT * FROM {tb}\".format(tb=table_name)\n cur = self.conn.cursor()\n cur.execute(sql_str)\n names = [description[0] for description in cur.description]\n\n rows = cur.fetchall()\n\n df = pd.DataFrame(rows, columns =names) \n\n return df", "def get_training_data(db_conn):\n return pd.read_sql('''select * from churn_model.churn_data;''', db_conn)", "def sql(q, database_url):\r\n output, cur_description = Q(q, database_url, out=True, description=True)\r\n # print(cur_description)\r\n cols = [i[0] for i in cur_description]\r\n return pd.DataFrame(output, columns=cols)", "def as_named_DataFrame (self):\n if self.sql == \"\":\n return DataFrame([])\n flag, values = self.parse_sql()\n try:\n if flag == 'EXPLICIT':\n return DataFrame(self.table, columns = values)\n elif flag == 'IMPLICIT':\n schema = \"'\" + values[0] + \"'\"\n table = \"'\" + values[1] + \"'\"\n return DataFrame(self.table,columns=self.get_headers(table,schema))\n else:\n return self.as_DataFrame()\n except AssertionError:\n return self.as_DataFrame()", "def read_sql_query(sql, engine, index_col=None, coerce_float=True, params=None,\n parse_dates=None, chunksize=None):\n pandas_sql = SQLDatabase(engine, schema=None, meta=None)\n \n return pandas_sql.read_query(sql, index_col=index_col, params=params, \n coerce_float=coerce_float, parse_dates=parse_dates, \n chunksize=chunksize)", "def read_sql_table(table_name, engine, schema=None, meta=None, index_col=None,\n coerce_float=True, parse_dates=None, columns=None,\n chunksize=None):\n if meta is None:\n meta = MetaData(engine, schema=schema)\n \n try:\n meta.reflect(only=[table_name])\n \n except exc.InvalidRequestError:\n raise ValueError(\"Table %s not found\" % table_name)\n\n pandas_sql = SQLDatabase(engine, meta=meta)\n \n table = pandas_sql.read_table(table_name, index_col=index_col, coerce_float=coerce_float,\n 
parse_dates=parse_dates, columns=columns, chunksize=chunksize)\n\n if table is not None:\n return table\n else:\n raise ValueError(\"Table %s not found\" % table_name, con)", "def loadValueTableFromSqlite(): \n conn = sqlite3.connect(prefix + args.db)\n df = io.read_frame(\"SELECT * FROM value\", conn) \n return df", "def postgresql_to_dataframe(conn, select_query, column_names):\n cursor = conn.cursor()\n try:\n cursor.execute(select_query)\n except (Exception, psycopg2.DatabaseError) as error:\n print(\"Error: %s\" % error)\n cursor.close()\n return 1\n \n # Naturally we get a list of tupples\n tupples = cursor.fetchall()\n cursor.close()\n \n # We just need to turn it into a pandas dataframe\n df = pd.DataFrame(tupples, columns=column_names)\n return df", "def load_data_sql(): \r\n conn = mysql.connect(**st.secrets[\"mysql\"])\r\n\r\n data = pd.read_sql('SELECT * FROM song_data', conn)\r\n lookup_table = pd.read_sql('SELECT * FROM lookup_table', conn)\r\n \r\n return data, lookup_table", "def lazy_fetch_rds_mysql(engine, query, params={}):\n try:\n engineCon = engine.connect()\n df = pd.read_sql_query(query, engineCon, params=params)\n finally:\n engineCon.close()\n return df", "def query_table(cursor, t_name, query, cnx):\n try:\n df=pd.read_sql(query, cnx)\n print(df)\n return df\n except mysql.connector.Error as err:\n if err.errno == 1051:\n print(f\"Cant read '{t_name}', table D.N.E, moving on...\")\n pass\n else:\n print(err.msg + \".\")\n exit(1)\n return None", "def get_dataframe(self, params=None, chunksize=None):\n if chunksize:\n raise NotImplementedError(\"Buffered reading not supported yet\")\n # the resulting `rows` of a query provides a nice way to do this, though\n\n query = self.config[\"query\"]\n params = params or {}\n\n logger.debug(\n \"Fetching query {} with params {}...\".format(\n query, params\n )\n )\n rows = self.db.query(query, fetchall=True, **params)\n df = rows.export(\"df\")\n\n return df", "def df_from_query(query, carto_sql_client, is_org_user, username,\n tablename=None, debug=False):\n if tablename:\n create_table = '''\n CREATE TABLE {tablename} As\n SELECT *\n FROM ({query}) As _wrap;\n SELECT CDB_CartodbfyTable('{org}', '{tablename}');\n '''.format(tablename=tablename,\n query=query,\n org=username if is_org_user else 'public')\n if debug: print(\"Creating table: {}\".format(create_table))\n resp = carto_sql_client.send(create_table)\n if debug: print(resp)\n new_tablename = resp['rows'][0]['cdb_cartodbfytable']\n table_resp = carto_sql_client.send(\n 'SELECT * FROM {tablename}'.format(tablename=new_tablename))\n if debug: print(table_resp)\n schema = transform_schema(table_resp['fields'])\n if table_resp['total_rows'] > 0:\n return pd.DataFrame(table_resp['rows']).set_index('cartodb_id').astype(schema)\n else:\n return pd.DataFrame(data=table_resp['rows'],\n columns=[k for k in table_resp['fields']],\n index=[]).astype(schema)\n else:\n resp = carto_sql_client.send(query)\n schema = transform_schema(resp['fields'])\n return pd.DataFrame(resp['rows']).astype(schema)\n\n return None", "def table_save_data_frame(self, table_name):\n self.recordset_df = pd.read_sql_table(table_name, self.con)\n return self", "def get_data(self, table_name, condition=None):\n\t\tif (self.__dbfile is not None) and self.table_exists(table_name):\n\t\t\tquery = \"SELECT * FROM %s\" % table_name\n\t\t\tif condition is not None:\n\t\t\t\tquery = query + \" WHERE %s\" % condition\n\t\t\tquery = query + \";\"\n\t\t\tdf = pd.read_sql_query(query, self._conn)\n\n\t\t\t# 
Strange columns appear. Get only the actual columns\n\t\t\treturn df[[col for col in LocalData.table_info[table_name] if col in df]]\n\t\treturn pd.DataFrame(columns=LocalData.table_info[table_name])", "def read_from_mysql_spark(self, dbtable):\n spark = self.spark\n df = spark.read.format(\"jdbc\")\\\n .option(\"url\", self.url)\\\n .option(\"driver\", self.driver)\\\n .option(\"dbtable\", dbtable)\\\n .option(\"user\",self.user)\\\n .option(\"password\",self.password)\\\n .load()\n return df", "def load_table_currency(conn, currency: str):\n # if there is no such table, generate new one\n if not check_table_exist(conn, f\"Rates{currency}\"):\n gen_table_for_currency(conn, currency)\n # get table as a pandas dataframe\n statement = f\"\"\"\n SELECT *\n FROM 'Rates{currency}';\n \"\"\"\n df = pd.read_sql_query(statement, conn)\n # format as Datetime\n df.Date = pd.to_datetime(df.Date)\n return df", "def get_dataframe(execution_ts: pendulum.DateTime, **pandas_read_args: Dict) -> Iterator[pd.DataFrame]:\n connection = _create_mysql_connection(mysql_conn_id=pandas_read_args.pop(\"mysql_conn_id\"))\n query = pandas_read_args.pop(\"sql\").format(\n today=execution_ts + timedelta(minutes=15), yesterday=execution_ts - timedelta(days=1)\n )\n df_batches = pd.read_sql(**pandas_read_args, sql=query, con=connection)\n return df_batches", "def get_query_result_to_df(self, query):\r\n try:\r\n return pd.read_sql_query(query, self.conn)\r\n except pd.pandas.io.sql.DatabaseError:\r\n print('Execution failed. Database error')", "def read_table(self, table_name, index_col=None, coerce_float=True, parse_dates=None, \n columns=None, schema=None, chunksize=None):\n table = SQLTable(table_name, self, index=index_col, schema=schema)\n return table.read(coerce_float=coerce_float, parse_dates=parse_dates, columns=columns, chunksize=chunksize)", "def get_dfbytable(self,\n db_name='HNTS_REVENUE_CYCLE',\n schema_name='RAW',\n table_name='SCHEDULEREPORTS_VISITSBYSTATUS'):\n if table_name.count('.') == 2:\n sqltext = f\"\"\"SELECT * FROM {table_name}\"\"\"\n elif table_name.count('.') == 1:\n sqltext = f\"\"\"SELECT * FROM {db_name}.{table_name}\"\"\"\n else:\n sqltext = f\"\"\"SELECT * FROM {db_name}.{schema_name}.{table_name}\"\"\"\n if self.connector_type == 'snowflake_sqlachemy':\n df = self.fetch_pandas_sqlalchemy(sqltext)\n else:\n df = pd.read_sql(sqltext,self.conn)\n df = df.astype(str)\n return df", "def get_df(self) -> pd.DataFrame:\n return pd.DataFrame(self.fetchall(), columns=self.headers())", "def run_query(query):\n db.query(query)\n dbResult = db.store_result()\n dbFetched = dbResult.fetch_row(maxrows = 0, how = 2)\n df = pd.DataFrame.from_records(dbFetched)\n return df", "def query(self, code):\n df = pd.read_sql(code, self.conn)\n if 'uuid' in df:\n df = df.set_index('uuid')\n return df", "def generic_pull_data(table_name: str = None, my_conn: Optional[dict] = None,\n t_log: Optional[TimeLogger] = None, verbose: bool = None) -> DataFrame:\n\n if verbose is True and t_log is None:\n t_log = TimeLogger()\n\n my_conn = my_connect(my_conn=my_conn, t_log=t_log, verbose=verbose)\n conn = my_conn['conn']\n\n table_id_name: str = table_name + '_id'\n\n query = SQL('SELECT * FROM {table}').format(table=Identifier(table_name))\n\n if verbose is True:\n t_log.new_event('Loading Equation Group Data')\n\n data_df = read_sql(query, con=conn, index_col=table_id_name)\n\n if verbose is True:\n t_log.new_event('Finished Equation Group Data')\n\n return data_df", "def priorities_to_dataframe(conn):\n return 
connect_database.get_table_into_pandas('priority_table',conn)", "def cleandata():\n engine = create_engine('sqlite:///../data/disaster_db.db')\n df = pd.read_sql_table('disaster_db', engine)\n\n return df", "def read(tablename: str()):\n return pd.read_csv(tablename, dtype={'source_id': str})", "def query_into_pandas(self, query, fields=None, parameters=None, names=None):\n target_url = self.build_query(query, fields=fields, parameters=parameters)\n\n col_id = 'columns'\n col_names = None\n if names is None:\n # If the columns of the query are specified (used for 'tab' or 'txt' value of\n # parameters['format'] only), then we use the same for the DataFrame\n if col_id in parameters:\n col_names = parameters[col_id].split(',')\n else:\n col_names = names\n\n db = pd.read_csv(\n target_url,\n delimiter=\"\\t\",\n skiprows=1,\n header=None,\n names=col_names\n )\n return db", "def get_alter_dataframe(database_connection: DatabaseHandler) -> pandas.DataFrame:\n # clearing for bad insertion\n database_connection.clear_table('''DELETE FROM ''' + table_name)\n insert_bad_data()\n return get_dataframe()", "def get_database_data(file_name=''):\n if not os.path.exists(file_name):\n raise IOError(\"File {} does not exist!\".format(file_name))\n df = pd.read_csv(file_name, header=1)\n return df", "def read_sql_from_file(path, conn):\n with open(path, 'r', encoding='utf-8') as f:\n qu = f.read()\n \n df = read_sql(qu, conn)\n \n return df", "def df2sql(df, table_name, database_url):\r\n conn = sqlite3.connect(database_url)\r\n df.to_sql(table_name, conn, if_exists='replace', index = False)\r\n conn.commit()", "def get_dataframe(self):\n self.logger.info('Fetching movie records...')\n session = connect()\n\n cols = [\n Movie.movie_id,\n Movie.title,\n Movie.start_year,\n Movie.genres,\n Movie.description,\n Movie.kind,\n ]\n\n filters = [\n Movie.description.isnot(None),\n Movie.genres.isnot(None),\n ]\n\n query = session.query(*cols).filter(*filters).order_by(Movie.start_year.desc())\n\n try:\n return pd.read_sql(query.statement, session.bind)\n finally:\n session.close()", "def to_df(self):\n # check read only\n if self.__read_only:\n raise IOError(\"Table is for read only.\")\n\n # convert data to dicts\n data = dict(record.to_id_dict()\n for record in self.__data.values())\n\n # make data frame\n df = pd.DataFrame(data).T\n df.index.name = \"_id\"\n return df", "def load_data_into_pandas(db, sufficiently_old):\n engine = create_engine(db)\n cols = ['eudract_number',\n 'eudract_number_with_country',\n 'date_of_the_global_end_of_the_trial',\n 'trial_is_part_of_a_paediatric_investigation_plan',\n 'end_of_trial_status',\n 'trial_status',\n 'trial_human_pharmacology_phase_i',\n 'trial_therapeutic_exploratory_phase_ii',\n 'trial_therapeutic_confirmatory_phase_iii',\n 'trial_therapeutic_use_phase_iv',\n 'trial_bioequivalence_study',\n 'subject_healthy_volunteers',\n 'trial_condition_being_studied_is_a_rare_disease',\n 'trial_single_blind',\n 'full_title_of_the_trial',\n 'name_or_abbreviated_title_of_the_trial_where_available',\n 'trial_results',\n 'sponsors' ]\n return pd.read_sql_query(\"SELECT {} FROM public.euctr WHERE meta_updated > '{}'\".format(','.join(cols), sufficiently_old), con=engine)", "def get_df_from_db(localhost, username, password, dbname, tbname, fields=None, chunksize=None, time_field=None, start_time=None, end_time=None):\n # con = pymysql.connect(host=localhost, user=username, password=password, database=dbname, charset='utf8', use_unicode=True)\n connect_string = 
\"mysql+pymysql://{}:{}@{}/{}?charset=utf8\".format(username, password, localhost, dbname)\n con = create_engine(connect_string, encoding='utf-8')\n time_cond = \"\"\n if time_field:\n if not end_time:\n time_cond = \" WHERE \" + time_field + \" <= NOW()\"\n else:\n # data in end time day should be retrieved but default yyyy-mm-dd is the begining of that day\n end_time_nextday = (datetime.datetime.strptime(end_time, \"%Y-%m-%d\") + datetime.timedelta(days=1)).strftime(\"%Y-%m-%d\")\n time_cond = \" WHERE \" + time_field + \" <= '\" + end_time_nextday + \"'\"\n if start_time:\n time_cond += \" AND \" + time_field + \" >= '\" + start_time + \"'\"\n if isinstance(tbname, unicode):\n tbname = str(tbname)\n if isinstance(tbname, str):\n if isinstance(fields, unicode):\n fields = str(fields)\n if isinstance(fields, str):\n fields = [fields]\n if fields == None:\n fields = [\"*\"]\n sql_cmd = \"SELECT \" + \",\".join(fields) + \" FROM \" + tbname + time_cond\n if chunksize:\n for cur_df in pd.read_sql(sql_cmd, con, chunksize=chunksize):\n yield cur_df\n else:\n cur_df = pd.read_sql(sql_cmd, con)\n yield cur_df\n elif isinstance(tbname, list):\n if isinstance(fields, unicode):\n fields = str(fields)\n if isinstance(fields, str):\n fields = [fields]\n if fields == None:\n fields = [\"*\"]\n for cur_tb in tbname:\n sql_cmd = \"SELECT \" + \",\".join(fields) + \" FROM \" + cur_tb + time_cond\n if chunksize:\n for cur_df in pd.read_sql(sql_cmd, con, chunksize=chunksize):\n yield cur_df\n else:\n cur_df = pd.read_sql(sql_cmd, con)\n yield cur_df\n else:\n logging.error(\"Argument tbname only accept a string or a list of string! But input type is %s\" %(type(tbname)))\n exit(-1)", "def targets_to_dataframe(conn):\n return connect_database.get_table_into_pandas('target_info',conn)", "def withOutPandas()-> None:\n logging.info(f\"Making sure the DB is set up {getTime()}\" )\n\n with getCon() as conn:\n with getCursor(conn,True) as cur:\n cur.execute(\"CREATE TABLE IF NOT EXISTS data (iso_code TEXT,continent TEXT,location TEXT,date DATE,total_cases FLOAT,new_cases FLOAT,new_cases_smoothed FLOAT,total_deaths FLOAT,new_deaths FLOAT,new_deaths_smoothed FLOAT,total_cases_per_million FLOAT,new_cases_per_million FLOAT,new_cases_smoothed_per_million FLOAT,total_deaths_per_million FLOAT,new_deaths_per_million FLOAT,new_deaths_smoothed_per_million FLOAT,reproduction_rate FLOAT,icu_patients FLOAT,icu_patients_per_million FLOAT,hosp_patients FLOAT,hosp_patients_per_million FLOAT,weekly_icu_admissions FLOAT,weekly_icu_admissions_per_million FLOAT,weekly_hosp_admissions FLOAT,weekly_hosp_admissions_per_million FLOAT,new_tests FLOAT,total_tests FLOAT,total_tests_per_thousand FLOAT,new_tests_per_thousand FLOAT,new_tests_smoothed FLOAT,new_tests_smoothed_per_thousand FLOAT,positive_rate FLOAT,tests_per_case FLOAT,tests_units TEXT,total_vaccinations FLOAT,people_vaccinated FLOAT,people_fully_vaccinated FLOAT,total_boosters FLOAT,new_vaccinations FLOAT,new_vaccinations_smoothed FLOAT,total_vaccinations_per_hundred FLOAT,people_vaccinated_per_hundred FLOAT,people_fully_vaccinated_per_hundred FLOAT,total_boosters_per_hundred FLOAT,new_vaccinations_smoothed_per_million FLOAT,stringency_index FLOAT,population FLOAT,population_density FLOAT,median_age FLOAT,aged_65_older FLOAT,aged_70_older FLOAT,gdp_per_capita FLOAT,extreme_poverty FLOAT,cardiovasc_death_rate FLOAT,diabetes_prevalence FLOAT,female_smokers FLOAT,male_smokers FLOAT,handwashing_facilities FLOAT,hospital_beds_per_thousand FLOAT,life_expectancy 
FLOAT,human_development_index FLOAT,excess_mortality_cumulative_absolute FLOAT,excess_mortality_cumulative FLOAT,excess_mortality FLOAT,excess_mortality_cumulative_per_million FLOAT)\")\n cur.execute(\"TRUNCATE data\")\n \n with open(DATA_FILE) as f:\n data = list(csv.reader(f))\n logging.info(f\"Slicing {getTime()}\")\n\n SLICE_SIZE = len(data) // 100\n rows = [data[i:i + SLICE_SIZE] for i in range(1, len(data), SLICE_SIZE)]\n logging.info(f\"Finished slicing {getTime()}\")\n logging.info(f\"Inserting {getTime()}\")\n\n with Pool(2) as p:\n p.map(insert,rows)\n logging.info(f\"Finished Inserting {getTime()}\")\n \n logging.info(f\"Gettign Uniqe Contries {getTime()}\")\n with getCon() as conn:\n with getCursor(conn) as cur:\n cur.execute(\"SELECT DISTINCT location FROM data\")\n result =cur.fetchall()\n with open(RESULT_FILE,\"w\", newline='') as r:\n writer = csv.DictWriter(r,fieldnames=[\"Uniqe Countries\"])\n writer.writeheader()\n writer.writerow({\"Uniqe Countries\":len(result)})", "def cursor_to_dataframe(cur):\n description = cur.description\n column_names = [item.name for item in description]\n data = cur.fetchall()\n df = pandas.DataFrame(data, columns=column_names)\n cur.close()\n return df", "def _fetch_from_ms_sql_server(database, table, columns=None, condition=None):\n try:\n params = f'DRIVER=SQL Server;SERVER=SPAC2SVR;PORT=1433;DATABASE={database}'\n conn = pyodbc.connect(params)\n\n columns = columns if columns is not None else '*'\n sql = f'SELECT {columns} FROM {database}.dbo.{table}'\n sql += f' WHERE {condition}' if condition is not None else ''\n \n df = pd.read_sql(sql, conn)\n conn.close()\n\n if df.empty:\n raise ValueError('ERROR: No records found in the MS SQL Server - Data may be up to date!')\n else:\n df.columns = [i.lower() for i in df.columns.tolist()]\n return df\n except pyodbc.Error as e:\n if e.args[0] == '42000':\n print(f\"ERROR: Cannot access the SQL Server database: {database}!\")\n except:\n raise", "def read_data(db_name, query_file):\r\n con = sqlite3.connect(db_name)\r\n cursor = con.cursor()\r\n\r\n sql = open(query_file,'r')\r\n query = sql.read()\r\n sql.close()\r\n\r\n data = pd.read_sql_query(query, con=con)\r\n data.drop_duplicates(subset=['Title'], inplace=True)\r\n data = data[data['Type']=='movie']\r\n data.set_index('imdbID', inplace=True)\r\n\r\n con.commit()\r\n con.close()\r\n\r\n return data", "def dataFrame(self):\n\n memory_file = StringIO(initial_value=self.sparql_result.decode('utf-8'), newline='\\n')\n reader = DictReader(memory_file)\n\n schema = StructType(\n list(map(lambda f: StructField(f, StringType()), reader.fieldnames))\n )\n\n data = list(map(lambda d: [d[f] for f in reader.fieldnames], list(reader)))\n\n return self.spark.createDataFrame(data, schema)", "def as_DataFrame (self):\n return DataFrame(self.table)", "def get_dataframe() -> pandas.DataFrame:\n database_connection = processing.establish_connection(database_path)\n dataframe = database_connection.to_dataframe(['CustomerId', 'InvoiceDate', 'Total'], table_name)\n database_connection.close()\n dataframe = processing.get_invoice_date_fixed(dataframe)\n analyze_dataframe = dataframe.copy()\n total_sum_dataframe = processing.get_column_sum(analyze_dataframe)\n\n customer_count_dataframe = processing.drop_duplicates(analyze_dataframe)\n customer_count_dataframe = processing.get_column_count(customer_count_dataframe)\n return customer_count_dataframe.merge(total_sum_dataframe, how='inner', on='InvoiceDate')", "def load_db(db_path):\n return pd.read_csv(db_path)", 
"def get_execute_table(self, limit=None):\n query = self.select_all()\n self.cur.execute(query)\n if limit is None:\n result = self.cur.fetchall()\n else:\n result = self.cur.fetchmany(limit)\n return to_data_frame(result)", "def get_execute_table(self, limit=None):\n query = self.select_all()\n self.cur.execute(query)\n if limit is None:\n result = self.cur.fetchall()\n else:\n result = self.cur.fetchmany(limit)\n return to_data_frame(result)", "def get_client_ltv_table(db_conn):\n query = '''\n select client_id, ltv\n from churn_model.client_ltv;\n '''\n df = pd.read_sql(query, db_conn)\n return df", "def load(file):\n return pq.read_table(file).to_pandas()", "def Get_Data_From_Snowflake(self, query, d_params=None):\n try:\n ctx = self.__Connect_To_Snowflake(d_params)\n df = pd.read_sql_query(query, ctx)\n ctx.close()\n print(\"Data Fetch Completed\")\n return df\n except:\n print(f\"Data Fetching failed for the query: {query}\")\n ctx.close()\n return None", "def load_luigi_stats(db_path, table):\n engine = create_engine('sqlite:///' + db_path)\n return pd.read_sql_table(table, engine)", "def _from_sql(connection, table_or_query):\n cursor = connection.cursor()\n try:\n cursor.execute(table_or_query)\n except Exception:\n try:\n cursor.execute('SELECT * FROM {0}'.format(table_or_query))\n except Exception:\n cursor.close()\n raise\n\n try:\n # If iterable, use cursor directly.\n iter(cursor)\n results = cursor\n except TypeError:\n # If not iterable, build a generator.\n def result_generator(cursor):\n row = cursor.fetchone()\n while row != None:\n yield row\n row = cursor.fetchone()\n results = result_generator(cursor)\n\n header = tuple(x[0] for x in cursor.description)\n reader = chain([header], results)\n return (reader, cursor.close)", "def queryToPDTable(postgreSql_selectQuery):\n\n import os\n import psycopg2\n import pandas as pd\n\n #basic query function to the database using environmental variables for\n #the user name and password\n conn=psycopg2.connect(host=\"postgis1\",\n dbname=\"sdad\",\n user=os.environ.get('UVA_uname'),\n password=os.environ.get('UVA_pass'))\n\n #convert it to a pandas dataframe\n dataOut=pd.read_sql_query(postgreSql_selectQuery,conn)\n\n return dataOut", "def Mydata():\n\n stmt = db.session.query(Appsdata).statement\n df = pd.read_sql_query(stmt, db.session.bind)\n \n return jsonify(df.to_dict())", "def df2db(self, df: pd.DataFrame, tab_name):\n\n self.execute(\"set hive.execution.engine = tez\")\n self.execute(\"set tez.queue.name = sephora_internal\")\n self.execute(\"drop table if exists {table_name}\".format(table_name=tab_name))\n df.to_sql(tab_name, self.engine, method='multi', index=False)", "def query(self, query, **params):\n chunksize = params.pop(\"chunksize\", 100000)\n to_pandas = params.pop(\"to_pandas\", True)\n with self._cursor() as cursor:\n params = {k: v for k, v in params.items() if k in getargs(cursor.execute).args}\n cursor.execute(query, **params)\n fields = [i[0] for i in cursor.description]\n res = []\n while True:\n result = cursor.fetchmany(chunksize)\n if not result:\n break\n res.append(Frame(result))\n frame = rbind(res, bynames=False)\n if frame.shape == (0, 0):\n frame = Frame({n: [] for n in fields})\n else:\n frame.names = fields\n if to_pandas:\n frame = frame.to_pandas()\n return frame", "def query_save_data_frame(self, query):\n self.recordset_df = pd.read_sql_query(query, self.con)\n return self", "def read_query(self, sql, index_col=None, coerce_float=True, parse_dates=None, \n params=None, chunksize=None):\n 
args = _convert_params(sql, params)\n\n result = self.execute(*args)\n columns = result.keys()\n\n if chunksize is not None:\n return self._query_iterator(result, chunksize, columns,\n index_col=index_col,\n coerce_float=coerce_float,\n parse_dates=parse_dates)\n else:\n data = result.fetchall()\n frame = _wrap_result(data, columns, index_col=index_col,\n coerce_float=coerce_float,\n parse_dates=parse_dates)\n return frame", "def load_table_history(conn, year: str):\n # if there is no such table, generate new one\n if not check_table_exist(conn, f\"History{year}\"):\n gen_table_for_history(conn, year)\n # get table as a pandas dataframe\n statement = f\"\"\"\n SELECT *\n FROM 'History{year}';\n \"\"\"\n df = pd.read_sql_query(statement, conn)\n # format as Datetime\n df.Date = pd.to_datetime(df.Date)\n return df" ]
[ "0.81299627", "0.805204", "0.80501723", "0.80095464", "0.7997728", "0.7991211", "0.79572374", "0.787298", "0.783609", "0.78281873", "0.7788777", "0.77415526", "0.77030283", "0.7697647", "0.7693436", "0.76739347", "0.7665277", "0.76047397", "0.7588384", "0.7585602", "0.75665146", "0.7414126", "0.73320043", "0.72830313", "0.7260695", "0.72404313", "0.72399956", "0.7238862", "0.72164005", "0.7213561", "0.7173024", "0.71706873", "0.715505", "0.7153544", "0.7141767", "0.7107748", "0.7074973", "0.6996506", "0.6971981", "0.6961989", "0.6958829", "0.6936002", "0.6923734", "0.69226545", "0.68963027", "0.68959135", "0.6882044", "0.6878602", "0.6822005", "0.6799532", "0.6738827", "0.6738783", "0.6697448", "0.6689125", "0.66736364", "0.66604817", "0.6651846", "0.6624303", "0.66226614", "0.6621257", "0.66180116", "0.661537", "0.6603627", "0.6593601", "0.65923274", "0.65915084", "0.6590571", "0.658657", "0.6583202", "0.6575206", "0.6567429", "0.6546847", "0.6514682", "0.65098786", "0.6508345", "0.6500285", "0.6496507", "0.6481517", "0.64629614", "0.645404", "0.64375293", "0.64236194", "0.6398057", "0.6394802", "0.6386179", "0.6356714", "0.6338077", "0.6338077", "0.6332617", "0.63131845", "0.6298654", "0.6277504", "0.6245599", "0.6237026", "0.62257797", "0.62255853", "0.62250274", "0.6205788", "0.6201381", "0.61930233" ]
0.7388874
22
Update employment history for given profile id.
def update_work_history(work_history_list, profile_id):
    saved_work_history_ids = set()
    for work_history in work_history_list:
        work_history_id = work_history.get("id")
        work_history_instance = None
        if work_history_id:
            try:
                work_history_instance = Employment.objects.get(
                    profile_id=profile_id, id=work_history_id
                )
            except Employment.DoesNotExist:
                raise ValidationError("Work history {} does not exist".format(work_history_id))
        work_history_serializer = EmploymentSerializer(instance=work_history_instance, data=work_history)
        work_history_serializer.is_valid(raise_exception=True)
        work_history_serializer.save(profile_id=profile_id)
        saved_work_history_ids.add(work_history_serializer.instance.id)

    Employment.objects.filter(profile_id=profile_id).exclude(id__in=saved_work_history_ids).delete()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def update_profits(self, next_profit):\n self.profit = next_profit\n self.profit_history.append(next_profit)", "def update_profile(profile_id):\n \n profile = mongo.db.profiles\n profile.find_one_and_update({'_id': ObjectId(profile_id)},\n {'$set': {'date': datetime.utcnow(),\n 'headline': request.form.get('headline'),\n 'bio': request.form.get('bio'),\n 'xp': request.form.get('xp'),\n 'interests': request.form.get('interests'),\n 'stack': request.form.get('stack'),\n 'languages': request.form.get('languages'),\n 'frameworks': request.form.get('frameworks'),\n 'github': request.form.get('github'),\n 'linkedin': request.form.get('linkedin')\n }\n }\n )\n return redirect(url_for('dashboard'))", "def update(self, profile: Dict[datetime.time, float]) -> None:\n\n if self._profile is None:\n self._profile = profile\n else:\n self._profile.update(profile)", "def update(table, id_):\n\n # 4\n for index in range(len(table)):\n if table[index][0] == id_:\n addnew = ui.get_inputs(\n ['name: ', 'birth_year: '],\n 'Updating list of hr')\n addnew.insert(0, id_)\n table[index] = addnew\n data_manager.write_table_to_file('hr/persons.csv', table)\n return table", "def update_profile(orcid_id, data=None):\n \n u = db.session.query(User).filter_by(orcid_id=orcid_id).first()\n if u:\n u.updated = datetime.utcnow()\n if data:\n u.profile = data\n # save the user\n db.session.begin_nested()\n try:\n db.session.add(u)\n db.session.commit()\n except exc.IntegrityError as e:\n db.session.rollback()\n # per PEP-0249 a transaction is always in progress \n db.session.commit()", "def update_experience(uid, rid, increment):\n errmsg = []\n\n experience = Experience.query.filter(Experience.uid == uid).filter(Experience.rid == rid).first()\n if not experience:\n errmsg.append(\"Experience entry does not exist for the given user ID and restaurant ID.\")\n elif increment < 0:\n errmsg.append(\"Experience cannot be incremented by a negative number.\")\n\n if not errmsg:\n old_level = convert_experience_to_level(experience.experience)\n milestone = get_milestone(uid, rid)\n Experience.query.filter(Experience.uid == uid).filter(Experience.rid == rid).update(dict(experience=experience.experience + increment))\n db.session.commit()\n if milestone:\n new_level = convert_experience_to_level(experience.experience)\n if old_level < new_level and new_level == int(milestone[\"level\"]):\n update_points(uid, rid, milestone[\"reward\"])\n return None\n\n return errmsg", "def update_profile(id):\n tags = request.form.get('tags')\n user = User.query.get(id)\n speaker = Speaker.query.filter_by(id_assigned_user=user.id).first()\n\n speaker.tags = tags\n try:\n db.session.commit()\n except:\n abort(500)\n\n return redirect(url_for('get_profile', id=user.id))", "def update_followers(request, profile_id):\n user = request.user\n profile = User.objects.get(id=profile_id)\n\n if profile in user.following.all():\n user.following.remove(profile.id)\n user.save()\n else:\n user.following.add(profile.id)\n user.save()\n\n return HttpResponseRedirect(reverse(\"profile\", kwargs={\n \"username\": profile.username\n }))", "def profile_page(cls, employee_id, logger=None):\n if logger is None:\n logger = cls._logger\n\n database_connection = DatabaseConnection(f\"employees.csv\")\n table = database_connection.table\n employee = Employee(employee_id)\n\n view = table[(table['employee_id']==employee.get_employee_id())]\n logger.log(view)\n\n while True:\n\n choice = input(\n \"Please choose: \"\n \"(1) check data, \"\n \"(2) update first name, \"\n 
\"(3) update last name, \"\n \"(4) save changes, \"\n \"(5) exit without saving \"\n )\n if choice not in ('1', '2', '3', '4', '5'):\n logger.log(\"Please pick a valid choice\")\n elif choice=='1':\n view = table[(table['employee_id']==employee.get_employee_id())]\n logger.log(view)\n elif choice=='2':\n first_name = input(\"Enter your first name: \")\n employee.set_first_name(first_name)\n elif choice=='3':\n last_name = input(\"Enter your last name: \")\n employee.set_last_name(last_name)\n elif choice=='4':\n table[\n (table['employee_id']==employee.get_employee_id())\n ] = pd.Series(\n {'employee_id': employee.get_employee_id(),\n 'first_name': employee.get_first_name(),\n 'last_name': employee.get_last_name(),\n }\n )\n database_connection.overwrite()\n logger.log(\"Information saved!\")\n else:\n break", "def update_employee(self, obj):\n cursor = self.dbconnect.get_cursor()\n try:\n cursor.execute('UPDATE employee '\n 'SET name = %s, email = %s, office = %s, extra_info = %s, picture_location = %s, '\n 'research_group = %s, title = %s, is_external = %s, is_admin = %s, is_active = %s '\n 'WHERE id = %s;',\n (obj.name, obj.email, obj.office, obj.extra_info, obj.picture_location, obj.research_group,\n obj.title, obj.is_external, obj.is_admin, obj.is_active, obj.e_id))\n self.dbconnect.commit()\n except:\n self.dbconnect.rollback()\n raise", "def update(self,\n ipfix_l2_profile_id,\n i_pfix_l2_profile,\n ):\n return self._invoke('update',\n {\n 'ipfix_l2_profile_id': ipfix_l2_profile_id,\n 'i_pfix_l2_profile': i_pfix_l2_profile,\n })", "def update(\n self,\n email,\n company_name,\n location,\n job_profile,\n salary,\n username,\n password,\n security_question,\n security_answer,\n notes,\n date_applied,\n status,\n):", "def _update_state(self, job_id):\n self.logger.info(\"updating 'timestamp' in profile state\")\n # get current state ...\n with open(self.state_file, \"r\") as json_current:\n state = json.load(json_current)\n json_current.close()\n # ... 
and write new timestamp\n with open(self.state_file, \"w\") as json_new:\n state[\"timestamp\"] = job_id\n json.dump(state, json_new, indent=4)\n json_new.close()", "def do_user_baseline_update():\n targetUsers = User.query.filter_by(id=request.form['id']).all()\n if not any(targetUsers):\n return user_list(\"Unknown user.\")\n\n targetUser = targetUsers[0]\n\n targetUser.baseline = request.form['baseline']\n\n db.session.commit()\n return Response(render_template('employee/user/list.html',\n users=targetUsers,\n message=f\"Updated baseline for {targetUser.name}\"),\n mimetype='text/html')", "def put(self, id ):\n adm = Administration()\n print(api.payload)\n p = Profile.from_dict(api.payload)\n if p is not None:\n p.set_id(id)\n adm.save_profile(p)\n return p, 200\n else:\n return '', 500", "def update_policy_profile(self, profile, body=None):\r\n return self.put(self.policy_profile_path % (profile), body=body)", "def update_education(education_list, profile_id):\n saved_education_ids = set()\n for education in education_list:\n education_id = education.get(\"id\")\n if education_id is not None:\n try:\n education_instance = Education.objects.get(profile_id=profile_id, id=education_id)\n except Education.DoesNotExist:\n raise ValidationError(\"Education {} does not exist\".format(education_id))\n else:\n education_instance = None\n education_serializer = EducationSerializer(instance=education_instance, data=education)\n education_serializer.is_valid(raise_exception=True)\n education_serializer.save(profile_id=profile_id)\n saved_education_ids.add(education_serializer.instance.id)\n\n Education.objects.filter(profile_id=profile_id).exclude(id__in=saved_education_ids).delete()", "def update_user_profile_info(user_id, user_fname, user_lname, email):\n \n user=User.query.filter(User.user_id == user_id).first()\n\n if email != None:\n user.update_email(email)\n if user_fname != None:\n user.update_first_name(user_fname)\n if user_lname != None:\n user.update_last_name\n \n db.session.commit()", "def update(table, id_):\n\n # your code\n key = common.check_for_key(id_,table)\n if key == None:\n ui.print_error_message('Key does not exist')\n else:\n return_inputs = ui.get_inputs(['Name', 'Age'], 'Enter New Values')\n modif_index = key\n\n table[modif_index][NAME] = return_inputs[FIRST_PROP]\n table[modif_index][AGE] = return_inputs[SECOND_PROP]\n data_manager.write_table_to_file('hr/persons.csv', table) \n\n return table", "def update(self, id):\n loan = self._model.query.get(id)\n loan.original_due_date = loan.due_date\n loan.due_date = loan.due_date + 1 * TimeUnits.MONTH_IN_SEC\n\n db.session.add(loan)\n\n try:\n db.session.commit()\n except Exception as exc:\n print(f'Something went wrong: {exc}')\n db.session.rollback()", "def update(self,\n ike_profile_id,\n ip_sec_vpn_ike_profile,\n ):\n return self._invoke('update',\n {\n 'ike_profile_id': ike_profile_id,\n 'ip_sec_vpn_ike_profile': ip_sec_vpn_ike_profile,\n })", "async def updateAudit(self, auditid, name, description, status, type, data, userid) -> UpdateAuditResponse:\n return await self.stub.UpdateAudit(\n UpdateAuditRequest(_id=auditid, name=name,\n description=description, status=status, type=type, created_by=userid\n ))", "def update_users(self):\n conn = sqlite3.connect(self.__DB)\n cursor = conn.cursor()\n\n users_data = []\n unsaved_histories_data = []\n for key, user in self.__users.items(): # here, key it's actually users id\n users_data.append((user.get_balance(), key))\n for register in user.get_history():\n register_str, 
is_saved = register\n if not is_saved:\n unsaved_histories_data.append((register_str, key))\n\n cursor.executemany('''\n UPDATE users\n SET balance=?\n WHERE id=?;\n ''', users_data)\n\n cursor.executemany('''\n INSERT INTO history (register, owner)\n VALUES (?, ?);\n ''', unsaved_histories_data)\n\n conn.commit()\n conn.close()\n\n self.load_users() # RELOADING!!! Pew, pew, pew, pew, pew...", "def commit_checkpoint(self, checkpoint_id):\n changes_to_merge = self.pop_checkpoint(checkpoint_id)\n if self.checkpoints:\n # we only have to merge the changes into the latest checkpoint if\n # there is one.\n self.latest = merge(\n changes_to_merge,\n self.latest,\n )", "def put(self, employee_id):\n\n employee = EmployeeModel.find_by_id(employee_id)\n if employee is None:\n return {'message': \"There is no employee with this ID, or your access_token is invalid.\"}, 404\n else:\n \"\"\" check if employee entered the building today\"\"\"\n if WorkdayModel.find_latest_workday(employee.id):\n \"\"\"checking if employee already entered building today\"\"\"\n last_workday = WorkdayModel.find_latest_workday(employee.id)\n\n if last_workday.time_in.day == datetime.today().day:\n last_workday.time_out = datetime.today()\n # calculate hours_worked| .time converts to H:M\n duration = last_workday.time_out - last_workday.time_in\n # duration is a datetime.timedelta\n duration = (datetime.min + duration).time()\n last_workday.hours_worked = duration\n try:\n last_workday.save_to_db()\n except:\n return {'message': 'An error occurred updating worked hours'}, 500\n\n return last_workday.json()\n\n return {'message': 'First use of card, or employee did not start work today'}, 200", "def employment(self, instance):\r\n return instance.user.profile.employment", "def update_account_with(self, id_, **kwargs):\n self.update_user_with(id_, **kwargs)\n self.update_profile_with(id_, **kwargs)\n # TODO:\n # self.update_prefecture_with(id_, kwargs)", "def put(self, id):\n adm = Administration()\n print(api.payload)\n lp = LearnProfile.from_dict(api.payload)\n if lp is not None:\n lp.set_id(id)\n adm.save_learnprofile(lp)\n return lp, 200\n\n else:\n return '', 500", "def refresh_history(self):\n\n self.old_jobs = self.secretary_bot.history_bullshit_filter(self.old_jobs)\n self.jobs_save(self.old_jobs, 'overwrite')", "def record_update_for_user(record_id, values):\n session = get_session()\n with session.begin():\n record_ref = get_user_record(record_id, session=session)\n record_ref.update(values)\n record_ref.save(session=session)", "def edit_payee(self, payee_id, new_payee_name):\n # [todo] - add check that new_payee_name is unique\n\n # open a cursor\n cur = self.get_cursor()\n\n edit_payee_statement = \"UPDATE payees \" + \\\n \"SET payee_name='{0}' \".format(new_payee_name) + \\\n \"WHERE payee_id={0}\".format(payee_id)\n\n cur.execute(edit_payee_statement)\n\n # close the cursor\n self.close_cursor()", "def updateEMPStudyData(self, study_id, study_score, web_app_user_id):\n con = self.getMetadataDatabaseConnection()\n con.cursor().callproc('qiime_assets.update_emp_study_data', [study_id, study_score, web_app_user_id])", "def profile(request):\n profile = get_object_or_404(UserProfile, user=request.user)\n\n # Date format to get todays games from Gameline model\n todays_date = datetime.datetime.now(pytz.timezone('America/Los_Angeles'))\n date_LA = todays_date.strftime('%B %-d, %Y')\n\n # Get all objects in MLBGameLine model\n all_games = MLBGameLine.objects.all()\n\n if request.method == \"POST\":\n form = 
EditProfileInfo(request.POST, instance=profile)\n if form.is_valid():\n form.save()\n messages.success(request, 'Successfully updated profile!')\n return redirect(reverse('profile'))\n else:\n messages.error(request,\n \"Failed to update profile.\")\n else:\n form = EditProfileInfo(instance=profile)\n\n template = \"profiles/profile.html\"\n context = {\n 'profile': profile,\n 'all_games': all_games,\n 'date_LA': date_LA,\n 'form': form,\n }\n\n return render(request, template, context)", "def update_user_profile(id):\n token = request.json['token']\n u = user.User.query.filter(user.User.token == token).first()\n if u is None:\n abort(404)\n if u.id != id:\n print \"user id is wrong.\" #TODO: Support log system\n abort(500)\n u.name = request.json['name']\n u.nickname = request.json['nickname']\n u.company = request.json['nickname']\n with store_context(fs_store):\n with open(files.path(request.json['header'])) as f:\n u.header_icon.from_file(f)\n db.session.merge(u)\n db.session.commit()\n db.session.merge(u)\n db.session.commit()\n return jsonify(u.to_dict())", "def put(self, id):\n empleadoactualizar = EmployeeModel.query.filter_by(employee_id=id).first()\n if empleadoactualizar:\n reg = api.payload\n empleadoactualizar.employee_id = reg['employee_id']\n empleadoactualizar.name = reg['name']\n empleadoactualizar.age = reg['age']\n empleadoactualizar.position = reg['position']\n empleadoactualizar.fechaingreso = datetime.date.fromisoformat(reg['fechaingreso'])\n db.session.merge(empleadoactualizar)\n db.session.commit()\n return 201\n api.abort(404)", "def update(table, id_):\n ID = 0\n ids = [item[ID] for item in table]\n if id_ not in ids:\n raise ValueError(\"The given ID not in the table.\")\n titles_sales = [\"Name: \", \"Birth Year: \"]\n inputs = ui.get_inputs(titles_sales, \"Specify new properties\")\n for index, item in enumerate(table):\n if id_ == item[ID]:\n table[index] = inputs\n table[index].insert(0, id_)\n return table", "def update_profile(request, id):\n username = request.POST.get(\"username\")\n first_name, last_name = request.POST.get(\"fullname\").split()\n email = request.POST.get(\"email\")\n phone = request.POST.get(\"phone\")\n address = request.POST.get(\"address\")\n\n userObj = User.objects.get(id=id)\n userObj.first_name = first_name\n userObj.last_name= last_name\n userObj.username= username\n userObj.email = email\n userObj.phone = phone\n userObj.address = address\n userObj.save()\n messages.success(request, \"Your profile is successfully update.\", fail_silently=False)\n return redirect(\"user_profile\", id)", "def update_employee(employee):\n employee_id = get_employee_input_int(\"Enter the employee id you want to update\")\n newGrade = get_employee_input_int(\"Enter the new grade for \")\n db.update_employee(employee_id, newGrade)\n print(employee.full_name + \"'s grade value has been updated to :-> \", newGrade)", "def test_update_profile(self):\n url = self.url\n url = url + '{}/'.format(\n self.profile.pk\n )\n response = self.client.patch(url)\n self.assertEqual(response.status_code, status.HTTP_200_OK)\n\n update_data = {\n 'first_name': 'UpdateTest'\n }\n\n response = self.client.patch(url, update_data, format='json')\n self.assertEqual(response.status_code, status.HTTP_200_OK)\n\n # Caso 1\n update_data['tasks_finalize'] = '14124123'\n update_data['tasks_pending'] = '124123132'\n update_data['tasks_created'] = '12412323'\n\n response = self.client.patch(url, update_data, format='json')\n self.assertEqual(response.data['tasks_finalize'], 0)\n 
self.assertEqual(response.data['tasks_pending'], 0)\n self.assertEqual(response.data['tasks_created'], 0)", "def edit_job(request, job_id):\n job = get_object_or_404(Jobs, pk=job_id)\n\n if request.user.id != job.author.id:\n messages.error(request, 'You can only edit your own job profiles')\n return redirect(reverse('view_home'))\n\n if request.method == 'POST':\n form = JobsForm(request.POST, instance=job)\n if form.is_valid():\n form.save()\n messages.success(\n request, 'You have successfully updated the job profile!')\n return redirect(reverse('job_profile', args=[job.id]))\n else:\n messages.error(request,\n ('Could not update job profile. '\n 'Make sure you entered valid data.'))\n else:\n form = JobsForm(instance=job)\n messages.info(request, f'You are editing {job.title}')\n\n template = 'jobs/edit_job.html'\n context = {\n 'title': 'Edit job profile',\n 'form': form,\n 'job': job,\n }\n\n return render(request, template, context)", "def edit_profile(profile_id):\n # This check is in place to avoid users trying to edit a profile via the dashboard\n # when they have not created one. If not the option is not displayed\n user = mongo.db.user.find_one({'username': session['username']})\n chck = mongo.db.profiles.find_one_or_404({'user_id': user['_id']})\n if chck: \n profile = mongo.db.profiles.find_one(\n {'_id': ObjectId(profile_id)})\n \n form=ProfileForm()\n form.headline.data = profile['headline']\n form.bio.data = profile['bio']\n form.xp.data = profile['xp']\n form.interests.data = profile['interests']\n form.stack.data = profile['stack']\n form.languages.data = profile['languages']\n form.frameworks.data = profile['frameworks']\n form.github.data = profile['github']\n form.linkedin.data = profile['linkedin']\n \n return render_template('pages/editprofile.html', form=form, profile=profile, legend='Edit your Profile')", "def update_profile(self, method=\"POST\", id=1, fullname=\"John Doe\",\r\n name=\"johndoe\", locale=\"es\",\r\n email_addr=\"johndoe@example.com\",\r\n new_name=None,\r\n btn='Profile'):\r\n url = \"/account/%s/update\" % name\r\n if new_name:\r\n name = new_name\r\n if (method == \"POST\"):\r\n return self.app.post(url,\r\n data={'id': id,\r\n 'fullname': fullname,\r\n 'name': name,\r\n 'locale': locale,\r\n 'email_addr': email_addr,\r\n 'btn': btn},\r\n follow_redirects=True)\r\n else:\r\n return self.app.get(url,\r\n follow_redirects=True)", "def update_profile():\n logger.debug(\"entering function update_profile\")\n response = update_user_profile(request.json)\n logger.debug(\"exiting function update_profile\")\n return jsonify(response)", "def test_update_profile(self):\n self.cim.update_profile(\n customer_id=u\"222\",\n description=u\"Foo bar baz quz\",\n email=u\"dialtone@gmail.com\",\n customer_profile_id=u\"122\"\n )", "def put(self, request, flavor_profile_id):\n update_flavor_profile(request)", "def user_profile_commit(self, hUserProfile):\n\t\treturn Job(SDK.PrlSrv_UserProfileCommit(self.handle, conv_handle_arg(hUserProfile))[0])", "def put(self, request):\n\n data = request.data\n test_id = data['test_id']\n data.pop(\"test_id\")\n test_data = UserTestHistory.objects.filter(id=test_id)\n\n try:\n test_data.update(**data)\n LOGGER.info(\"Test data updated successfully\")\n return Response({\"status\": \"SUCCESS\", \"message\": \"Record updated successfully\"})\n except Exception, error:\n LOGGER.error(\"Error:%s\", str(error))\n return Response({\"status\": \"FAILED\", \"message\": str(error)})", "def change_employee(self, employee):\n cursor = 
self.dbconnect.get_cursor()\n try:\n if employee.id == None:\n raise Exception('no id given')\n cursor.execute('select * from employee where employeeID=%s', (str(employee.id),))\n if cursor.rowcount == 0:\n raise Exception('no employee found with that id')\n cursor.execute(\n 'update employee set name= %s,email= %s,office= %s,title= %s,INTernORextern= %s,active= %s,promotor= %s where employeeID=%s',\n (employee.name, employee.email, employee.office, employee.title,\n employee.internOrExtern, employee.active, employee.promotor, employee.id))\n self.dbconnect.commit()\n except:\n self.dbconnect.rollback()\n raise Exception('unable to change employee')", "def update(self,\n ipfix_dfw_profile_id,\n i_pfixdfw_profile,\n ):\n return self._invoke('update',\n {\n 'ipfix_dfw_profile_id': ipfix_dfw_profile_id,\n 'i_pfixdfw_profile': i_pfixdfw_profile,\n })", "def set_profile_version(context, profile_id, version):\n\n check_profile_id(profile_id)\n ps = getToolByName(context, 'portal_setup')\n\n ps.setLastVersionForProfile(profile_id, unicode(version))\n assert(ps.getLastVersionForProfile(profile_id) == (version, ))\n print \"Set version for '%s' to '%s'.\" % (profile_id, version)", "def edit_employee_hours(employee_id):\n\n if not g.user:\n flash(\"Please login to access\", \"danger\")\n return redirect(\"/\")\n \n if g.user.is_admin == False:\n flash (\"Unauthorized\", \"danger\")\n return redirect(\"/login\")\n\n employee = Employee.query.get_or_404(employee_id)\n form = Edit_Hours_Form(obj = employee)\n\n if form.validate_on_submit():\n \n employee.completed = form.completed.data, \n employee.required = form.required.data,\n \n db.session.commit()\n \n flash(f\"{employee.first_name} {employee.last_name} has been saved\", \"success\")\n return redirect(\"/administrator\")\n else:\n\n return render_template(\"/admin/edit_hours.html\", employee = employee, form = form)", "def save_profile(self):\n self.save()", "def update(self,\n tunnel_profile_id,\n ip_sec_vpn_tunnel_profile,\n ):\n return self._invoke('update',\n {\n 'tunnel_profile_id': tunnel_profile_id,\n 'ip_sec_vpn_tunnel_profile': ip_sec_vpn_tunnel_profile,\n })", "def enable(self,\n profile_id=None):\n if profile_id is None:\n self._enabled = True\n else:\n self._profiles[profile_id] = True", "def test_update_payment_profile(self):\n self.cim.update_payment_profile(\n customer_profile_id=u\"122\",\n customer_payment_profile_id=u\"444\",\n card_number=u\"422222222222\",\n expiration_date=u\"2009-10\"\n )", "def update_TradeHistory(self, market):\n ##self.marketid is to do!!!\n mid = self.marketid(market)\n history = self.Request.fetch('markettrades',params={'marketid':mid})\n pair = self.Pairs[mid]\n self.TradeHistory[pair] = history\n return 0", "def saveUsersProfiles_(self, plist):\r\n \r\n LogInfo(u\"Saving update profiles with PublicationDate %@\", plist[u\"PublicationDate\"])\r\n if not plist.writeToFile_atomically_(self.userUpdateProfilesPath, False):\r\n LogError(u\"Failed to write %@\", self.userUpdateProfilesPath)", "def edit_profile(self, name, username, email):\n return self.app.post('/_editProfile', data = dict(\n name = name,\n username = username,\n email = email\n ), follow_redirects = True)", "def update_by_id(cls, id, name, surname):\n\t\tauthor = Author.query.get(id)\n\t\tauthor.name = name\n\t\tauthor.surname = surname\n\t\tdb.session.commit()", "def update_result(result):\n invader = result['invader'] \n countryOfInvader = result['countryOfInvader'] \n defender = result['defender'] \n countryOfdefender = 
result['countryOfdefender'] \n invasionHistory = InvasionHistory(\n invader ,\n countryOfInvader,\n defender,\n countryOfdefender,\n datetime.datetime.now() )\n\n #update territory info\n t_record = Territory.get_by_key_name(countryOfdefender)\n t_record.occupier = invader\n t_record.put() \n\n db.put([invasionHistory])", "def put(self, request, pk):\n data = request.data\n data.pop('skills')\n Department_name = data.pop('department')\n department = Department.objects.get(name=Department_name)\n manager_name = data.pop('manager')\n manager = Manager.objects.get(name=manager_name)\n EmployeeDetail.objects.filter(pk=pk).update(department=department, manager=manager, **data)\n return Response(\n data=\"request.data\"\n )", "def put(id, last_name, first_name,\n email,\n password,\n alt_email,\n alt_phone_no,\n phone_no,\n department,\n designation,\n profile_picture_url,\n country,\n state,\n city,\n address,\n created_date,\n last_updated_date):\n # checking session token\n headers = request.headers\n if request.headers.get('Authorization') is None:\n res = jsonify({\"data\": [], \"status\": \"error\", \"message\": \"Require session token\"})\n return make_response(res, 401)\n if JWT.is_valid(headers.get('Authorization')) is False:\n res = jsonify({\"data\": [], \"status\": \"error\", \"message\": \"Invalid session token\"})\n return make_response(res, 401)\n # checking session token ends\n # hash password\n if password is not None:\n password = Bcrypt.get_hashed_password(password)\n repository = HRRepository()\n hr = repository.update(\n id=id,\n last_name=last_name,\n first_name=first_name,\n email=email,\n password=password,\n alt_email=alt_email,\n alt_phone_no=alt_phone_no,\n phone_no=phone_no,\n department=department,\n designation=designation,\n profile_picture_url=profile_picture_url,\n country=country,\n state=state,\n city=city,\n address=address,\n created_date=created_date,\n last_updated_date=last_updated_date)\n try:\n res = jsonify({\"data\": hr.json, \"status\": \"success\"})\n except:\n res = jsonify({\"hr\": hr})\n return make_response(res, 200)", "def update_employee(emp_id, key=None, value=None, items=None):\n if items is None:\n if key is None or value is None:\n return {\"Error\": \"At least one key/value pair is required\"}\n items = {key: value}\n elif isinstance(items, str):\n items = salt.utils.yaml.safe_load(items)\n\n xml_items = \"\"\n for pair in items:\n xml_items += '<field id=\"{}\">{}</field>'.format(pair, items[pair])\n xml_items = \"<employee>{}</employee>\".format(xml_items)\n\n status, result = _query(\n action=\"employees\",\n command=emp_id,\n data=xml_items,\n method=\"POST\",\n )\n\n return show_employee(emp_id, \",\".join(items.keys()))", "def update(self, profiles, matches):\n raise NotImplementedError()", "def add_skills_to_profile():\n # get specific objects\n profile = storage.get(\"Profile\", profile_id)\n skills = storage.get(\"Skills\", skills_id)\n if profile is not None and skills is not None:\n # check every skill in profile\n for profile_skill in profile.skills:\n # if the given skill is already linked to profile, return\n if profile_skill.id == skills.id:\n return jsonify(skills.to_dict()), 200\n # if skill is not in profile, append skill and save\n profile.skills.append(skills)\n profile.save()\n return jsonify(skills.to_dict()), 201\n\n # if id not in database, abort\n abort(404)", "def update(self) -> None:\n self.previous_status = self.status\n\n jobs = self._client.describe_jobs(jobs = [ self.id ])[\"jobs\"]\n\n try:\n self.state = 
jobs[0]\n except IndexError:\n raise ValueError(\"Invalid or unknown job id %s\" % self.id) from None", "def ipfixprofiles(self, ipfixprofile_id, data, tenant_id=None, api_version=\"v2.0\"):\n\n if tenant_id is None and self._parent_class.tenant_id:\n # Pull tenant_id from parent namespace cache.\n tenant_id = self._parent_class.tenant_id\n elif not tenant_id:\n # No value for tenant_id.\n raise TypeError(\"tenant_id is required but not set or cached.\")\n cur_ctlr = self._parent_class.controller\n\n url = str(cur_ctlr) + \"/{}/api/tenants/{}/ipfixprofiles/{}\".format(api_version,\n tenant_id,\n ipfixprofile_id)\n\n api_logger.debug(\"URL = %s\", url)\n return self._parent_class.rest_call(url, \"put\", data=data)", "def request_user_baseline_update():\n target_user = User.query.filter_by(id=request.args['id']).first()\n if target_user is None:\n return user_list(\"Unknown user.\")\n\n return Response(render_template('employee/user/update_baseline.html',\n csrf_token=(\n get_raw_jwt() or {}).get(\"csrf\"),\n target=\"/employee/user/update\",\n first_name=target_user.first_name,\n name=target_user.name,\n baseline=target_user.baseline,\n id=target_user.id,\n mimetype='text/html'))", "def do(self, market_data):\r\n self.data.history = self.data.history + market_data", "def edit_server_profile_for_dl(profile_obj):\n # This keyword is deprecated, please do not use.\n FusionUIBase.navigate_to_section(SectionType.SERVER_PROFILES, time_for_loading=5)\n\n total = len(profile_obj)\n not_exists = 0\n edited = 0\n\n for n, profile in enumerate(profile_obj):\n logger.info(\"{2} No: {0} --- Total: {1} {2}\".format((n + 1), total, '-' * 14))\n\n logger.info(\"editing a server profile with name '%s' ...\" % profile.name)\n if not VerifyServerProfile.verify_server_profile_exist(profile.name, fail_if_false=False):\n logger.warn(\"server profile '%s' does not exist\" % profile.name)\n not_exists += 1\n continue\n # - Prep the auto_power_off switch\n # - By default, this keyword will power off the server if it's powered on -- unless the attribute 'auto_power_off' is explicitly set to 'false'\n auto_power_off = False if getattr(profile, 'auto_power_off', '').lower() == 'false' else True\n # open Edit SP dialog and enter data ...\n CommonOperationServerProfile.click_server_profile(profile.name)\n EditServerProfile.select_action_edit()\n EditServerProfile.wait_edit_server_profile_dialog_shown()\n\n EditServerProfile.input_name(profile.newName)\n EditServerProfile.input_description(profile.desc)\n # Input 'Server hardware'\n # - input server name,\n # - select option from the popped out drop-down list,\n # - verify the server hardware is refreshed to the type name displayed in the drop-down list for selecting server hardware\n if not EditServerProfile.input_select_server_hardware(profile.server, auto_power_off=auto_power_off):\n logger.warn(\"server hardware '%s' is not selected for editing server profile, may be wrong name, or powered on but failed to power it off. 
\"\n \"test will skip this profile '%s' and continue to edit other server profiles\" % (profile.server, profile.name))\n continue\n msg = EditServerProfile.get_error_message_from_server_hardware()\n if msg is not None:\n logger.warn(\"error occurred, server profile can not be edited successfully\")\n ui_lib.fail_test(msg)\n sht_selected = EditServerProfile.get_selected_server_hardware_type(profile.server)\n if profile.hardwaretype not in sht_selected:\n logger.warn(\"the server hardware type of server '%s' is NOT consistent with test data '%s'\" % (sht_selected, profile.hardwaretype))\n # set boot mode if attribute 'manageBootMode' is true - only for Gen 9 (or later) server:\n FusionUIBase.select_view_by_name('Boot Settings')\n if 'gen9' in sht_selected.lower():\n logger.info(\"setting 'Boot mode' for Gen 9 specially ...\")\n if getattr(profile, 'manageBootMode', '').lower() == 'true':\n CommonOperationServerProfile.BootSettings.tick_manage_boot_mode()\n CommonOperationServerProfile.BootSettings.select_boot_mode_by_text(profile.bootMode) if hasattr(profile, 'bootMode') else None\n if getattr(profile, 'bootMode', '').lower() == 'legacy bios':\n CommonOperationServerProfile.BootSettings.set_legacy_bios_mode_boot_order(profile)\n else:\n CommonOperationServerProfile.BootSettings.set_non_legacy_bios_mode_boot_order(profile, hardware_type=sht_selected)\n else:\n CommonOperationServerProfile.BootSettings.untick_manage_boot_mode()\n else:\n CommonOperationServerProfile.BootSettings.set_legacy_bios_mode_boot_order(profile)\n\n EditServerProfile.click_ok_button()\n # if EditServerProfile.get_error_message_from_boot_mode() is not None:\n if CommonOperationServerProfile.BootSettings.get_error_message_from_boot_mode() is not None:\n logger.warn(\"test data may be wrongly defined for 'Boot mode', which caused an error that blocks profile being edited. \"\n \"Test will skip this profile '%s' and continue to edit other server profiles\" % profile.name)\n continue\n\n status, msg = FusionUIBase.get_error_message_from_dialog(timeout=10)\n if status is True:\n logger.warn(\"unexpected error occurred: %s\" % msg)\n ui_lib.fail_test(msg)\n\n EditServerProfile.wait_edit_server_profile_dialog_disappear(timeout=180)\n FusionUIBase.show_activity_sidebar()\n FusionUIBase.wait_activity_action_ok(profile.newName, 'Update', timeout=300, fail_if_false=False)\n FusionUIBase.show_activity_sidebar()\n CommonOperationServerProfile.wait_server_profile_status_ok(profile.newName, timeout=180, fail_if_false=False)\n logger.info(\"edited server profile '%s' successfully\" % profile.newName)\n edited += 1\n\n logger.info(\"{0} == Summary == {0}\".format('-' * 14))\n if total - not_exists == 0:\n logger.warn(\"no server profile to edit! 
all %s server profile(s) is NOT existing, hence test is considered PASS\" % not_exists)\n return True\n else:\n if edited < total:\n logger.warn(\"not all of the server profile(s) is successfully edited - %s out of %s edited \" % (edited, total))\n if edited + not_exists == total:\n logger.warn(\"%s non-existing server profile(s) is skipped being edited, hence test is considered PASS\" % not_exists)\n return True\n else:\n logger.warn(\"%s non-existing server profile(s) is skipped being edited, but %s profile(s) left is failed being edited \" % (not_exists, total - edited - not_exists))\n return False\n\n logger.info(\"all of the server profile(s) is successfully edited - %s out of %s \" % (edited, total))\n return True", "def update_user(id):\n with app.app_context():\n user = User.query.get(id)\n if user is None:\n return \"User not found\", 404\n skills = validate_skills(request.get_json().get(\"skills\"))\n if not skills:\n return \"Invalid skills\", 400\n\n for skill in skills:\n skill_db = Skill.query.filter_by(name=skill).first()\n if skill_db is None:\n skill_db = Skill(name=skill)\n db.session.add(skill_db)\n \n user.skills = [\n skill for skill in Skill.query.filter(Skill.name.in_(skills)).all()\n ]\n \n users_response = UsersResponse(\n users=[\n {\n \"id\": user.id,\n \"name\": user.name,\n \"skills\": [skill.name for skill in user.skills]\n }\n ]\n )\n db.session.commit()\n return users_response.json(), 200", "def test_editProfile(self):\n\t\turl = \"/profiles/1/\"\n\t\tdata = { \"name\" : \"testName\", \"surname\" : \"testSurname\" }\n\t\tresponse = self.client.patch(url, data, format='json')\n\t\tself.assertEqual(response.status_code, status.HTTP_200_OK)\n\t\tself.assertEqual(response.data[\"name\"], \"testName\")\n\t\tself.assertEqual(response.data[\"surname\"], \"testSurname\")", "def update_user_profile_deep(id):\n token = request.json['token']\n u = user.User.query.filter(user.User.token == token).first()\n if u is None:\n abort(404)\n if u.id != id:\n print \"user id is wrong.\" #TODO: Support log system\n abort(500)\n u.name = request.json['name']\n u.title = request.json['title']\n u.company = request.json['company']\n u.nickname = request.json['company']\n u.phone_number = request.json['phone_number']\n u.email = request.json['email']\n if request.json['password'] != None and request.json['password'] != '':\n u.password = request.json['password']\n db.session.merge(u)\n db.session.commit()\n return jsonify(u.to_dict())", "def update_audit_info(progress_controller=None):\n if progress_controller is None:\n progress_controller = ProgressControllerBase()\n progress_controller.maximum = 2\n\n from stalker.db.session import DBSession\n from stalker import LocalSession\n\n with DBSession.no_autoflush:\n local_session = LocalSession()\n logged_in_user = local_session.logged_in_user\n progress_controller.increment()\n\n if logged_in_user:\n # update the version updated_by\n from anima.dcc import mayaEnv\n\n m_env = mayaEnv.Maya()\n v = m_env.get_current_version()\n if v:\n v.updated_by = logged_in_user\n\n from stalker.db.session import DBSession\n\n DBSession.commit()\n progress_controller.increment()\n progress_controller.complete()", "def updateEMPStudy(self, study_id, study_name, investigation_type, miens_compliant, submit_to_insdc, \n portal_type, study_title, study_alias, pmid, study_abstract, study_description,\n number_samples_collected, number_samples_promised , lab_person,\n lab_person_contact, emp_person, first_contact, most_recent_contact, sample_type, \n 
has_physical_specimen, has_extracted_data, timeseries, spatial_series,\n principal_investigator, principal_investigator_contact, default_emp_status, funding,\n includes_timeseries):\n con = self.getMetadataDatabaseConnection()\n results = con.cursor().callproc('qiime_assets.emp_study_update', \n [study_id, study_name, investigation_type, miens_compliant, submit_to_insdc, portal_type, \n study_title, study_alias, pmid, study_abstract, study_description,\n number_samples_collected, number_samples_promised , lab_person,\n lab_person_contact, emp_person, first_contact, most_recent_contact, sample_type, \n has_physical_specimen, has_extracted_data, timeseries, spatial_series,\n principal_investigator, principal_investigator_contact, default_emp_status, funding,\n includes_timeseries])", "def update(self,\n dpd_profile_id,\n ip_sec_vpn_dpd_profile,\n ):\n return self._invoke('update',\n {\n 'dpd_profile_id': dpd_profile_id,\n 'ip_sec_vpn_dpd_profile': ip_sec_vpn_dpd_profile,\n })", "def updateMdrizskyHistory(self,build):\n _plist = self.assoc.parlist[0]\n if build == True: _sky_output = _plist['output']\n else: _sky_output = _plist['outdata']\n \n fhdu = pyfits.open(_sky_output,mode='update')\n prihdr = fhdu[0].header\n \n for sky in self._getMdrizskyValues():\n sky_str = sky[0]+' MDRIZSKY = '+str(sky[1])\n prihdr.add_history(sky_str)\n \n fhdu.close()\n del fhdu", "def savepoint_rollback(self, id):\n self.execute(\"ROLLBACK TO SAVEPOINT {}\".format(id))", "def update_status(cls, build, build_profile_id, active):\n app_id = build.copy_of\n build_id = build.get_id\n version = build.version\n try:\n build_profile = LatestEnabledBuildProfiles.objects.get(\n app_id=app_id,\n version=version,\n build_profile_id=build_profile_id,\n build_id=build_id\n )\n except cls.DoesNotExist:\n build_profile = LatestEnabledBuildProfiles(\n app_id=app_id,\n version=version,\n build_profile_id=build_profile_id,\n build_id=build_id,\n domain=build.domain\n )\n # assign it to avoid re-fetching during validations\n build_profile._build = build\n build_profile.activate() if active else build_profile.deactivate()", "def flush_policy_history(self, flush_policy_history):\n\n self._flush_policy_history = flush_policy_history", "def update_job(self, job_id, end, status, output_path):\n\n # Close the DB connections\n django.db.connection.close()\n\n try:\n Job.objects.filter(id=job_id).update(\n end=end,\n status=status\n )\n\n qas = list()\n\n for product in glob.glob(output_path):\n qa = self.create_qa_bulk(product, job_id)\n if not qa:\n logger.warning('Error to create QA: {}'.format(product))\n continue\n\n qas.append(qa)\n\n QA.objects.bulk_create(qas)\n\n logger.info('Job {} updated.'.format(job_id))\n except Exception as err:\n logger.error('Job {} failed.'.format(job_id))\n logger.error(err)", "def test_userprofile_modification(self):\n self.user.userprofile.save(update_fields=['enabled'])\n self.assertTrue(AuditTrail.objects.count() >= 2)\n self.assertEqual(\n AuditTrail.objects.last().level, AuditTrail.LEVEL_INFO)", "def profile_data(self, profile_data):\n\n self._profile_data = profile_data", "def update_status(payment_id):\n EpayPayment = apps.get_model('epay', 'EpayPayment')\n with transaction.atomic():\n response = PaymentProcessor.get_status(payment_id)\n epay_payment = EpayPayment.import_or_update(response, payment_id=payment_id)\n return epay_payment", "def update(self, progress, save_id):\n\n cursor = self._conn.cursor()\n sql = \"UPDATE saves SET progress = ? 
WHERE id = ?\"\n cursor.execute(sql, (progress, save_id))\n self._conn.commit()", "def edit_profile(request):\n profile_to_edit = get_object_or_404(UserProfile, user=request.user)\n if request.method == \"POST\":\n form = UserProfileForm(request.POST, instance=profile_to_edit)\n if form.is_valid:\n form.save()\n messages.success(request, \"Profile updated succesfully\")\n return redirect('profile')\n else:\n messages.error(request, \"Updated failed. \\\n Please ensure the form is valid\")\n else:\n profile_form = UserProfileForm(instance=profile_to_edit)\n template = 'profiles/edit_profile.html'\n context = {\n 'form': profile_form,\n }\n return render(request, template, context)", "def update_history(self, loss, speakers):\r\n loss_copy = loss.detach().cpu().numpy()\r\n for loss_value, speaker in zip(loss_copy, speakers):\r\n speaker_index = self.s2i[speaker]\r\n \"\"\"Extract row corresponding to speaker\"\"\"\r\n history_row = self.history[speaker_index]\r\n \"\"\"Shift all elements by 1 to the right\"\"\"\r\n history_row = np.roll(history_row, shift=1)\r\n \"\"\"Overwrite the first value (the last value in the array rolled to the front and is overwritten\"\"\"\r\n history_row[0] = loss_value\r\n \"\"\"Set the history row equal to the modified row\"\"\"\r\n self.history[speaker_index] = history_row", "def update(self,\n ipfix_collector_profile_id,\n i_pfix_collector_profile,\n ):\n return self._invoke('update',\n {\n 'ipfix_collector_profile_id': ipfix_collector_profile_id,\n 'i_pfix_collector_profile': i_pfix_collector_profile,\n })", "def user_update_profile():\n \n if 'userid' and 'email' not in request.forms:\n return {'status':'Failure','message':'User Id is missing,please try with correct data.'}\n \n data = user_obj.user_update_profile(request.forms)\n return data", "def updateEMPSampleData(self, sample_id, sample_score, emp_status, web_app_user_id):\n con = self.getMetadataDatabaseConnection()\n con.cursor().callproc('qiime_assets.update_emp_sample_data', [sample_id, sample_score, emp_status, web_app_user_id])", "def updateJobsTable(self):\n self.checkJobsDict()\n jobdict = self.DB.meta.peatsa_jobs \n M = TableModel()\n #open job log from file\n f=open('jobstates.log','r')\n jl = pickle.load(f) \n for j in jobdict: \n jobid = jobdict[j] \n try:\n M.addRecord(j,state=jl[jobid]['State'],date=jl[jobid]['Date'])\n except:\n M.addRecord(j,state='Not in DB')\n self.jobstable = TableCanvas(self.tf, model=M, height=100, editable=False)\n self.jobstable.createTableFrame() \n self.log.yview('moveto', 1)\n f.close()\n return", "def __profileChanged(self, inst, topic, value):\n\n old, new = value\n\n if new is orthoeditprofile.OrthoEditProfile:\n self.__addEditMenu()\n elif old is orthoeditprofile.OrthoEditProfile:\n self.__removeEditMenu()", "def add_profile(self, profile):\r\n self.profiles.append(profile)", "def on_job_update(job):\n assert isinstance(job.id, uuid.UUID), 'Job id is not UUID!'\n job_log = job_event_log.setdefault(job.id, [])\n job_log.append(job.state)", "def updateByOid(self, oid, ddict=None, **kwdict):\n if not oid:\n return False\n if ddict is None:\n if kwdict.get(\"finish_time\") is None:\n kwdict['finish_time'] = self.now\n\n if kwdict.get(\"retry\"):\n self.mgdb.task_history.update(\n {\"_id\": ObjectId(oid)}, {\"$inc\": {\"retry\": 1}})\n del kwdict[\"retry\"]\n else:\n kwdict = ddict\n if not isinstance(kwdict, dict):\n return False\n\n return self.mgdb.task_history.update({\"_id\": ObjectId(oid)}, {\"$set\": kwdict})", "def put(self, _id):\n payload = 
self.request.json\n # TODO: validate the json before updating the db\n self.app.db.jobs.update({'_id': int(_id)}, {'$set': {'status': payload.get('status'), 'activity': payload.get('activity')}})", "def update_profile(model, payoff, attack_profile, \n defense_profile, attack_policy, defense_policy):\n n_action = payoff.shape[0]\n\n # A new row and column will be added to the payoff matrix \n new_payoff_col = np.array([])\n new_payoff_row = np.array([])\n\n # First get the new column \n for i in range(len(defense_profile)):\n new_payoff = get_payoff(model, attack_policy, defense_profile[i])\n new_payoff_col = np.append(new_payoff_col, new_payoff)\n new_payoff_col = np.expand_dims(new_payoff_col, axis=0)\n attack_profile.append(attack_policy) \n payoff = np.concatenate((payoff, new_payoff_col.T), axis=1)\n\n # Second, get the new row\n for j in range(len(attack_profile)):\n new_payoff = get_payoff(model, attack_profile[j], defense_policy)\n new_payoff_row = np.append(new_payoff_row, new_payoff)\n new_payoff_row = np.expand_dims(new_payoff_row, axis=0)\n defense_profile.append(defense_policy)\n payoff = np.concatenate((payoff, new_payoff_row), axis=0)\n return payoff, attack_profile, defense_profile", "def update_prediction_run(session: Session, prediction_run: PredictionModelRunTimestamp):\n session.add(prediction_run)\n session.commit()", "def updateStepLog(self, data: Dict) -> None:\n step_payload = {\n **data,\n **{\n \"step_end_ts\": str(datetime.datetime.now()),\n \"upsert_by\": \"DLoaderMS\",\n \"upsert_ts\": str(datetime.datetime.now()),\n },\n }\n UpdateQuery = \"\"\"\n UPDATE file_process_step_log\n SET step_status = '{step_status}',\n step_status_detail = '{step_status_detail}',\n step_end_ts = timestamp '{step_end_ts}',\n upsert_by = '{upsert_by}',\n upsert_ts = timestamp '{upsert_ts}'\n WHERE step_id = {step_id}\n \"\"\"\n cursor = self.engine.cursor()\n try:\n cursor.execute(UpdateQuery.format(**step_payload))\n except Exception as e:\n raise DLoaderException(\n \"Failed while inserting data into audit table {0}\".format(e)\n )\n finally:\n cursor.close()", "def update(self,\n ipfix_l2_collector_profile_id,\n i_pfix_l2_collector_profile,\n ):\n return self._invoke('update',\n {\n 'ipfix_l2_collector_profile_id': ipfix_l2_collector_profile_id,\n 'i_pfix_l2_collector_profile': i_pfix_l2_collector_profile,\n })" ]
[ "0.5801277", "0.57033116", "0.55727786", "0.5357609", "0.5347083", "0.52954924", "0.529461", "0.5294242", "0.52700007", "0.5151352", "0.5092951", "0.5087638", "0.5076853", "0.5068695", "0.5017037", "0.50102806", "0.50093156", "0.5007281", "0.50012374", "0.5000325", "0.49863422", "0.49825117", "0.4979344", "0.49755374", "0.49453056", "0.49182734", "0.49078512", "0.49030033", "0.48741964", "0.48657006", "0.48073074", "0.48045957", "0.48040506", "0.479342", "0.4789838", "0.47855738", "0.478032", "0.47526452", "0.4732913", "0.47306472", "0.47227317", "0.471765", "0.47156662", "0.47081795", "0.47044343", "0.4690334", "0.46762875", "0.46741197", "0.46597227", "0.46493438", "0.46342722", "0.4624089", "0.45946416", "0.45886844", "0.45817277", "0.4578259", "0.45782074", "0.4565806", "0.45557782", "0.4552061", "0.45372996", "0.4536079", "0.45334", "0.45252126", "0.45104432", "0.45057282", "0.44974282", "0.44703472", "0.44678923", "0.4464744", "0.4462438", "0.4460846", "0.44601575", "0.44527075", "0.4449784", "0.4448923", "0.4445232", "0.4442673", "0.44421148", "0.44394675", "0.4421236", "0.44190228", "0.44064718", "0.4396271", "0.4394374", "0.4390456", "0.43821022", "0.43789482", "0.43789434", "0.43774557", "0.43752527", "0.4365692", "0.43479866", "0.43447927", "0.43421748", "0.43408453", "0.43384612", "0.43379676", "0.43369052", "0.43366557" ]
0.6740493
0
Update education for given profile id.
def update_education(education_list, profile_id):
    saved_education_ids = set()
    for education in education_list:
        education_id = education.get("id")
        if education_id is not None:
            try:
                education_instance = Education.objects.get(profile_id=profile_id, id=education_id)
            except Education.DoesNotExist:
                raise ValidationError("Education {} does not exist".format(education_id))
        else:
            education_instance = None
        education_serializer = EducationSerializer(instance=education_instance, data=education)
        education_serializer.is_valid(raise_exception=True)
        education_serializer.save(profile_id=profile_id)
        saved_education_ids.add(education_serializer.instance.id)

    Education.objects.filter(profile_id=profile_id).exclude(id__in=saved_education_ids).delete()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def update_profile(profile_id):\n \n profile = mongo.db.profiles\n profile.find_one_and_update({'_id': ObjectId(profile_id)},\n {'$set': {'date': datetime.utcnow(),\n 'headline': request.form.get('headline'),\n 'bio': request.form.get('bio'),\n 'xp': request.form.get('xp'),\n 'interests': request.form.get('interests'),\n 'stack': request.form.get('stack'),\n 'languages': request.form.get('languages'),\n 'frameworks': request.form.get('frameworks'),\n 'github': request.form.get('github'),\n 'linkedin': request.form.get('linkedin')\n }\n }\n )\n return redirect(url_for('dashboard'))", "def put(self, id):\n adm = Administration()\n print(api.payload)\n lp = LearnProfile.from_dict(api.payload)\n if lp is not None:\n lp.set_id(id)\n adm.save_learnprofile(lp)\n return lp, 200\n\n else:\n return '', 500", "def update_profile(orcid_id, data=None):\n \n u = db.session.query(User).filter_by(orcid_id=orcid_id).first()\n if u:\n u.updated = datetime.utcnow()\n if data:\n u.profile = data\n # save the user\n db.session.begin_nested()\n try:\n db.session.add(u)\n db.session.commit()\n except exc.IntegrityError as e:\n db.session.rollback()\n # per PEP-0249 a transaction is always in progress \n db.session.commit()", "def update_profile(id):\n tags = request.form.get('tags')\n user = User.query.get(id)\n speaker = Speaker.query.filter_by(id_assigned_user=user.id).first()\n\n speaker.tags = tags\n try:\n db.session.commit()\n except:\n abort(500)\n\n return redirect(url_for('get_profile', id=user.id))", "def edit_profile(profile_id):\n # This check is in place to avoid users trying to edit a profile via the dashboard\n # when they have not created one. If not the option is not displayed\n user = mongo.db.user.find_one({'username': session['username']})\n chck = mongo.db.profiles.find_one_or_404({'user_id': user['_id']})\n if chck: \n profile = mongo.db.profiles.find_one(\n {'_id': ObjectId(profile_id)})\n \n form=ProfileForm()\n form.headline.data = profile['headline']\n form.bio.data = profile['bio']\n form.xp.data = profile['xp']\n form.interests.data = profile['interests']\n form.stack.data = profile['stack']\n form.languages.data = profile['languages']\n form.frameworks.data = profile['frameworks']\n form.github.data = profile['github']\n form.linkedin.data = profile['linkedin']\n \n return render_template('pages/editprofile.html', form=form, profile=profile, legend='Edit your Profile')", "def update_policy_profile(self, profile, body=None):\r\n return self.put(self.policy_profile_path % (profile), body=body)", "def put(self, id ):\n adm = Administration()\n print(api.payload)\n p = Profile.from_dict(api.payload)\n if p is not None:\n p.set_id(id)\n adm.save_profile(p)\n return p, 200\n else:\n return '', 500", "def update(self,\n ike_profile_id,\n ip_sec_vpn_ike_profile,\n ):\n return self._invoke('update',\n {\n 'ike_profile_id': ike_profile_id,\n 'ip_sec_vpn_ike_profile': ip_sec_vpn_ike_profile,\n })", "def update(self,\n dpd_profile_id,\n ip_sec_vpn_dpd_profile,\n ):\n return self._invoke('update',\n {\n 'dpd_profile_id': dpd_profile_id,\n 'ip_sec_vpn_dpd_profile': ip_sec_vpn_dpd_profile,\n })", "def education(self, education):\n\n self.logger.debug(\"In 'education' setter.\")\n\n self._education = education", "def update_course_enrollment(self, student_id, course_id, course_section_id, term):\n conn = sqlite3.connect(self.db_path)\n cursor = conn.cursor()\n try:\n with conn:\n cursor.execute(\n \"\"\"\n UPDATE course_enrollments\n SET course_id = ?, course_section_id = ?\n WHERE student_id = 
?\n                    \"\"\",\n                    (course_id, course_section_id, student_id),\n                )\n                return 1\n        except sqlite3.IntegrityError:\n            return -1", "def update(self, profile: Dict[datetime.time, float]) -> None:\n\n        if self._profile is None:\n            self._profile = profile\n        else:\n            self._profile.update(profile)", "def edit_profile(request):\n profile_to_edit = get_object_or_404(UserProfile, user=request.user)\n if request.method == \"POST\":\n form = UserProfileForm(request.POST, instance=profile_to_edit)\n if form.is_valid():\n form.save()\n messages.success(request, \"Profile updated successfully\")\n return redirect('profile')\n else:\n messages.error(request, \"Update failed. \\\n Please ensure the form is valid\")\n else:\n profile_form = UserProfileForm(instance=profile_to_edit)\n template = 'profiles/edit_profile.html'\n context = {\n 'form': profile_form,\n }\n return render(request, template, context)", "def put(self, request, flavor_profile_id):\n update_flavor_profile(request)", "def update(self,\n tunnel_profile_id,\n ip_sec_vpn_tunnel_profile,\n ):\n return self._invoke('update',\n {\n 'tunnel_profile_id': tunnel_profile_id,\n 'ip_sec_vpn_tunnel_profile': ip_sec_vpn_tunnel_profile,\n })", "def putProfile(profileType,value):\n # PUT /profile/$profileType\n pass", "def edit_profile(request, pk=None):\n profiledetails = UserProfile.objects.filter(user=request.user).first()\n if UserProfile.objects.filter(user=request.user or request.user.is_superuser):\n\n if request.method == \"POST\":\n profile_details_form = UserProfileForm(request.POST, request.FILES, instance=profiledetails)\n if profile_details_form.is_valid():\n profiledetails = profile_details_form.save()\n messages.success(request, 'Your profile has been updated!')\n return redirect(user_profile)\n else:\n profile_details_form = UserProfileForm(instance=profiledetails)\n else:\n return HttpResponseForbidden()\n \n return render(request, 'newprofiledetails.html', {'profile_details_form': profile_details_form})", "def test_update_profile(self):\n self.cim.update_profile(\n customer_id=u\"222\",\n description=u\"Foo bar baz quz\",\n email=u\"dialtone@gmail.com\",\n customer_profile_id=u\"122\"\n )", "def update_profile(request, id):\n username = request.POST.get(\"username\")\n first_name, last_name = request.POST.get(\"fullname\").split()\n email = request.POST.get(\"email\")\n phone = request.POST.get(\"phone\")\n address = request.POST.get(\"address\")\n\n userObj = User.objects.get(id=id)\n userObj.first_name = first_name\n userObj.last_name= last_name\n userObj.username= username\n userObj.email = email\n userObj.phone = phone\n userObj.address = address\n userObj.save()\n messages.success(request, \"Your profile is successfully updated.\", fail_silently=False)\n return redirect(\"user_profile\", id)", "def edit_profile(request):\r\n\r\n user = request.user\r\n profile = Profile.objects.for_user(user)\r\n\r\n if request.method != 'POST':\r\n profile_form = ProfileForm(instance=profile)\r\n user_form = UserForm(instance=user)\r\n else:\r\n profile_form = ProfileForm(request.POST, instance=profile)\r\n user_form = UserForm(request.POST, instance=user)\r\n\r\n if profile_form.is_valid() and user_form.is_valid():\r\n profile_form.save()\r\n user_form.save()\r\n\r\n return HttpResponseRedirect(reverse('epic.core.views.view_profile', kwargs={}))\r\n\r\n return render_to_response(\r\n 'core/edit_profile.html',\r\n {'profile_form': profile_form, 'user_form': user_form,},\r\n context_instance=RequestContext(request))", "def update_user(context, 
params):\n\n user = User.objects.filter(id=params.get('id')).first()\n if not user:\n raise ValueError(\"user not found\")\n user.language = Language.objects.filter(id=params.get('language_id', None)).first()\n user.deputy = User.objects.filter(id=params.get('deputy_id', None)).first()\n # user.edited_by = context.user\n\n user.save()\n\n update_person(context, user, params)\n\n user.save()\n return user", "def edit_student(request, student_id):\n\n\temp = models.Teacher.objects.get(user=request.user)\n\tif not emp.student_permit:\n\t\traise Http404\n\tstudent = models.Student.objects.filter(\n\t\tpk=student_id, soft_delete=False\n\t).first()\n\tif not student:\n\t\traise Http404\n\tcontext_dict = {\n\t\t\"all_courses\": context_helper.course_helper(),\n\t\t\"blood_groups\": context_helper.blood_group_helper(),\n\t\t\"guardian_types\": context_helper.guardian_type_helper(),\n\t\t\"gender_types\": context_helper.gender_helper(),\n\t\t'student_id': student_id\n\t}\n\tif request.method == 'POST':\n\t\tupdate_fields = []\n\t\tactivity = ''\n\t\tsname = request.POST.get('sname')\n\t\troll = request.POST.get('rno')\n\t\tdob = request.POST.get('dob')\n\t\tgender = request.POST.get('gender_picker')\n\t\tbgroup = request.POST.get('blood_group_picker')\n\t\tif bgroup == 'Choose option':\n\t\t\tbgroup = None\n\t\tphone = request.POST.get('phone')\n\t\tcurradd = request.POST.get('curradd')\n\t\tpermadd = request.POST.get('permadd')\n\t\tgname = request.POST.get('gname')\n\t\tcourse = request.POST.get('course_picker')\n\t\tbatch = request.POST.get('batch')\n\t\tgtype = request.POST.get('guardian_type_picker')\n\t\tgphone = request.POST.get('gphone')\n\t\temail = request.POST.get('email')\n\t\taddress_flag = request.POST.get('address_flag')\n\t\tprint (address_flag)\n\t\taddress_flag = True if address_flag == 'on' else False\n\t\tif address_flag == True:\n\t\t\tpermadd = curradd\n\t\ttry:\n\t\t\tif \"profile-img\" in request.FILES:\n\t\t\t\tstudent.photo = request.FILES[\"profile-img\"]\n\t\t\t\tupdate_fields.append('photo')\n\t\t\t\tactivity += 'Changed photo.\\n'\n\t\t\tif student.name != sname:\n\t\t\t\tstudent.name = sname\n\t\t\t\tupdate_fields.append('name')\n\t\t\t\tactivity += 'Changed name to '+ str(sname) +'.\\n'\n\t\t\tif student.roll_no != roll:\n\t\t\t\tstudent.roll_no = roll\n\t\t\t\tupdate_fields.append('roll_no')\n\t\t\t\tactivity += 'Changed roll number to '+ str(roll) +'.\\n'\n\t\t\tif str(student.dob) != str(dob):\n\t\t\t\tstudent.dob = dob\n\t\t\t\tupdate_fields.append('dob')\n\t\t\t\tactivity += 'Changed DOB to ' + str(dob) + '.\\n'\n\t\t\tif student.gender != gender:\n\t\t\t\tstudent.gender = gender\n\t\t\t\tupdate_fields.append('gender')\n\t\t\t\tactivity += 'Changed gender to ' + str(gender) + '.\\n'\n\t\t\tif student.blood_group != bgroup:\n\t\t\t\tstudent.blood_group = bgroup\n\t\t\t\tupdate_fields.append('blood_group')\n\t\t\t\tactivity += 'Changed blood group to ' + str(bgroup) + '.\\n'\n\t\t\tif student.phone != phone:\n\t\t\t\tstudent.phone = phone\n\t\t\t\tupdate_fields.append('phone')\n\t\t\t\tactivity += 'Changed phone number to ' + str(phone) + '.\\n'\n\t\t\tif student.curr_address != curradd:\n\t\t\t\tstudent.curr_address = curradd\n\t\t\t\tupdate_fields.append('curr_address')\n\t\t\t\tactivity += 'Changed current address to ' + str(curradd) + '.\\n'\n\t\t\tif student.perm_address != permadd:\n\t\t\t\tstudent.perm_address = permadd\n\t\t\t\tupdate_fields.append('perm_address')\n\t\t\t\tactivity += 'Changed permanent address to ' + str(permadd) + '.\\n'\n\t\t\tif 
student.guardian_name != gname:\n\t\t\t\tstudent.guardian_name = gname\n\t\t\t\tupdate_fields.append('guardian_name')\n\t\t\t\tactivity += 'Changed guardian name to ' + str(gname) + '.\\n'\n\t\t\tif student.guardian_phone != gphone:\n\t\t\t\tstudent.guardian_phone = gphone\n\t\t\t\tupdate_fields.append('guardian_phone')\n\t\t\t\tactivity += 'Changed guardian phone to ' + str(gphone) + '.\\n'\n\t\t\tif student.guardian_type != gtype:\n\t\t\t\tstudent.guardian_type = gtype\n\t\t\t\tupdate_fields.append('guardian_type')\n\t\t\t\tactivity += 'Changed guardian type to ' + str(gtype) + '.\\n'\n\t\t\tif str(student.course.pk) != str(course):\n\t\t\t\tstudent.course = models.Course.objects.get(pk=course)\n\t\t\t\tupdate_fields.append('course')\n\t\t\t\tactivity += 'Changed course to ' + str(course) + '.\\n'\n\t\t\tif student.batch != batch:\n\t\t\t\tstudent.batch = batch\n\t\t\t\tupdate_fields.append('batch')\n\t\t\t\tactivity += 'Changed batch to ' + str(batch) + '.\\n'\n\t\t\tif student.email != email:\n\t\t\t\tstudent.email = email\n\t\t\t\tupdate_fields.append('email')\n\t\t\t\tactivity += 'Changed email to ' + str(email) + '.\\n'\n\t\t\tif student.address_flag != address_flag:\n\t\t\t\tstudent.address_flag = address_flag\n\t\t\t\tupdate_fields.append('address_flag')\n\t\t\t\tactivity += 'Changed address flag.'\n\t\t\tstudent.save(update_fields=update_fields)\n\t\t\thistory = models.History(\n\t\t\t\tuser=emp,\n\t\t\t\tactivity=activity,\n\t\t\t\tactivity_type=\"edit student\"\n\t\t\t)\n\t\t\thistory.save()\n\t\t\tcontext_dict[\"message\"] = 'Successfully updated student.'\n\t\t\tcontext_dict[\"success\"] = True\n\t\texcept Exception as e:\n\t\t\tcontext_dict[\"message\"] = str(e)\n\t\t\tcontext_dict[\"success\"] = False\n\t\t\tprint(e)\n\tcontext_dict.update(context_helper.get_student_info(student))\n\tif type(context_dict['dob']) == str:\n\t\tcontext_dict['dob'] = datetime.strptime(context_dict['dob'], '%Y-%m-%d')\n\tfor i in context_dict['course']:\n\t\ttry: del context_dict['all_courses'][i]\n\t\texcept: pass\n\tfor i in context_dict['blood_group']:\n\t\ttry: context_dict['blood_groups'].remove(i)\n\t\texcept: pass\n\tfor i in context_dict['guardian_type']:\n\t\ttry: context_dict['guardian_types'].remove(i)\n\t\texcept: pass\n\tfor i in context_dict['gender_type']:\n\t\ttry: context_dict['gender_types'].remove(i)\n\t\texcept: pass\n\tif context_dict.get('success', False):\n\t\treturn HttpResponseRedirect('/view-students')\n\treturn render(\n\t\trequest, \"editStudent.html\", context_dict\n\t)", "def edit_profile(request):\n if request.method == 'POST':\n form = EditProfileForm(request.POST, instance=request.user)\n\n if form.is_valid():\n form.save()\n messages.success(request, 'Profile updated successfully.')\n return redirect('profile')\n\n else:\n messages.error(request, 'Invalid entry, please try again.')\n return redirect('edit_profile')\n else:\n form = EditProfileForm(instance=request.user)\n return render(request, 'accounts/edit_profile.html', {'form': form})", "def put(self, id=None):\n\n if not id:\n return {'msg':'Missing achievement id.'}, 400\n\n if not all(\n [request.form.get('roll_no'),\n request.form.get('name'),\n request.form.get('batch'),\n request.form.get('programme'),\n request.form.get('category'),]):\n \n return {'msg':'Field(s) missing.'}, 400\n\n try:\n ach = 
AcademicAchievement.query.get(id)\n\n if not ach:\n return {'msg':'Academic achievement not found'}, 404\n\n ach.roll_no = request.form.get('roll_no')\n ach.name = request.form.get('name')\n ach.batch = checkBatch(request.form.get('batch'))\n ach.programme = request.form.get('programme')\n ach.category = request.form.get('category')\n\n ach.save()\n data = ach.toDict()\n\n return {'data' : data}, 200\n\n except (ValueError, mongoalchemy.exceptions.BadValueException) as e:\n print(e)\n return {'msg':'Invalid form data.'}, 400\n\n except Exception as e:\n print(e)\n return {'msg':'Could not modify academic achievement.'}, 500", "def edit_profile(request):\n profile = request.user.profile\n form = forms.ProfileForm(instance=profile)\n\n if request.method == 'POST':\n if settings.SYSTEM_MAINTENANCE_NO_UPLOAD:\n # Allow submitting the form, but do not allow the photo to\n # be modified.\n if 'delete_photo' in request.POST or request.FILES:\n raise ServiceUnavailable()\n\n if 'edit_profile' in request.POST:\n # Update the profile and return to the same page. Place a message\n # at the top of the page: 'your profile has been updated'\n form = forms.ProfileForm(data=request.POST, files=request.FILES,\n instance=profile)\n if form.is_valid():\n form.save()\n messages.success(request, 'Your profile has been updated.')\n elif 'delete_photo' in request.POST:\n profile.delete_photo()\n messages.success(request, 'Your profile photo has been deleted.')\n\n if not form.errors:\n form = forms.ProfileForm(instance=profile)\n\n return render(request, 'user/edit_profile.html', {'form':form})", "def update_profile():\n logger.debug(\"entering function update_profile\")\n response = update_user_profile(request.json)\n logger.debug(\"exiting function update_profile\")\n return jsonify(response)", "def test_update_payment_profile(self):\n self.cim.update_payment_profile(\n customer_profile_id=u\"122\",\n customer_payment_profile_id=u\"444\",\n card_number=u\"422222222222\",\n expiration_date=u\"2009-10\"\n )", "def put(self, expense_id):\n return UpdateExpense(current_user.id, expense_id, request)", "def update_profile(self, method=\"POST\", id=1, fullname=\"John Doe\",\r\n name=\"johndoe\", locale=\"es\",\r\n email_addr=\"johndoe@example.com\",\r\n new_name=None,\r\n btn='Profile'):\r\n url = \"/account/%s/update\" % name\r\n if new_name:\r\n name = new_name\r\n if (method == \"POST\"):\r\n return self.app.post(url,\r\n data={'id': id,\r\n 'fullname': fullname,\r\n 'name': name,\r\n 'locale': locale,\r\n 'email_addr': email_addr,\r\n 'btn': btn},\r\n follow_redirects=True)\r\n else:\r\n return self.app.get(url,\r\n follow_redirects=True)", "def update_user_profile(email, **kwargs): # PUT\n user = coll(\"users\").find_one({\"_id\": email})\n if not user:\n return {\"message\": \"User not found\"}, 404\n\n coll(\"users\").update_one({\"_id\": email}, {\"$set\": kwargs})\n\n return {\"message\": \"User profile successfully updated\"}, 200", "def editProfile():\n form = EditProfileForm(request.form)\n if request.method == \"GET\":\n return render_template(\"/pages/editprofile.html\", form=form)\n else:\n choose = True\n section = form.category.data\n return redirect(url_for(\"editProfileSection\", section=section))", "def user_update_profile():\n \n if 'userid' and 'email' not in request.forms:\n return {'status':'Failure','message':'User Id is missing,please try with correct data.'}\n \n data = user_obj.user_update_profile(request.forms)\n return data", "def edit_profile(request):\n form = 
ProfileForm(instance=request.user.profile)\n if request.method == \"POST\":\n form = ProfileForm(data=request.POST, files=request.FILES,\n instance=request.user.profile)\n if form.is_valid():\n form.save()\n return redirect('profile')\n return render(request, 'accounts/forms.html', {'form': form})", "def update_user(id):\n with app.app_context():\n user = User.query.get(id)\n if user is None:\n return \"User not found\", 404\n skills = validate_skills(request.get_json().get(\"skills\"))\n if not skills:\n return \"Invalid skills\", 400\n\n for skill in skills:\n skill_db = Skill.query.filter_by(name=skill).first()\n if skill_db is None:\n skill_db = Skill(name=skill)\n db.session.add(skill_db)\n \n user.skills = [\n skill for skill in Skill.query.filter(Skill.name.in_(skills)).all()\n ]\n \n users_response = UsersResponse(\n users=[\n {\n \"id\": user.id,\n \"name\": user.name,\n \"skills\": [skill.name for skill in user.skills]\n }\n ]\n )\n db.session.commit()\n return users_response.json(), 200", "def update_network_profile(self, profile, body=None):\r\n return self.put(self.network_profile_path % (profile), body=body)", "def update_account_with(self, id_, **kwargs):\n self.update_user_with(id_, **kwargs)\n self.update_profile_with(id_, **kwargs)\n # TODO:\n # self.update_prefecture_with(id_, kwargs)", "def update_user_profile_info(user_id, user_fname, user_lname, email):\n \n user=User.query.filter(User.user_id == user_id).first()\n\n if email != None:\n user.update_email(email)\n if user_fname != None:\n user.update_first_name(user_fname)\n if user_lname != None:\n user.update_last_name(user_lname)\n \n db.session.commit()", "def edit_user_profile(user_id):\n if CURRENT_USER_KEY not in session or session[CURRENT_USER_KEY] != user_id:\n raise Unauthorized()\n\n user = User.query.get_or_404(user_id)\n\n form = UserEditForm(obj=user)\n\n if form.validate_on_submit():\n try:\n user.email = form.email.data\n user.username = form.username.data\n user.first_name = form.first_name.data.capitalize()\n user.last_name = form.last_name.data.capitalize()\n user.image_url = form.image_url.data or User.image_url.default.arg\n user.cover_url = form.cover_url.data or User.cover_url.default.arg\n user.bio = form.bio.data\n\n db.session.commit()\n except IntegrityError:\n db.session.rollback()\n flash(\n \"Email or Username already taken!! 
Please try again\", 'danger')\n return render_template('edit_profile.html', form=form, user=user, img_src=user.image_url)\n\n flash('Profile Successfully Updated!', 'success')\n return redirect(url_for('show_user_profile', user_id=user.id))\n return render_template('edit_profile.html', form=form, user=user, img_src=user.image_url)", "def profile_edit():\n form = ProfileForm(obj=current_user)\n\n if form.validate_on_submit():\n form.populate_obj(current_user)\n\n try:\n correct = True\n db.session.commit()\n\n flash(_('Profile updated correctly'), 'success')\n\n return render_template('admin/profile/edit.html', form=form)\n\n except IntegrityError:\n # Email already exists\n correct = False\n form.errors.email.append(_('Email is already registered'))\n\n return render_template('admin/profile/edit.html', form=form)\n\n except Exception:\n # Catch anything unknown\n correct = False\n\n flash(_('Failed to update profile, contact an administrator'), 'error')\n\n return render_template('admin/profile/edit.html', form=form)\n\n finally:\n if not correct:\n db.session.rollback()\n\n return render_template('admin/profile/edit.html', form=form)", "def test_editProfile(self):\n\t\turl = \"/profiles/1/\"\n\t\tdata = { \"name\" : \"testName\", \"surname\" : \"testSurname\" }\n\t\tresponse = self.client.patch(url, data, format='json')\n\t\tself.assertEqual(response.status_code, status.HTTP_200_OK)\n\t\tself.assertEqual(response.data[\"name\"], \"testName\")\n\t\tself.assertEqual(response.data[\"surname\"], \"testSurname\")", "def update_user_profile(request):\n if request.method == 'POST':\n form = UserProfileForm(request.POST)\n\n if form.is_valid():\n user = get_object_or_404(User, pk=request.user.pk)\n user.first_name = request.POST['first_name']\n user.last_name = request.POST['last_name']\n user.profile_picture = request.POST['profile_picture']\n user.save()\n messages.success(request, 'Your profile has been updated!')\n else:\n messages.error(\n request, 'Unable to update your profile. 
Please try again later.')\n\n return HttpResponseRedirect(request.META.get('HTTP_REFERER', reverse('dev_panel')))", "def update(\n self,\n email,\n company_name,\n location,\n job_profile,\n salary,\n username,\n password,\n security_question,\n security_answer,\n notes,\n date_applied,\n status,\n):", "def update_enrollment(context: dict) -> dict:\n enrollment = (\n session.query(Enrollment)\n .filter_by(\n subject_code=context[\"subject_code\"],\n student_ra=context[\"student_ra\"],\n year=context[\"year\"],\n semester=context[\"semester\"],\n )\n .first()\n )\n\n if enrollment:\n for attr in context.keys():\n setattr(enrollment, attr, context[attr])\n\n enrollment.save()\n\n return enrollment.asdict()", "def edit_profile(self, name, username, email):\n return self.app.post('/_editProfile', data = dict(\n name = name,\n username = username,\n email = email\n ), follow_redirects = True)", "def put(self, request, pk):\n data = request.data\n data.pop('skills')\n Department_name = data.pop('department')\n department = Department.objects.get(name=Department_name)\n manager_name = data.pop('manager')\n manager = Manager.objects.get(name=manager_name)\n EmployeeDetail.objects.filter(pk=pk).update(department=department, manager=manager, **data)\n return Response(\n data=\"request.data\"\n )", "def edit_training(training_id):\n\n if not g.user:\n flash(\"Please login to access\", \"danger\")\n return redirect(\"/\")\n \n if g.user.is_admin == False:\n flash (\"Unauthorized\", \"danger\")\n return redirect(\"/login\")\n\n training = Training.query.get_or_404(training_id)\n form = Training_Form(obj = training)\n\n if form.validate_on_submit():\n training.name = form.name.data\n training.city = form.city.data\n training.state = form.state.data\n training.room = form.room.data\n training.hours = form.hours.data\n\n db.session.commit()\n flash(f\"{training.name} has been updated\")\n return redirect(\"/administrator\")\n\n else:\n return render_template(\"/admin/edit_training.html\", form = form, training = training)", "def update_employee(employee):\n employee_id = get_employee_input_int(\"Enter the employee id you want to update\")\n newGrade = get_employee_input_int(\"Enter the new grade for \")\n db.update_employee(employee_id, newGrade)\n print(employee.full_name + \"'s grade value has been updated to :-> \", newGrade)", "def put(self, request):\n profile = Profile.get_by_id(request.user.id)\n if not profile:\n return HttpResponse(status=403)\n user = CustomUser.objects.get(id=request.user.id)\n update_data = json.loads(request.body.decode('utf-8'))\n user.update(first_name=update_data.get('first_name'),\n last_name=update_data.get('last_name'))\n profile.update(\n birthday=update_data.get('birthday'),\n gender=update_data.get('gender'),\n hobbies=update_data.get('hobbies'),\n facebook=update_data.get('facebook'))\n data = profile.to_dict()\n return JsonResponse(data, status=200)", "def test_update_profile_attribute(self):\n user = self.users[0]\n user.profile.bio = 'bio'\n user.profile.save()\n query = User.objects.first()\n self.assertTrue(query.profile.bio == 'bio')", "def test_update_profile(self):\n url = self.url\n url = url + '{}/'.format(\n self.profile.pk\n )\n response = self.client.patch(url)\n self.assertEqual(response.status_code, status.HTTP_200_OK)\n\n update_data = {\n 'first_name': 'UpdateTest'\n }\n\n response = self.client.patch(url, update_data, format='json')\n self.assertEqual(response.status_code, status.HTTP_200_OK)\n\n # Caso 1\n update_data['tasks_finalize'] = 
'14124123'\n update_data['tasks_pending'] = '124123132'\n update_data['tasks_created'] = '12412323'\n\n response = self.client.patch(url, update_data, format='json')\n self.assertEqual(response.data['tasks_finalize'], 0)\n self.assertEqual(response.data['tasks_pending'], 0)\n self.assertEqual(response.data['tasks_created'], 0)", "def edit_profile(request, userid):\n woofer_user = User.objects.get(id=userid)\n current_profile = Profile.objects.get(user=woofer_user)\n if woofer_user.id != request.user.id:\n return HttpResponseRedirect(reverse('view-profile', args=[userid]))\n\n if request.method == 'POST':\n form = ProfileForm(request.POST)\n if form.is_valid():\n new_profile = form.save(commit=False)\n # copy the ID of the User's current profile to the new profile so\n # Django performs an update when we call .save()\n new_profile.id = current_profile.id\n new_profile.user = woofer_user\n new_profile.save()\n return HttpResponseRedirect(reverse('view-profile', args=[userid]))\n else:\n form = ProfileForm(instance=current_profile)\n\n return render(request, 'woofer/show_form.html', {\n 'form' : form,\n 'message' : None,\n 'form_action' : reverse('edit-profile', args=[userid]),\n 'title' : \"Edit Profile\"\n })", "def update(self,\n ipfix_l2_profile_id,\n i_pfix_l2_profile,\n ):\n return self._invoke('update',\n {\n 'ipfix_l2_profile_id': ipfix_l2_profile_id,\n 'i_pfix_l2_profile': i_pfix_l2_profile,\n })", "def update(self,\n ipfix_dfw_profile_id,\n i_pfixdfw_profile,\n ):\n return self._invoke('update',\n {\n 'ipfix_dfw_profile_id': ipfix_dfw_profile_id,\n 'i_pfixdfw_profile': i_pfixdfw_profile,\n })", "def edit_attendance(request, attendance_id):\n\n\temp = models.Teacher.objects.get(user=request.user)\n\tif not emp.student_permit:\n\t\traise Http404\n\tattendance = models.Attendance.objects.filter(\n\t\tpk=attendance_id, soft_delete=False\n\t).first()\n\tprint(\"1\")\n\tcontext_dict = {\n\t\t\"all_subjects\": context_helper.subject_helper(),\n\t\t'attendance_id': attendance_id,\n\t}\n\tif request.method == 'POST':\n\t\tupdate_fields = []\n\t\tactivity = ''\n\t\troll = request.POST.get('roll')\n\t\tsubject = request.POST.get('subject_picker')\n\t\tobtained = request.POST.get('attendance')\n\t\ttotal = request.POST.get('total')\n\t\tstudent = models.Student.objects.filter(\n\t\t\troll_no=roll\n\t\t).first()\n\t\tif not student:\n\t\t\tcontext_dict[\"message\"] = 'Student at does not exist / Roll number has not been alloted.'\n\t\t\treturn render(request, \"editAttendance.html\", context_dict)\n\t\ttry:\n\t\t\tif attendance.student != student:\n\t\t\t\tattendance.student = student\n\t\t\t\tupdate_fields.append('student')\n\t\t\t\tactivity += 'Changed student to ' + str(student) + '.\\n'\n\t\t\tif attendance.total_attendance != total:\n\t\t\t\tattendance.total_attendance = total\n\t\t\t\tupdate_fields.append('total_attendance')\n\t\t\t\tactivity += 'Changed total attendance to ' + str(total) + '.\\n'\n\t\t\tif attendance.obtained_attendance != obtained:\n\t\t\t\tattendance.obtained_attendance = obtained\n\t\t\t\tupdate_fields.append('obtained_attendance')\n\t\t\t\tactivity += 'Changed obtained attendance to' + str(obtained) + '.\\n'\n\t\t\tif str(attendance.subject.pk) != str(subject):\n\t\t\t\tattendance.subject = models.Subject.objects.get(pk=subject)\n\t\t\t\tupdate_fields.append('subject')\n\t\t\t\tactivity += 'Changed subject to ' + str(subject) + '.\\n'\n\t\t\tattendance.save(update_fields=update_fields)\n\t\t\thistory = 
models.History(\n\t\t\t\tuser=emp,\n\t\t\t\tactivity=activity,\n\t\t\t\tactivity_type=\"edit attendance\"\n\t\t\t)\n\t\t\thistory.save()\n\t\t\tcontext_dict[\"message\"] = 'Successfully updated Attendance.'\n\t\t\tcontext_dict[\"success\"] = True\n\t\texcept Exception as e:\n\t\t\tcontext_dict[\"message\"] = str(e)\n\t\t\tcontext_dict[\"success\"] = False\n\t\t\tprint(e)\n\tcontext_dict.update(context_helper.get_attendance_info(attendance))\n\tfor i in context_dict['subjects']:\n\t\t# use for dynamic\n\t\ttry: del context_dict['all_subjects'][i]\n\t\texcept: pass\n\tif context_dict.get('success', False):\n\t\treturn HttpResponseRedirect('/view-attendance')\n\treturn render(\n\t\trequest, \"editAttendance.html\", context_dict\n\t)", "def edit_profile(request, userId):\n try:\n try:\n user = User.objects.get(pk=userId)\n profile = Profile.objects.get(user_id=userId)\n user.first_name = request.data['first_name'] if 'first_name' in request.data else user.first_name\n user.last_name = request.data['last_name'] if 'last_name' in request.data else user.last_name\n user.email = request.data['email'] if 'email' in request.data else user.email\n profile.phone = request.data['phone'] if 'phone' in request.data else profile.phone\n profile.profile_img_path = request.data['profile_img_path'] if 'profile_img_path' in request.data else profile.profile_img_path\n profile.information = request.data['information'] if 'information' in request.data else profile.information\n user.save()\n profile.save()\n return JsonResponse({\n \"statusCode\": 200,\n \"statusText\": \"Success\",\n \"message\": \"Save profile success\",\n \"error\": False\n })\n\n except ObjectDoesNotExist:\n return JsonResponse({\n \"statusCode\": 404,\n \"statusText\": \"Not Found\",\n \"message\": \"User Not Exist\",\n \"error\": True\n })\n except:\n return JsonResponse({\n \"statusCode\": 500,\n \"statusText\": \"Internal Server\",\n \"message\": \"Internal Server\",\n \"error\": True\n })", "def ipsecprofiles(self, ipsecprofile_id, data, tenant_id=None, api_version=\"v2.1\"):\n\n if tenant_id is None and self._parent_class.tenant_id:\n # Pull tenant_id from parent namespace cache.\n tenant_id = self._parent_class.tenant_id\n elif not tenant_id:\n # No value for tenant_id.\n raise TypeError(\"tenant_id is required but not set or cached.\")\n cur_ctlr = self._parent_class.controller\n\n url = str(cur_ctlr) + \"/{}/api/tenants/{}/ipsecprofiles/{}\".format(api_version,\n tenant_id,\n ipsecprofile_id)\n\n api_logger.debug(\"URL = %s\", url)\n return self._parent_class.rest_call(url, \"put\", data=data)", "def update_user_profile(id):\n token = request.json['token']\n u = user.User.query.filter(user.User.token == token).first()\n if u is None:\n abort(404)\n if u.id != id:\n print \"user id is wrong.\" #TODO: Support log system\n abort(500)\n u.name = request.json['name']\n u.nickname = request.json['nickname']\n u.company = request.json['nickname']\n with store_context(fs_store):\n with open(files.path(request.json['header'])) as f:\n u.header_icon.from_file(f)\n db.session.merge(u)\n db.session.commit()\n db.session.merge(u)\n db.session.commit()\n return jsonify(u.to_dict())", "def test_onchange_user_profile(self):\n admin = self.env.ref('base.user_root').id\n with self.assertRaises(ValidationError):\n self.env['res.users'].browse(\n self.user.id).write({'user_profile_id': admin})", "async def test_update(self):\n rsps = respx.put(f'{PROVISIONING_API_URL}/users/current/provisioning-profiles/id') \\\n .mock(return_value=Response(200))\n 
await provisioning_client.update_provisioning_profile('id', {'name': 'new name'})\n assert rsps.calls[0].request.url == \\\n f'{PROVISIONING_API_URL}/users/current/provisioning-profiles/id'\n assert rsps.calls[0].request.headers['auth-token'] == 'header.payload.sign'\n assert rsps.calls[0].request.content == json.dumps({'name': 'new name'}).encode('utf-8')", "def update(self, id, data):\n\t\tassert isinstance(id, str), 'The ID must be a string'\n\t\tassert id, 'The ID must not be an empty string'\n\t\tassert isinstance(data, dict), 'The data type must be a dictionary'\n\t\tassert data, 'Case data must not be an empty dictionary'\n\n\t\turl = f'{self.root.url}/api/v1.2/cases/{id}'\n\t\tdata = json.dumps(data)\n\t\treturn self.root.r('PUT', url, data, headers=None, verify=self.root.verify)", "def put(self, id):\n adm = Administration()\n lg = LearnGroup.from_dict(api.payload)\n if lg is not None:\n\n lg.set_id(id)\n adm.save_learngroup(lg)\n return lg, 200\n\n else:\n return '', 500", "def enroll_profile(region, subscription_key, wav_path):\n fs, audio_data = _check_and_load_wav_file_length(wav_path)\n profile_id = _add_profile(region, subscription_key)\n\n url = \"%s/speaker/identification/v2.0/text-independent/profiles/%s/enrollments\" % (\n _get_azure_endpoint(region), profile_id)\n headers = {\n \"Ocp-apim-subscription-key\": subscription_key,\n \"Content-Type\": \"audio/wav; codecs=audio/pcm; samplerate=%s\" % fs,\n }\n session = requests.Session()\n resp = session.post(url, headers=headers, data=audio_data)\n print(\"Enrollment response status code: %s\\n\" % resp.status_code)\n print(json.dumps(json.loads(resp.content), indent=2))", "def save_profile(self):\n self.save()", "def updateEMPStudyData(self, study_id, study_score, web_app_user_id):\n con = self.getMetadataDatabaseConnection()\n con.cursor().callproc('qiime_assets.update_emp_study_data', [study_id, study_score, web_app_user_id])", "def update_user_profile_deep(id):\n token = request.json['token']\n u = user.User.query.filter(user.User.token == token).first()\n if u is None:\n abort(404)\n if u.id != id:\n print \"user id is wrong.\" #TODO: Support log system\n abort(500)\n u.name = request.json['name']\n u.title = request.json['title']\n u.company = request.json['company']\n u.nickname = request.json['company']\n u.phone_number = request.json['phone_number']\n u.email = request.json['email']\n if request.json['password'] != None and request.json['password'] != '':\n u.password = request.json['password']\n db.session.merge(u)\n db.session.commit()\n return jsonify(u.to_dict())", "def put(self, id):\n adm = Administration()\n print(api.payload)\n p = Person.from_dict(api.payload)\n if p is not None:\n p.set_id(id)\n adm.save_person(p)\n return p, 200\n\n else:\n return '', 500", "def update_adminhod_view(request):\r\n # get current adminhod.\r\n adminhod = get_object_or_404(AdminHOD, user__id=request.user.id) \r\n # display adminhod's initial data.\r\n user_form = UpdateUserForm(\r\n request.POST or None,\r\n staff_student=adminhod, \r\n instance=adminhod,\r\n initial={'full_name': adminhod.user.full_name,\r\n 'email': adminhod.user.email, \r\n })\r\n if request.method == 'POST':\r\n if user_form.is_valid():\r\n # update adminhod.\r\n adminhod.user.full_name = user_form.cleaned_data.get(\"full_name\")\r\n adminhod.user.email = user_form.cleaned_data.get(\"email\")\r\n adminhod.user.save()\r\n # Display success message.\r\n messages.success(request, f'Your profile has been updated successfully.', 
extra_tags='update-adminhod-profile')\r\n return redirect('adminhod:update-adminhod-profile') \r\n context = {'user_form':user_form}\r\n return render(request, 'adminhod/update_adminhod_profile.html', context)", "def edit_user(request, user_id):\n profile = get_object_or_404(UserProfile, user=request.user)\n\n # make sure only managers and admins can add a team\n if profile.level == 'admin' or profile.level == 'manager':\n\n user = get_object_or_404(UserProfile, pk=user_id)\n if request.method == 'POST':\n form = UserProfileForm(request.POST, request.FILES, instance=user)\n if form.is_valid():\n form.save()\n messages.success(request, 'User edited successfully')\n\n users = UserProfile.objects.filter(company_id=profile.company_id)\n template = 'profiles/user_management.html'\n context = {\n 'users': users,\n 'profile': profile\n }\n return render(request, template, context)\n\n else:\n print(\"failed\")\n else:\n form = UserProfileForm(instance=user)\n\n template = 'profiles/profile.html'\n context = {\n 'form': form,\n 'profile': user,\n }\n\n return render(request, template, context)\n\n else:\n messages.info(request, \"Sorry, you are not authorized to edit users. Ask a Manager or Admin.\")\n\n return redirect(reverse('planning', ))", "def update_experience(uid, rid, increment):\n errmsg = []\n\n experience = Experience.query.filter(Experience.uid == uid).filter(Experience.rid == rid).first()\n if not experience:\n errmsg.append(\"Experience entry does not exist for the given user ID and restaurant ID.\")\n elif increment < 0:\n errmsg.append(\"Experience cannot be incremented by a negative number.\")\n\n if not errmsg:\n old_level = convert_experience_to_level(experience.experience)\n milestone = get_milestone(uid, rid)\n Experience.query.filter(Experience.uid == uid).filter(Experience.rid == rid).update(dict(experience=experience.experience + increment))\n db.session.commit()\n if milestone:\n new_level = convert_experience_to_level(experience.experience)\n if old_level < new_level and new_level == int(milestone[\"level\"]):\n update_points(uid, rid, milestone[\"reward\"])\n return None\n\n return errmsg", "def put(self, id):\n return userDao.update(id, api.payload)", "def update_user(id):\n pass", "def update(request):\n\tcourse_id = request.GET.get('course_id')\n\tif request.method == 'POST':\n\t\tcourse_title = request.POST['course_title']\n\t\tinstitute_name = request.POST['institute_name']\n\t\tcourse_desc = request.POST['course_desc']\n\t\tcurrent_data = Course.objects.get(course_id = course_id)\n\t\tcurrent_data.course_title = course_title\n\t\tcurrent_data.institute_name = institute_name\n\t\tcurrent_data.course_desc = course_desc\n\t\tcurrent_data.save()\n\t\treturn HttpResponseRedirect(reverse('courseapp:index'))\n\tdata = Course.objects.get(course_id = course_id)\n\treturn render(request,'update.html',{'data':data})", "def update_profile(username):\n\n description = request.json.get('description')\n token = request.headers.get('token')\n\n if description is None:\n return jsonify({'message': 'New description not provided'}), 404\n\n # Token Validation\n token_valid, response = is_token_valid(token)\n if not token_valid:\n return response\n token_username = response\n\n # Privilege handling\n if token_username != username:\n return jsonify({'message': \"You may not edit others profiles\"}), 404\n\n if username not in Profiles.keys():\n return jsonify({'message': 'User {} not found'.format(username)}), 404\n\n Profiles[username]['description'] = description\n return 
Profiles[username]", "def enable(self,\n profile_id=None):\n if profile_id is None:\n self._enabled = True\n else:\n self._profiles[profile_id] = True", "def patch(self, request):\n\n user_obj = UserProfile.objects.filter(id=request.user.id).first()\n if not user_obj:\n return existence_error('user')\n\n request_json = {\n 'profile_photo': request.data.get('profile_photo')\n }\n\n user_serialized = EditUserProfileSerializer(user_obj, data=request_json, partial=True)\n if not user_serialized.is_valid():\n return validate_error(user_serialized)\n user_serialized.save()\n\n response_json = {\n 'status': True,\n 'message': 'successful',\n 'data': {}\n }\n\n return Response(response_json, status=201)", "def test_update_risk_profile_using_put(self):\n pass", "def edit_server_profile_for_dl(profile_obj):\n # This keyword is deprecated, please do not use.\n FusionUIBase.navigate_to_section(SectionType.SERVER_PROFILES, time_for_loading=5)\n\n total = len(profile_obj)\n not_exists = 0\n edited = 0\n\n for n, profile in enumerate(profile_obj):\n logger.info(\"{2} No: {0} --- Total: {1} {2}\".format((n + 1), total, '-' * 14))\n\n logger.info(\"editing a server profile with name '%s' ...\" % profile.name)\n if not VerifyServerProfile.verify_server_profile_exist(profile.name, fail_if_false=False):\n logger.warn(\"server profile '%s' does not exist\" % profile.name)\n not_exists += 1\n continue\n # - Prep the auto_power_off switch\n # - By default, this keyword will power off the server if it's powered on -- unless the attribute 'auto_power_off' is explicitly set to 'false'\n auto_power_off = False if getattr(profile, 'auto_power_off', '').lower() == 'false' else True\n # open Edit SP dialog and enter data ...\n CommonOperationServerProfile.click_server_profile(profile.name)\n EditServerProfile.select_action_edit()\n EditServerProfile.wait_edit_server_profile_dialog_shown()\n\n EditServerProfile.input_name(profile.newName)\n EditServerProfile.input_description(profile.desc)\n # Input 'Server hardware'\n # - input server name,\n # - select option from the popped out drop-down list,\n # - verify the server hardware is refreshed to the type name displayed in the drop-down list for selecting server hardware\n if not EditServerProfile.input_select_server_hardware(profile.server, auto_power_off=auto_power_off):\n logger.warn(\"server hardware '%s' is not selected for editing server profile, may be wrong name, or powered on but failed to power it off. 
\"\n \"test will skip this profile '%s' and continue to edit other server profiles\" % (profile.server, profile.name))\n continue\n msg = EditServerProfile.get_error_message_from_server_hardware()\n if msg is not None:\n logger.warn(\"error occurred, server profile can not be edited successfully\")\n ui_lib.fail_test(msg)\n sht_selected = EditServerProfile.get_selected_server_hardware_type(profile.server)\n if profile.hardwaretype not in sht_selected:\n logger.warn(\"the server hardware type of server '%s' is NOT consistent with test data '%s'\" % (sht_selected, profile.hardwaretype))\n # set boot mode if attribute 'manageBootMode' is true - only for Gen 9 (or later) server:\n FusionUIBase.select_view_by_name('Boot Settings')\n if 'gen9' in sht_selected.lower():\n logger.info(\"setting 'Boot mode' for Gen 9 specially ...\")\n if getattr(profile, 'manageBootMode', '').lower() == 'true':\n CommonOperationServerProfile.BootSettings.tick_manage_boot_mode()\n CommonOperationServerProfile.BootSettings.select_boot_mode_by_text(profile.bootMode) if hasattr(profile, 'bootMode') else None\n if getattr(profile, 'bootMode', '').lower() == 'legacy bios':\n CommonOperationServerProfile.BootSettings.set_legacy_bios_mode_boot_order(profile)\n else:\n CommonOperationServerProfile.BootSettings.set_non_legacy_bios_mode_boot_order(profile, hardware_type=sht_selected)\n else:\n CommonOperationServerProfile.BootSettings.untick_manage_boot_mode()\n else:\n CommonOperationServerProfile.BootSettings.set_legacy_bios_mode_boot_order(profile)\n\n EditServerProfile.click_ok_button()\n # if EditServerProfile.get_error_message_from_boot_mode() is not None:\n if CommonOperationServerProfile.BootSettings.get_error_message_from_boot_mode() is not None:\n logger.warn(\"test data may be wrongly defined for 'Boot mode', which caused an error that blocks profile being edited. \"\n \"Test will skip this profile '%s' and continue to edit other server profiles\" % profile.name)\n continue\n\n status, msg = FusionUIBase.get_error_message_from_dialog(timeout=10)\n if status is True:\n logger.warn(\"unexpected error occurred: %s\" % msg)\n ui_lib.fail_test(msg)\n\n EditServerProfile.wait_edit_server_profile_dialog_disappear(timeout=180)\n FusionUIBase.show_activity_sidebar()\n FusionUIBase.wait_activity_action_ok(profile.newName, 'Update', timeout=300, fail_if_false=False)\n FusionUIBase.show_activity_sidebar()\n CommonOperationServerProfile.wait_server_profile_status_ok(profile.newName, timeout=180, fail_if_false=False)\n logger.info(\"edited server profile '%s' successfully\" % profile.newName)\n edited += 1\n\n logger.info(\"{0} == Summary == {0}\".format('-' * 14))\n if total - not_exists == 0:\n logger.warn(\"no server profile to edit! 
all %s server profile(s) is NOT existing, hence test is considered PASS\" % not_exists)\n return True\n else:\n if edited < total:\n logger.warn(\"not all of the server profile(s) is successfully edited - %s out of %s edited \" % (edited, total))\n if edited + not_exists == total:\n logger.warn(\"%s non-existing server profile(s) is skipped being edited, hence test is considered PASS\" % not_exists)\n return True\n else:\n logger.warn(\"%s non-existing server profile(s) is skipped being edited, but %s profile(s) left is failed being edited \" % (not_exists, total - edited - not_exists))\n return False\n\n logger.info(\"all of the server profile(s) is successfully edited - %s out of %s \" % (edited, total))\n return True", "def update_estado_entidad(self, entidad_id, estado):\n proy = self.get_by_id(entidad_id)\n proy._estado = estado \n self.update(proy)", "def do_update(self):\n params = self.inputs\n new_profile_id = params.get('new_profile_id', None)\n if new_profile_id and new_profile_id == self.entity.profile_id:\n params.pop('new_profile_id')\n\n if not params:\n return self.RES_OK, 'No property to update.'\n\n res = self.entity.do_update(self.context, params)\n if res:\n return self.RES_OK, 'Node updated successfully.'\n else:\n return self.RES_ERROR, 'Node update failed.'", "def patch(self,\n dpd_profile_id,\n ip_sec_vpn_dpd_profile,\n ):\n return self._invoke('patch',\n {\n 'dpd_profile_id': dpd_profile_id,\n 'ip_sec_vpn_dpd_profile': ip_sec_vpn_dpd_profile,\n })", "def edit_grade(self, username: str, token: str, course_abbreviation: str, student_id: str, updated_grade: float) -> bool:\n\n # Validate user first\n if not self.validate(username=username, token=token, check_privilege='instructor'):\n raise RuntimeError(\"User not verified!\")\n\n # Get the student's UID\n student_uid = self.get_uid(username=student_id)\n\n # Get a DB cursor\n cursor = self._db_connection.cursor()\n\n # Get the course ID from the abbreviation\n cursor.execute('''\n SELECT course_id FROM courses WHERE course_abbreviation LIKE ?;\n ''', (course_abbreviation,))\n db_result = cursor.fetchone()\n\n # If no associated courses are found\n if db_result is None:\n raise RuntimeError(f\"Could not find course associated with: {course_abbreviation}\")\n\n # Extract the course ID from the returned tuple\n course_id = db_result[0]\n\n # Run update in the DB\n cursor.execute('''\n UPDATE enrollment_records SET grade = ? WHERE uid = ? 
AND course_id = ?\n ''', (updated_grade, student_uid, course_id))\n self._db_connection.commit()\n\n return True", "def save_user_profile(instance, **_):\n instance.profile.save()", "def update_grade(self, course, grade):\n if course not in self.courses:\n raise NameError('This student is not enrolled in that course')\n else:\n self.courses[course] = grade\n\n return self", "def about_me(user_id, text):\n UserProfile.objects.filter(pk=user_id).update(about_me=text)", "def put(self, id):\n data = flask.request.json\n user_dao.update_user(id, data)\n return None, 204", "def update_employee(self, obj):\n cursor = self.dbconnect.get_cursor()\n try:\n cursor.execute('UPDATE employee '\n 'SET name = %s, email = %s, office = %s, extra_info = %s, picture_location = %s, '\n 'research_group = %s, title = %s, is_external = %s, is_admin = %s, is_active = %s '\n 'WHERE id = %s;',\n (obj.name, obj.email, obj.office, obj.extra_info, obj.picture_location, obj.research_group,\n obj.title, obj.is_external, obj.is_admin, obj.is_active, obj.e_id))\n self.dbconnect.commit()\n except:\n self.dbconnect.rollback()\n raise", "def test_user_edit_profile(self):\n with self.client as c:\n with c.session_transaction() as sess:\n sess[CURR_USER_KEY] = self.testuser_id\n\n address = \"1215 Brookview Ave, Kettering, Ohio 45409\"\n\n resp = c.get(f\"/users/8989/\")\n html = resp.get_data(as_text=True)\n\n self.assertEqual(resp.status_code, 200)\n self.assertIn(\n '<h1 class=\"Display-4 text-center mt-3\"><b>Profile Information:</b></h1>',\n html,\n )\n self.assertIn(\"<p>testuser</p>\", html)\n self.assertIn(\"<p>test@test.com</p>\", html)\n self.assertIn(\"<p>662-996-3356</p>\", html)\n self.assertIn(\n '<a class=\"font-weight-bold btn winter-neva-gradient color-block btn-block my-4 waves-effect z-depth-0\" href=\"/users/8989/edit\">Edit Profile</a>',\n html,\n )", "def edit_payee(self, payee_id, new_payee_name):\n # [todo] - add check that new_payee_name is unique\n\n # open a cursor\n cur = self.get_cursor()\n\n edit_payee_statement = \"UPDATE payees \" + \\\n \"SET payee_name='{0}' \".format(new_payee_name) + \\\n \"WHERE payee_id={0}\".format(payee_id)\n\n cur.execute(edit_payee_statement)\n\n # close the cursor\n self.close_cursor()", "def update_user_profile(user_info):\n user_id = user_info[\"USER_ID\"]\n user_collection.find_one_and_update(\n {\"_id\": user_id},\n {\n \"$set\": {\n \"username\": user_info[\"username\"],\n \"email\": user_info[\"email\"],\n \"avatar\": user_info[\"avatar\"],\n \"githubURL\": user_info[\"githubURL\"],\n \"linkedinURL\": user_info[\"linkedinURL\"],\n \"stackoverflowURL\": user_info[\"stackoverflowURL\"],\n \"skills\": user_info[\"skills\"],\n }\n },\n upsert=False,\n )", "def update_my_profile(\n body: Optional[UserProfileUpdate] = None,\n namespace: Optional[str] = None,\n x_additional_headers: Optional[Dict[str, str]] = None,\n **kwargs\n):\n if namespace is None:\n namespace, error = get_services_namespace()\n if error:\n return None, error\n request = UpdateMyProfile.create(\n body=body,\n namespace=namespace,\n )\n return run_request(request, additional_headers=x_additional_headers, **kwargs)", "def update(self, expense_id, expense, receipt=None):\n url = base_url + expense_id\n json_object = dumps(expense.to_json())\n data = {\n 'JSONString': json_object\n }\n if receipt is None:\n attachments = None\n else:\n attachments = [{\n 'receipt': {\n 'filename': basename(receipt),\n 'content': open(receipt).read()\n }\n }]\n resp = zoho_http_client.put(url, self.details, 
self.headers, data, None,\n attachments)\n return parser.get_expense(resp)", "def edit_profile():\n # handle pre-flight for browsers CORS access\n if request.method == \"OPTIONS\":\n return generate_response()\n # part1: verify that user has logged in and the request is legit\n checked_and_verified, response = check_verify_token(request,login_session)\n if checked_and_verified != True: return response\n # handle the edge case where user is authorized to perform create user but not other method\n if not is_loggedin(login_session):\n response = generate_message(MESSAGE_USER_NOT_LOGGED_IN,401)\n return response\n # part2: check json\n checked_json, response, requested_json = check_json_form(request,MESSAGE_BAD_JSON,MESSAGE_CREATE_USER_NO_JSON)\n if checked_json != True: return response\n # part3: verify json data\n try:\n user_email = login_session[\"login_user_email\"]\n except KeyError:\n # key error means we are offline til this far\n user_email = requested_json[\"email\"]\n # design decision: if there are invalid field names, only update the valid fields.\n # check updates keys and formats\n try:\n update_pairs = convert_to_underscore(requested_json[\"updates\"])\n \n if isinstance(update_pairs,dict) != True:\n response = generate_message(MESSAGE_UPDATE_PROFILE_NON_DICT,400)\n else:\n correct_format,valid_update_pairs, response = process_request_json(User,update_pairs)\n if correct_format == True: \n update_field(User, session, {\"email\": user_email},valid_update_pairs)\n response = generate_message(MESSAGE_UPDATE_PROFILE_SUCCESS,200)\n except KeyError:\n response = generate_message(MESSAGE_UPDATE_PROFILE_NO_ENTRY,400)\n return response", "def update_flavor_profile(request, **kwargs):\n data = request.DATA\n flavor_profile_id = data['flavor_profile']['id']\n\n conn = get_sdk_connection(request)\n flavor_profile = conn.load_balancer.update_flavor_profile(\n flavor_profile_id,\n name=data['flavor_profile'].get('name'),\n provider_name=data['flavor_profile'].get('provider_name'),\n flavor_data=data['flavor_profile'].get('flavor_data'),\n )\n\n return _get_sdk_object_dict(flavor_profile)", "def test_that_a_user_can_edit_their_profile(self):\n self.authorize_user(self.user_login_details)\n url = self.profiles_url + \\\n '{}'.format(self.user['user']['username']) + \"/\"\n response = self.client.patch(url, data=self.user_bio)\n self.assertEqual(response.data['bio'], \"You are a peculiar man.\")\n self.assertEqual(response.status_code, status.HTTP_200_OK)", "def edit_user(user_id):\n if request.method == 'GET':\n # init form with current user:\n form = ProfileForm(\n nickname = session[Session.PROFILE][\"nickname\"], \n location = session[Session.PROFILE][\"location\"],\n about_me = session[Session.PROFILE][\"about_me\"]\n )\n if request.method == 'POST': \n # init form with POSTed form:\n form = ProfileForm(request.form)\n\n if form.validate(): \n # update backend:\n response = service_user_management.patch(\n id = f'auth0|{user_id}', \n nickname = form.nickname.data, \n location = form.location.data,\n about_me = form.about_me.data\n )\n\n # success:\n if 'identities' in response: \n try:\n # update db:\n delegated_user = DelegatedUser.query.get_or_404(\n user_id, \n description='There is no user with id={}'.format(user_id)\n )\n delegated_user.nickname = form.nickname.data\n # update:\n db.session.add(delegated_user)\n # write\n db.session.commit()\n\n # update session:\n session[Session.PROFILE][\"nickname\"] = form.nickname.data\n session[Session.PROFILE][\"location\"] = 
form.location.data\n session[Session.PROFILE][\"about_me\"] = form.about_me.data\n \n # on successful profile update, flash success\n flash('Your profile was successfully updated.')\n\n return redirect(url_for('.show_user', user_id = user_id))\n except:\n db.session.rollback()\n # on unsuccessful registration, flash an error instead.\n flash('An error occurred. New account could not be created.')\n finally:\n db.session.close()\n # failure:\n else:\n flash(response['message']) \n else:\n # for debugging only:\n flash(form.errors)\n \n return render_template('users/forms/user.html', form=form, user_id=user_id)", "def edit_user_profile(request):\n user = request.user\n user_profile = UserProfile.objects.filter(user=user)[0]\n if request.method == 'POST':\n form = MemberProfileForm(request.POST)\n additional_form = MemberAdditionalProfileForm(request.POST)\n if form.is_valid() and additional_form.is_valid():\n cd = form.cleaned_data\n user.first_name = cd['first_name']\n user.last_name = cd['last_name']\n user.email = cd['email']\n user.save()\n if 'picture' in request.FILES:\n file = request.FILES['picture']\n user_profile.picture.save(file.name, file, save=True)\n user_profile.gravatar = additional_form.cleaned_data['gravatar']\n user_profile.save()\n return HttpResponseRedirect('/')\n else:\n form = MemberProfileForm(instance=request.user)\n additional_form = MemberAdditionalProfileForm(instance=user_profile)\n return render_to_response('edit_profile.html', locals())", "def update(self, request, pk=None):\n exp = Experiment.objects.get(pk=pk)\n serializer = ExperimentSerializer(exp, data=request.data)\n if serializer.is_valid():\n serializer.save()\n return send_response(request.method, serializer)", "def update(self, id, metadataProfile, xsdData = NotImplemented, viewsData = NotImplemented):\n\n kparams = KalturaParams()\n kparams.addIntIfDefined(\"id\", id);\n kparams.addObjectIfDefined(\"metadataProfile\", metadataProfile)\n kparams.addStringIfDefined(\"xsdData\", xsdData)\n kparams.addStringIfDefined(\"viewsData\", viewsData)\n self.client.queueServiceActionCall(\"metadata_metadataprofile\", \"update\", \"KalturaMetadataProfile\", kparams)\n if self.client.isMultiRequest():\n return self.client.getMultiRequestResult()\n resultNode = self.client.doQueue()\n return KalturaObjectFactory.create(resultNode, 'KalturaMetadataProfile')", "def update(self, request, pk=None):\n\n missing_keys = self._get_missing_keys()\n if len(missing_keys) > 0:\n return Response(\n {'message':\n f'Request body is missing the following required properties: {\", \".join(missing_keys)}'\n },\n status=status.HTTP_400_BAD_REQUEST\n )\n\n user = User.objects.get(id=request.auth.user.id)\n\n expense = Expenses.objects.get(pk=pk)\n expense.date_purchased = request.data[\"date_purchased\"]\n expense.cost = request.data[\"cost\"]\n expense.image = request.data[\"image\"]\n expense.user = user\n\n supply_type = Supply_Type.objects.get(\n pk=request.data[\"supply_type_id\"])\n expense.supply_type = supply_type\n\n expense.save()\n\n return Response({}, status=status.HTTP_204_NO_CONTENT)", "def editAuthorByID(id: int, name: str, birth: str):\n if not id:\n abort(400)\n author = Author.query.get(id)\n if not author:\n abort(404, \"Author is not found\")\n if name:\n author.name = name\n if birth:\n author.birth = birth\n db.session.commit()\n app.logger.info(f\"The author {id} has been edited\")" ]
[ "0.6951867", "0.6946061", "0.645494", "0.6349245", "0.63402873", "0.63243157", "0.6253774", "0.62004584", "0.6143139", "0.61277544", "0.60156894", "0.5982269", "0.59785736", "0.59742916", "0.5897288", "0.5836116", "0.5803204", "0.5785535", "0.5748711", "0.5654298", "0.5577862", "0.55769783", "0.5574313", "0.554739", "0.5545592", "0.55255437", "0.55244046", "0.55208397", "0.5500301", "0.54889977", "0.5486504", "0.5481432", "0.54732966", "0.54654676", "0.54482484", "0.54356074", "0.5417956", "0.5388537", "0.53688496", "0.5357304", "0.53568256", "0.5352601", "0.5337847", "0.5328254", "0.5322264", "0.5313593", "0.5301171", "0.529697", "0.52908146", "0.52653563", "0.523512", "0.5224222", "0.52038693", "0.52009857", "0.5183721", "0.5176455", "0.5173276", "0.5152279", "0.5132498", "0.5120651", "0.51180166", "0.5107008", "0.5093748", "0.5085753", "0.5071255", "0.50543886", "0.5047839", "0.5046519", "0.50420195", "0.50378764", "0.502902", "0.502783", "0.50172955", "0.501647", "0.5014695", "0.4995047", "0.4991361", "0.4985782", "0.49784145", "0.49670818", "0.49646664", "0.4959694", "0.49559247", "0.49527782", "0.49503502", "0.49499673", "0.49498633", "0.4945862", "0.49411264", "0.49338117", "0.49277586", "0.4926289", "0.4917979", "0.49157527", "0.49077058", "0.4892193", "0.48708567", "0.48693678", "0.48641738", "0.48578942" ]
0.67715335
2
Iterate through fields in serializer and set all to required except ignore_fields
def set_fields_to_required(serializer, ignore_fields=None): if ignore_fields is None: ignore_fields = [] for field in serializer.fields.values(): if field.field_name not in ignore_fields: field.required = True field.allow_null = False field.allow_blank = False
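A minimal usage sketch for the document above, assuming Django REST Framework is installed; ProfileSerializer and its field names are hypothetical, invented for illustration, mirroring the __init__ pattern that appears in the negatives below:

# Hypothetical serializer; only set_fields_to_required comes from the document.
from rest_framework import serializers

class ProfileSerializer(serializers.Serializer):
    first_name = serializers.CharField()
    last_name = serializers.CharField()
    about_me = serializers.CharField(required=False, allow_blank=True)

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Force every declared field except 'about_me' to be required,
        # non-null, and non-blank at validation time.
        set_fields_to_required(self, ignore_fields=['about_me'])

With this in place, ProfileSerializer(data={'first_name': ''}).is_valid() returns False, since 'first_name' is blank and 'last_name' is missing, while 'about_me' may still be omitted.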
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_required_fields(self) -> Iterable[fields.Field]:\n for model_field in self.get_fields():\n if model_field.required:\n yield model_field", "def clean_fields(self, instance, exclude=None):\n errors = {}\n exclude = exclude or []\n for name, f in self.properties.items():\n raw_value = getattr(instance, name, None)\n is_blank = not bool(raw_value)\n is_nullable = f.null\n is_defaulted = f.column.default or f.column.server_default\n is_required = f.required\n\n is_skippable = is_blank and (is_nullable or is_defaulted or not is_required)\n\n if name in exclude or is_skippable:\n continue\n try:\n setattr(instance, name, f.clean(raw_value, instance))\n except ValidationError as e:\n errors[name] = e.error_list\n if errors:\n raise NestedValidationError(errors)", "def enforce_required_fields(self, attrs):\n if self.instance is not None:\n return\n # missing_items = {\n # field_name: self.missing_message\n # for field_name in self.fields\n # if field_name not in attrs\n # }\n # if missing_items:\n # raise ValidationError(missing_items, code='required')", "def forbid_properties(schema: Dict[str, Any], forbidden: List[str]) -> None:\n not_schema = schema.setdefault(\"not\", {})\n already_forbidden = not_schema.setdefault(\"required\", [])\n already_forbidden.extend(forbidden)\n not_schema[\"required\"] = list(set(chain(already_forbidden, forbidden)))", "def setRequiredValues(self, instance):\n for key in instance.__slots__:\n if key in instance.requiredFields:\n value = self.getTypicalValue(type(instance), key)\n setattr(instance, key, value)", "def required_dict_validator(self, dict_fields, model_name, erp_required=[]):\n required_fields = self.env['settings.field'].sudo().search([('model_id.model', '=', model_name)])\n\n if required_fields:\n erp_required.extend(required_fields.required_field_ids.filtered(lambda x: x.id not in [er.id for er in erp_required]))\n\n for field in erp_required:\n if field.name in dict_fields and 'required' not in dict_fields[field.name]:\n dict_fields[field.name]['required'] = True\n dict_fields[field.name]['empty'] = False\n\n return dict_fields", "def required_fields(model, values):\n if values:\n for k in list(values):\n if k not in model.__table__.columns.keys():\n values.pop(k)\n return values", "def clean_fields(self, exclude=None):\n obj = self._obj\n if obj is None:\n return None\n\n self.event = self.clean_event(self.event)\n self.resource_name = self.clean_resource_name(obj.__class__.__name__)\n self.resource_id = obj.id\n self.site = self.clean_site(obj)\n\n serializer_class = self.get_serializer_for_resource(self.resource_name)\n serializer = serializer_class(obj)\n self._resource = serializer.data", "def get_empty_required_fields(self):\n empty_fields = self.get_empty_fields()\n return [f for f in empty_fields if f in self.REQUIRED_FIELDS]", "def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n ignore_fields = (\n 'about_me',\n 'romanized_first_name',\n 'romanized_last_name',\n 'postal_code',\n )\n set_fields_to_required(self, ignore_fields=ignore_fields)", "def _filtered_attributes(\n self, required_attributes: Union[Iterable, Literal[\"__all__\"]], dontformat=False\n ) -> Tuple[Dict, Dict]:\n if required_attributes == \"__all__\":\n required_attributes = self.__atomic_fields_set__ | {\"meta\"}\n required_attributes = set(required_attributes)\n errors = []\n attrs = {name: getattr(self, name, None) for name in required_attributes-{\"meta\"}}\n for name in required_attributes - {\"meta\"}:\n if name not in 
self.__atomic_fields_set__:\n errors.append(f\" Unexpected required attribute: '{name}'.\")\n continue\n if attrs.get(name) is None:\n if not utils.is_an_optional_type_hint(self.__fields_types__[name]):\n errors.append(f\" Missing required attribute: '{name}'.\")\n if errors:\n raise ValueError(\"\\n\" + \"\\n\".join(errors))\n attrs = {\n utils.snake_to_camel_case(k, dontformat): v\n for (k, v) in attrs.items()\n if k in set(required_attributes) - self._identifier_fields\n }\n meta_attrs = {\n utils.snake_to_camel_case(name, dontformat): getattr(self, name)\n for name in self.__meta_attributes__\n if getattr(self, name) is not None\n } if \"meta\" in required_attributes else None\n return attrs, meta_attrs", "def remove_null_fields(self):\n with open(self.schema_path, 'r') as file_obj:\n schema_data = yaml.safe_load(file_obj)\n schema_fields = schema_data.get('mapping').keys()\n for field in schema_fields:\n # We want to keep 'false' and 0 values, and avoid removing fields that are required in the schema.\n if field in self.data and self.data[field] in (None, '', [], {}) and \\\n not schema_data.get('mapping', {}).get(field, {}).get('required'):\n self.data.pop(field)", "def only(self, *fields):\n for field in fields:\n self._only_fields.add(field)\n\n return self", "def _populate_always_present_fields(self, field):\n defaults = [\n (\"label\", \"\"),\n (\"instructions\", \"\"),\n (\"placeholder\", \"\"),\n (\"defaultValue\", \"\"),\n (\"restrictions\", {}),\n (\"errorMessages\", {}),\n ]\n field.update({\n key: value\n for key, value in defaults if key not in field\n })", "def check_for_required_fields(cls, fields=[], dataDict={}):\n\n validateRequired = Validate.required(fields=fields, dataDict=dataDict)\n if validateRequired['status'] == False:\n res = jsonify(\n {'status': 400, 'error': validateRequired['message'], 'data': []})\n return abort(make_response(res, 400))\n return True", "def set_additional_fields(cls, model, data):\n for k, v in data.items():\n if not hasattr(model, k):\n setattr(model, k, v)", "def depopulate(self, is_update):\n fields = {}\n schema = self.schema\n for k, field in schema.fields.items():\n is_modified = k in self.modified_fields\n orig_v = getattr(self, k)\n v = field.iset(\n self,\n orig_v,\n is_update=is_update,\n is_modified=is_modified\n )\n\n if is_modified or v is not None:\n if is_update and field.is_pk() and v == orig_v:\n continue\n\n else:\n fields[k] = v\n\n if not is_update:\n for field_name in schema.required_fields.keys():\n if field_name not in fields:\n raise KeyError(\"Missing required field {}\".format(field_name))\n\n return fields", "def validate_fields_for_magento(self,data):\n for field in data:\n if data[field] == None :\n del data[field]\n if data[field] == True:\n data[field] = 1\n if data[field] == False :\n data[field] = 0", "def test_object_is_not_created_without_required_fields(self):\n data1 = self.data.copy()\n del data1[\"title\"]\n\n serializer = ProductSerializer(data=data1)\n\n self.assertFalse(serializer.is_valid())\n self.assertEqual(serializer.errors.get(\"title\")[0], self.error_message)\n\n data2 = self.data.copy()\n del data2[\"description\"]\n\n serializer = ProductSerializer(data=data2)\n self.assertFalse(serializer.is_valid())\n self.assertEqual(serializer.errors.get(\"description\")[0], self.error_message)\n\n data3 = self.data.copy()\n del data3[\"price\"]\n\n serializer = ProductSerializer(data=data3)\n self.assertFalse(serializer.is_valid())\n self.assertEqual(serializer.errors.get(\"price\")[0], 
self.error_message)", "def required_fields(required_fields=[]):\n def decorator(func):\n \"\"\" The decorator applied to the obj_create method\"\"\"\n def wrapper(resource, bundle=None, **kwargs):\n \"\"\" wraps the decorated method and verifies a list of required\n fields when a new object is being created.\n\n \"\"\"\n if not isinstance(bundle, Bundle):\n request = bundle\n data = resource.deserialize(\n request, request.body,\n format=request.META.get('CONTENT_TYPE', 'application/json')\n )\n bundle = resource.build_bundle(request=request, data=data)\n else:\n request = None\n\n for required_field in required_fields:\n if required_field not in bundle.data:\n response = HttpBadRequest(\n json.dumps(\"missing %s field\" % required_field),\n content_type=bundle.request.META['CONTENT_TYPE'])\n raise ImmediateHttpResponse(response=response)\n return func(resource, bundle=bundle, **kwargs)\n return wrapper\n return decorator", "def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n self.fields['first_name'].required = False\n self.fields['last_name'].required = False\n self.fields['institution'].required = False\n self.fields['institution_logo'].required = False\n self.fields['allow_notifications'].required = False", "def test_create_enforces_required_fields(self):\n serializer = ServiceSerializer(data = {}, context = dict(project = self.project))\n self.assertFalse(serializer.is_valid())\n required_fields = {'name', 'category'}\n self.assertCountEqual(serializer.errors.keys(), required_fields)\n for name in required_fields:\n self.assertEqual(serializer.errors[name][0].code, 'required')", "def test_defining_only_or_defer_on_nonexistant_fields_fails(self):", "def data_only(self, fields):\n only = {}\n data = json.loads(self.data())\n for field, value in data.items():\n if field in fields:\n only[field] = value\n return json.dumps(only)", "def writeRequiredFields(self, outputFile):\n fields = []\n for field in self.getFields():\n if not field.has_default:\n fields.append(field)\n if len(fields) < 1:\n self._writeWithIndent('requiredFields = set([])', outputFile)\n else:\n self._writeWithIndent('requiredFields = set([', outputFile)\n for field in fields:\n string_ = '\"{0}\",'.format(field.name)\n self._writeWithIndent(string_, outputFile, 2)\n self._writeWithIndent('])', outputFile)", "def get_fields(self, exclude=('id',)):\n fields = {}\n for field in self._meta.fields:\n if not field.name in exclude and getattr(self, field.name):\n fields[field.name] = getattr(self, field.name)\n return fields", "def _validate_fields(fields):\n for field in fields:\n if field not in request.form:\n raise FieldNotFoundError(field)", "def subfields_all(verifield, required):\n for req_key, req_val in required.items():\n if getitem(verifield, req_key, '') != req_val:\n return False\n return True", "def check_for_empties():\n if hasattr(self.instance, 'fields_required_for_publish'):\n errors_for_empties = {\n field_name: try_adding_error_to_field(\n field_name, field_value)\n for (field_name, field_value) in self.data.items()\n if (len(field_value) == 0 or field_value == 'null') and field_name in self.instance.fields_required_for_publish\n }", "def missing_required_fields(proposal):\n data = flatten(proposal.data[0])\n sections = search_multiple_keys(proposal.schema, primary_search='isRequired', search_list=['label', 'name'])\n\n missing_fields = []\n for flat_key in data.items():\n for item in sections:\n if flat_key[0].endswith(item['name']):\n if not flat_key[1].strip():\n 
missing_fields.append( dict(name=flat_key[0], label=item['label']) )\n return missing_fields", "def whitelist_form_fields(form, whitlisted_fields):\n for schema in getAdditionalSchemata(form):\n behavior_interface_name = schema.__name__\n for fieldname in schema:\n full_name = '{}.{}'.format(behavior_interface_name, fieldname)\n if full_name in whitlisted_fields:\n continue\n\n remove(form, fieldname, behavior_interface_name)", "def required_fields():\n return tuple(MIMARKS._fields.keys())", "def strip_unwanted_fields(self, data, many, **kwargs):\n unwanted_fields = [\"resource_type\"]\n for field in unwanted_fields:\n if field in data:\n data.pop(field)\n return data", "def validate(self, data):\n # calling subserializer validate method (fields, and presets)\n data = super(FormidableSerializer, self).validate(data)\n # we check every field define in presets are define inside the form.\n if 'fields' in data and 'presets' in data:\n data = self.check_presets_cohesion(data)\n return data", "def only(self, *fields):\n for field in fields:\n path = \".\".join(self.document._meta.resolve_subfield_hierarchy(field.split(\".\")))\n self._only_fields.add(path)\n if self.query._Cursor__fields is None:\n # Identifier and version fields must always be included\n self.query._Cursor__fields = { \"_id\" : 1, \"_version\" : 1 }\n\n self.query._Cursor__fields.update({ path : 1 })\n\n return self", "def filter_excluded_fields(fields, Meta, exclude_dump_only):\n exclude = getattr(Meta, \"exclude\", [])\n if exclude_dump_only:\n exclude += getattr(Meta, \"dump_only\", [])\n\n filtered_fields = OrderedDict(\n (key, value) for key, value in fields.items() if key not in exclude\n )\n\n return filtered_fields", "def _check_non_empty_fields(self, instance, exclusions=[]):\n empty_values = [None, \"\", [], {}]\n for slot in instance.__slots__:\n attribute = instance.__getattribute__(slot)\n if slot not in exclusions:\n self.assertTrue(\n attribute not in empty_values,\n \"Field '{}.{}' is empty!\".format(instance.__class__, slot)\n )\n if instance.__class__.isEmbeddedType(slot):\n if isinstance(attribute, list):\n for element in attribute:\n self._check_non_empty_fields(element, exclusions)\n elif isinstance(attribute, dict):\n for element in attribute.values():\n self._check_non_empty_fields(element, exclusions)\n else:\n self._check_non_empty_fields(attribute, exclusions)", "def subfields_none(verifield, required):\n for req_key, req_val in required.items():\n if getitem(verifield, req_key, '') == req_val:\n return False\n return True", "def remove_all_fields(self):\n self.fields = None", "def _populate(self, fields):\n schema = self.schema\n for k, v in fields.items():\n fields[k] = schema.fields[k].iget(self, v)\n\n self.modify(fields)\n self.reset_modified()", "def _make_reverse_relations_valid(self, data):\n for field_name, (field, related_field) in self._get_reverse_fields().items():\n if data.get(field.source) is None:\n continue\n if isinstance(field, serializers.ListSerializer):\n field = field.child\n if isinstance(field, serializers.ModelSerializer):\n # find the serializer field matching the reverse model relation\n for sub_field in field.fields.values():\n if sub_field.source == related_field.name:\n sub_field.required = False\n # found the matching field, move on\n break", "def get_fields(schema: BaseModel, exclude_dump_only: bool = False) -> dict:\n if hasattr(schema, \"fields\"):\n fields = schema.fields\n elif hasattr(schema, \"_declared_fields\"):\n fields = 
copy.deepcopy(schema._declared_fields)\n else:\n raise ValueError(\n \"{!r} doesn't have either `fields` or `_declared_fields`.\".format(schema)\n )\n Meta = getattr(schema, \"Meta\", None)\n return filter_excluded_fields(fields, Meta, exclude_dump_only)", "def modify(self, fields=None, **fields_kwargs):\n modified_fields = set()\n fields = self.make_dict(fields, fields_kwargs)\n fields = self._modify(fields)\n for field_name, field_val in fields.items():\n in_schema = field_name in self.schema.fields\n if in_schema:\n setattr(self, field_name, field_val)\n modified_fields.add(field_name)\n\n return modified_fields", "def data_without(self, fields):\n without = {}\n data = json.loads(self.data())\n for field, value in data.items():\n if field not in fields:\n without[field] = value\n return json.dumps(without)", "def _attrs(self):\n for field in self.model._meta.get_fields():\n if isinstance(field, (models.OneToOneField, models.ManyToOneRel)):\n # Skip non-field attributes\n continue\n if field is self._geom_field:\n # Skip the geometry field, which is not an attribute\n continue\n yield field", "def test_all_exclude_op_fields(self):\n for field in OCIExcludeSerializer._opfields:\n field = \"and:\" + field\n exclude_param = {field: [\"1\", \"2\"]}\n serializer = OCIExcludeSerializer(data=exclude_param)\n self.assertTrue(serializer.is_valid())\n for field in OCIExcludeSerializer._opfields:\n field = \"or:\" + field\n exclude_param = {field: [\"1\", \"2\"]}\n serializer = OCIExcludeSerializer(data=exclude_param)\n self.assertTrue(serializer.is_valid())", "def get_empty_fields(self):\n return [f for f in self.__dict__ if not self.__dict__[f]]", "def update(self, instance, validated_data):\n assert hasattr(self.Meta, 'allowed_update_fields'), \"Serializers that are used for update must set Meta.allowed_update_fields\"\n if set(validated_data.keys()) - set(self.Meta.allowed_update_fields):\n raise serializers.ValidationError('Only updates on these fields are allowed: %s' % ', '.join(self.Meta.allowed_update_fields))\n return super(BaseSerializer, self).update(instance, validated_data)", "def _validate(self):\n fields, schema = self.__dict__, self._def.default\n extra_fields = fields.viewkeys() - schema.viewkeys()\n if len(extra_fields) > 0:\n raise AttributeError('Fields found that are not in the schema: %r' % (list(extra_fields)))\n for key in fields.iterkeys():\n if type(fields[key]) is not type(schema[key]):\n raise AttributeError('Invalid %s for field \"%s\", should be %s' %\n (type(fields[key]), key, type(schema[key])))", "def test_exclude_params_invalid_fields(self):\n exclude_params = {\"invalid\": \"param\"}\n serializer = OCIExcludeSerializer(data=exclude_params)\n with self.assertRaises(serializers.ValidationError):\n serializer.is_valid(raise_exception=True)", "def intialize_from_fields(self):\n raise NotImplementedError", "def only(self, *fields):\n from jetengine.fields.base_field import BaseField\n\n only_fields = {}\n for field_name in fields:\n if isinstance(field_name, (BaseField,)):\n field_name = field_name.name\n\n only_fields[field_name] = QueryFieldList.ONLY\n\n # self.only_fields = fields.keys()\n return self.fields(True, **only_fields)", "def remove_read_only_fields(self):\n self.fields = XML_List(Elements.FIELDS, [field for field in self.fields if\n not field.read_only or not str_to_bool(field.read_only)])", "def fields2jsonschema(self, fields, *, ordered=False, partial=None):\n jsonschema = {\"type\": \"object\", \"properties\": OrderedDict() if ordered else {}}\n\n 
for field_name, field_obj in fields.items():\n observed_field_name = field_obj.data_key or field_name\n prop = self.field2property(field_obj)\n jsonschema[\"properties\"][observed_field_name] = prop\n\n if field_obj.required:\n if not partial or (\n is_collection(partial) and field_name not in partial\n ):\n jsonschema.setdefault(\"required\", []).append(observed_field_name)\n\n if \"required\" in jsonschema:\n jsonschema[\"required\"].sort()\n\n return jsonschema", "def _update_allowed_fields(self) -> list:\n raise NotImplementedError('Each model has to have its list of update allowed fields')", "def set_form_readonly_fields(formset, read_only_fields):\n for form in formset:\n for field in form.fields:\n print(field)\n if any(field in s for s in read_only_fields):\n print(field)\n form.fields[field].widget.attrs['disabled'] = True", "def __init__(self, **kwargs):\n\n for (k, v) in self._fields:\n if k in kwargs:\n self.__dict__[k] = v.validate(kwargs[k])\n self.__dict__[k] = v.default", "def include_extra_kwargs(self, kwargs, extra_kwargs):\n if extra_kwargs.get(\"read_only\", False):\n for attr in [\n \"required\",\n \"default\",\n \"allow_blank\",\n \"allow_null\",\n \"min_length\",\n \"max_length\",\n \"min_value\",\n \"max_value\",\n \"validators\",\n \"queryset\",\n ]:\n kwargs.pop(attr, None)\n\n if extra_kwargs.get(\"default\") and kwargs.get(\"required\") is False:\n kwargs.pop(\"required\")\n\n if extra_kwargs.get(\"read_only\", kwargs.get(\"read_only\", False)):\n extra_kwargs.pop(\n \"required\", None\n ) # Read only fields should always omit the 'required' argument.\n\n kwargs.update(extra_kwargs)\n\n return kwargs", "def test_missing_mandatory_attributes():\n model_definition = {'source': {'type': 'list',\n 'required': True,\n 'persisted': True},\n 'resources.title': {'type': 'text',\n 'required': True,\n 'persisted': True}}\n # missing language in the model\n _ = ProductModelFactory(model_definition)", "def _check_required_fields(self):\n assert self.title\n assert self.format", "def full_clean(self, exclude=None, validate_unique=True):\n if isinstance(self.json_data, (str, bytes, bytearray)):\n try:\n _unused = loads(self.json_data)\n\n except JSONDecodeError:\n raise ValidationError('This field contains invalid JSON.')\n\n super().full_clean(\n exclude=exclude, validate_unique=validate_unique\n )", "def is_required(self, field):\n return field.scheme.is_required and not field.scheme.is_pk", "def get_fields(self):\n fields = super(GeoModelSerializer, self).get_fields()\n # Set the geometry field name when it's undeclared.\n if not self.Meta.geom_field:\n for name, field in fields.items():\n if isinstance(field, GeometryField):\n self.Meta.geom_field = name\n break\n return fields", "def only(self, fields):\r\n return self._only_or_defer('only', fields)", "def required_fields(self, gid):\n r = self.get(\"/groups/{g:d}/fields\".format(g=gid))\n return r.json()", "def set_fields(self, **kwargs):\n for key, value in kwargs.items():\n if key in self.fields.keys():\n if type(value) != bool:\n raise TypeError('Expecting Bool passed {}'\n .format(type(value)))\n self.fields[key] = value\n else:\n raise KeyError", "def add_required_properties(self, p: str):\n # TODO: Deprecate\n for k in p.keys():\n try:\n self._properties[k].set_required(True)\n except KeyError:\n self._properties.define_property(name=k, supported=False, required=True)", "def validate_fields(self, tree):\n # Check fields\n fields = list(tree.keys())\n for k in self.fields:\n assert (k in fields)", "def 
run_validation(self, data=empty):\n\n # no idea why there is no such built in feature in DRF\n if data is not empty:\n unknown = set(data) - set(self.fields)\n if unknown:\n errors = ['Unknown field: {}'.format(f) for f in unknown]\n raise ValidationError({api_settings.NON_FIELD_ERRORS_KEY: errors})\n return super().run_validation(data)", "def _prepare_add_missing_fields(self, values):\n res = {}\n onchange_fields = ['name', 'price_unit', 'product_uom', 'tax_id']\n if values.get('order_id') and values.get('product_id') and any(f not in values for f in onchange_fields):\n line = self.new(values)\n line.product_id_change()\n for field in onchange_fields:\n if field not in values:\n res[field] = line._fields[field].convert_to_write(line[field], line)\n res['init_qty'] = values.get('product_uom_qty')\n _logger.debug(\"********************* dropship_portal\\sale_order res **********************: %r\", res)\n return res", "def clean(self):\n cleaned_data = super().clean()\n cleaned_data = {key: field for key, field in cleaned_data.items()\n if field is not None}\n return cleaned_data", "def test_unknown_fields_are_not_allowed() -> None:\n with pytest.raises(pydantic.ValidationError):\n r4.Meta(unknown_field=True)", "def test_api_fields(self) -> None:\n expected_fields = set(Stream.API_FIELDS) | {\"stream_id\"}\n expected_fields -= {\"id\", \"can_remove_subscribers_group_id\"}\n expected_fields |= {\"can_remove_subscribers_group\"}\n\n stream_dict_fields = set(APIStreamDict.__annotations__.keys())\n computed_fields = {\"is_announcement_only\", \"is_default\", \"stream_weekly_traffic\"}\n\n self.assertEqual(stream_dict_fields - computed_fields, expected_fields)\n\n expected_fields = set(Subscription.API_FIELDS)\n\n subscription_dict_fields = set(APISubscriptionDict.__annotations__.keys())\n computed_fields = {\"in_home_view\", \"email_address\", \"stream_weekly_traffic\", \"subscribers\"}\n # `APISubscriptionDict` is a subclass of `APIStreamDict`, therefore having all the\n # fields in addition to the computed fields and `Subscription.API_FIELDS` that\n # need to be excluded here.\n self.assertEqual(\n subscription_dict_fields - computed_fields - stream_dict_fields,\n expected_fields,\n )", "def _initFields(self):\n pass", "def strip_empty_optional_fields(object_dict):\n return {k: v for k, v in object_dict.items() if v is not None}", "def validate(self):\n for fieldname in getattr(self, '_body_fields', []):\n val_name = 'validate_{fieldname}'.format(fieldname=fieldname)\n field = getattr(self, fieldname)\n val = getattr(self, val_name, None)\n if val is not None:\n val()\n elif isinstance(\n field,\n BaseAgaveResource\n ):\n field.validate()", "def check_mandatory_props(klass: pyorient.ogm.declarative.DeclarativeMeta, obj: Dict):\n missing = []\n props = klass.objects.g.props_from_db[klass](Graph.compute_all_properties(klass))\n for k, v in props.items():\n prop = getattr(klass, k)\n if hasattr(prop, 'mandatory'):\n if prop.mandatory and k not in obj:\n # Fix values if default set \n if k == \"revoked\":\n obj[k] = False\n continue\n if k == \"spec_version\":\n obj[k] = \"2.1\"\n continue\n missing.append(k)\n if isinstance(prop, odbproperty.String):\n obj[k] = 'added_default'\n elif isinstance(prop, (odbproperty.Date, odbproperty.DateTime)):\n obj[k] = get_datetime()\n elif isinstance(prop, odbproperty.EmbeddedList):\n obj[k] = ['added_default']\n elif isinstance(prop, odbproperty.Integer):\n obj[k] = 0\n elif isinstance(prop, odbproperty.Float):\n obj[k] = 0.0\n elif isinstance(prop, 
odbproperty.Binary):\n obj[k] = 0\n elif isinstance(prop, odbproperty.Byte):\n obj[k] = 0\n elif isinstance(prop, odbproperty.Decimal):\n obj[k] = 0.0\n elif isinstance(prop, odbproperty.Long):\n obj[k] = 0\n elif isinstance(prop, odbproperty.Short):\n obj[k] = 0\n elif isinstance(prop, odbproperty.Boolean):\n obj[k] = True\n else:\n logging.info(f'What to do with missing mandatory field {k} of type {v.__class__}?')\n if missing:\n logging.info(f'missing mandatory fields for {obj[\"id_\"]}: {missing}')\n return obj", "def test_set_non_dictionary_based_field(self):\n self.assertRaises(TypeError, self._p.set_fields, '')", "def set_specific_fields(self):\n raise NotImplementedError(\"Must be defined by subclass!\")", "def test_prep_country_fields_flat(self):\n original_flag = self.form.country_optional\n self.form.country_optional = True\n original_fields = self.form.fields\n original_removed = getattr(self.form, 'removed_fields', None)\n original_computed = getattr(self.form, 'computed_fields', None)\n self.form.fields = original_fields.copy()\n if original_removed is not None:\n self.form.removed_fields = original_removed.copy()\n if original_computed is not None:\n self.form.computed_fields = original_computed.copy()\n remaining = original_fields.copy()\n opts, field_rows = {'fake_opts': 'fake', 'fields': ['nope']}, [{'name': 'assigned_field'}]\n args = ['arbitrary', 'input', 'args']\n kwargs = {'test_1': 'data_1', 'test_2': 'data_2'}\n field_names = (self.form.country_field_name, 'country_flag', )\n if not any(remaining.get(name, None) for name in field_names):\n fix_fields = {name: self.get_missing_field(name) for name in field_names if name not in remaining}\n remaining.update(fix_fields)\n expected_add = {name: remaining[name] for name in field_names if name in remaining}\n expected_field_rows = field_rows.copy()\n expected_field_rows.append(expected_add)\n expected_remaining = {name: field for name, field in remaining.items() if name not in expected_add}\n expected_opts = deepcopy(opts)\n # expected_opts['fields'].append(field_names)\n kwargs['flat_fields'] = True\n expected_remaining.update(expected_add)\n\n sent = (opts, field_rows, remaining, *args)\n r_opts, r_rows, r_remaining, *r_args, r_kwargs = self.form.prep_country_fields(*sent, **kwargs)\n self.assertEqual(expected_opts, r_opts)\n self.assertEqual(expected_field_rows, r_rows)\n self.assertEqual(expected_remaining, r_remaining)\n self.assertEqual(args, r_args)\n self.assertEqual(kwargs, r_kwargs)\n\n self.form.country_optional = original_flag\n self.form.fields = original_fields\n if original_removed is not None:\n self.form.removed_fields = original_removed\n if original_computed is not None:\n self.form.computed_fields = original_computed\n pass", "def set_fields(self, upstream_obj, nonparam_fields=None):\n default_data = upstream_obj.default_data(start_year=self.start_year,\n metadata=True)\n\n if self.raw_input_fields is None:\n self.raw_input_fields = {}\n for field in self._meta.fields:\n if (getattr(self, field.attname, None) and\n field.name not in nonparam_fields):\n raw_val = getattr(self, field.attname)\n if field.name.endswith(\"cpi\") and isinstance(raw_val, bool):\n raw_val = str(raw_val)\n self.raw_input_fields[field.name] = raw_val\n\n input_fields, failed_lookups = param_formatters.parse_fields(\n self.raw_input_fields,\n default_data\n )\n\n if failed_lookups:\n # distinct elements\n potential_failed_lookups = set(failed_lookups)\n # only keep parameters that used to be in the upstream package\n 
set_failed_lookups = potential_failed_lookups - nonparam_fields\n if self.deprecated_fields is None:\n self.deprecated_fields = []\n # drop parameters that we already know are deprecated\n set_failed_lookups.difference_update(self.deprecated_fields)\n self.deprecated_fields += list(set_failed_lookups)\n\n self.input_fields = input_fields", "def get_fields(self):\n fields = {}\n allowed_types = (\n SerializerMethodField,\n Field,\n Serializer,\n )\n for attr in dir(self):\n if attr == 'data':\n continue\n\n if isinstance(getattr(self, attr), allowed_types):\n fields[attr] = getattr(self, attr)\n\n return fields", "def test_base_schema_ignores_unknown_fields():\n assert BaseSchema().load({\"unknown\": \"field\"}) == {}", "def __init__(self, ignoreUnknownFields = False):\n super(Deserializer, self).__init__(ignore_unknown_fields = ignoreUnknownFields)", "def __createFields(self):\n fields = self.updateFields\n for field in fields:\n self.__createField(field)", "def get_fields(cls, fields=None, excludes=None):\r\n\r\n final_fields = {}\r\n fields = fields or []\r\n excludes = excludes or []\r\n\r\n if not cls._meta.object_class:\r\n return final_fields\r\n\r\n for name, f in cls._meta.object_class._fields.iteritems():\r\n # If the field name is already present, skip\r\n if name in cls.base_fields:\r\n continue\r\n\r\n # If field is not present in explicit field listing, skip\r\n if fields and name not in fields:\r\n continue\r\n\r\n # If field is in exclude list, skip\r\n if excludes and name in excludes:\r\n continue\r\n\r\n # TODO: Might need it in the future\r\n # if cls.should_skip_field(f):\r\n # continue\r\n\r\n api_field_class = cls.api_field_from_mongo_field(f)\r\n\r\n primary_key = f.primary_key or name == getattr(cls._meta, 'id_field', 'id')\r\n\r\n kwargs = {\r\n 'attribute': name,\r\n 'unique': f.unique or primary_key,\r\n 'null': not f.required and not primary_key,\r\n 'help_text': f.help_text,\r\n }\r\n\r\n # If field is not required, it does not matter if set default value,\r\n # so we do\r\n if not f.required:\r\n kwargs['default'] = f.default\r\n else:\r\n # MongoEngine does not really differ between user-specified default\r\n # and its default, so we try to guess\r\n if isinstance(f, mongoengine.ListField):\r\n if not callable(f.default) or f.default() != []: # If not MongoEngine's default\r\n kwargs['default'] = f.default\r\n elif isinstance(f, mongoengine.DictField):\r\n if not callable(f.default) or f.default() != {}: # If not MongoEngine's default\r\n kwargs['default'] = f.default\r\n else:\r\n if f.default is not None: # If not MongoEngine's default\r\n kwargs['default'] = f.default\r\n\r\n kwargs = cls.api_field_options(name, f, kwargs)\r\n\r\n final_fields[name] = api_field_class(**kwargs)\r\n final_fields[name].instance_name = name\r\n final_fields[name]._primary_key = primary_key\r\n\r\n # We store MongoEngine field so that schema output can show\r\n # to which content the list is limited to (if any)\r\n if isinstance(f, mongoengine.ListField):\r\n final_fields[name].field = f.field\r\n\r\n return final_fields", "def copy_fields(self, entity, all_fields=False):\n\n if all_fields:\n fields = self.get_all_fields()\n else:\n fields = self.get_non_pk_fields()\n\n for field in fields.keys():\n setattr(self, field, getattr(entity, field, None))", "def _test_bad_request_omit_field(self, user, fields, omit_field, zendesk_mock_class, datadog_mock):\r\n filtered_fields = {k: v for (k, v) in fields.items() if k != omit_field}\r\n resp = self._build_and_run_request(user, 
filtered_fields)\r\n self._assert_bad_request(resp, omit_field, zendesk_mock_class, datadog_mock)", "def warn_on_missing_correlated_fields(cls, values):\n accumulated_warnings = []\n for field_set in CORRELATED_STRUCTURE_FIELDS:\n missing_fields = {f for f in field_set if values.get(f) is None}\n if missing_fields and len(missing_fields) != len(field_set):\n accumulated_warnings += [\n f\"Structure with values {values} is missing fields {missing_fields} which are required if {field_set - missing_fields} are present.\"\n ]\n\n for warn in accumulated_warnings:\n warnings.warn(warn, MissingExpectedField)\n\n return values", "def parse_fields(self, request, fields=None, skip=set(), additional=[]):\n fields = fields or self.fields\n fields = [f for f in fields if f.name not in skip]\n fields.extend(additional)\n result = dict()\n for field in fields:\n try:\n result[field.name] = field.get_value(request, self)\n except ValueError, msg:\n raise HTTP_BAD_REQUEST(str(msg))\n return result", "def _parse_fields(self, fields):\n\n parsed_fields = set()\n\n if fields is not None and isinstance(fields, (list, tuple)):\n if len(fields) > 0 and isinstance(fields[0], (list,tuple)):\n parsed_fields.update(fields)\n else:\n parsed_fields.update([(x, None) for x in fields])\n\n # Does not support field.attname.\n field_names = set((field.name, None) for field in self.model._meta.fields if not field.primary_key)\n non_model_fields = parsed_fields.difference(field_names)\n if non_model_fields:\n raise ValueError(\"The following fields do not exist in this\"\n \" model: {0}\".format(\", \".join(x[0] for x in non_model_fields)))\n else:\n parsed_fields.update(self._find_text_fields())\n\n return parsed_fields", "def validate(self, attrs):\n\n unknown = set(self.initial_data) - set(self.fields)\n if unknown:\n raise ValidationError('Unknown field(s): {}'.format('', ''.join(unknown)))\n return attrs", "def _save_direct_relations(self, kwargs):\n for field_name, field in self.fields.items():\n if field.read_only:\n continue\n if isinstance(self._validated_data, dict) and self._validated_data.get(field.source) is None:\n continue\n if not isinstance(field, serializers.BaseSerializer):\n continue\n if hasattr(self, 'Meta') and hasattr(self.Meta, 'model'):\n # ModelSerializer (or similar) so we need to exclude reverse relations\n try:\n _, direct = self._get_related_field(field)\n except FieldDoesNotExist:\n continue\n if not direct:\n continue\n\n # reinject validated_data\n field._validated_data = self._validated_data[field_name]\n self._validated_data[field_name] = field.save(**kwargs.pop(field_name, {}))", "def prepare_duplication(self):\n for field in self.fields:\n ofield = self.fields[field]\n\n if self.duplicate:\n if ofield.primary_key:\n self.exclude_field(field)\n continue\n\n if not self.auto_fields:\n # add others if needed\n if hasattr(ofield, 'auto_now') or \\\n hasattr(ofield, 'auto_now_add'):\n if ofield.auto_now or ofield.auto_now_add:\n self.exclude_field(field)\n continue", "def filter_allowed_fields(self):\n allowed_fields = super().filter_allowed_fields\n # Remove assignment_id\n allowed_fields.remove('assignment_id')\n return allowed_fields", "def get_fields(self, fields=None, excludes=None):\n final_fields = {}\n fields = fields or []\n excludes = excludes or []\n\n for f in self.model._meta.fields:\n # If the field name is already present, skip\n if f.name in self.fields:\n continue\n\n # If field is not present in explicit field listing, skip\n if fields and f.name not in fields:\n 
continue\n\n # If field is in exclude list, skip\n if excludes and f.name in excludes:\n continue\n\n if self.should_skip_field(f):\n continue\n\n index_field_class = index_field_from_django_field(f)\n\n kwargs = copy.copy(self.extra_field_kwargs)\n kwargs.update({\"model_attr\": f.name})\n\n if f.null is True:\n kwargs[\"null\"] = True\n\n if f.has_default():\n kwargs[\"default\"] = f.default\n\n final_fields[f.name] = index_field_class(**kwargs)\n final_fields[f.name].set_instance_name(self.get_index_fieldname(f))\n\n return final_fields", "def required_fields():\n module_logger.debug(\"In required_fields.\")\n return (\"comment\", \"lib_layout\", \"lib_selection\",\n \"ncbi_taxon_id\", \"prep_id\", \"sequencing_center\",\n \"sequencing_contact\", \"storage_duration\", \"tags\")", "def _get_not_simple_fields(cls) -> Dict[str, str]:\n\n return {\n name: anno\n for name, anno in cls._annotations().items()\n if not AnnotationWrapper(anno).is_simple_in_opt_and_not_opt\n }", "def assert_has_fields(obj: dict, fields: List[str]) -> None:\n for field in fields:\n assert field in obj.keys()", "def fields(self, fields):\n\n self._fields = fields" ]
[ "0.64765036", "0.6341806", "0.63284636", "0.6283481", "0.62026083", "0.6162853", "0.6133982", "0.6094568", "0.5909462", "0.5858007", "0.58525836", "0.58163685", "0.5798981", "0.5755898", "0.5753509", "0.5744149", "0.57343006", "0.5726579", "0.57135725", "0.57066965", "0.5697895", "0.5679485", "0.56732327", "0.5667943", "0.5658092", "0.56472766", "0.5645767", "0.5642113", "0.5634381", "0.5613377", "0.5597485", "0.556664", "0.5563153", "0.5562932", "0.5556804", "0.55545753", "0.55507946", "0.5546197", "0.5518787", "0.5501841", "0.54938257", "0.5489907", "0.546792", "0.5465436", "0.5460778", "0.5458379", "0.5456547", "0.5452633", "0.5430271", "0.5424323", "0.54187435", "0.5415642", "0.54106253", "0.54092026", "0.53982997", "0.5391427", "0.53885454", "0.5380756", "0.5376163", "0.5369968", "0.53688973", "0.5365635", "0.5345369", "0.5340302", "0.5336625", "0.5315556", "0.5298086", "0.5297106", "0.5296297", "0.5294244", "0.5292724", "0.5280245", "0.52660614", "0.5245422", "0.524073", "0.52399427", "0.5234158", "0.52335715", "0.5232401", "0.522527", "0.5221", "0.52206576", "0.5207439", "0.5196467", "0.51959836", "0.51877165", "0.51793694", "0.5170765", "0.5162671", "0.51586664", "0.5146215", "0.5138446", "0.51370716", "0.5131456", "0.5121035", "0.5120331", "0.5119835", "0.5113692", "0.51068026", "0.51066685" ]
0.87516534
0
Update serializer_field_mapping to use fields setting required=True
def __init__(self, *args, **kwargs):
    super().__init__(*args, **kwargs)
    set_fields_to_required(self, ['end_date'])
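For context, a minimal usage sketch of this pattern, assuming a configured Django REST Framework environment: the set_fields_to_required helper is the one that appears first among the negatives below, while EventSerializer and its field names are hypothetical, introduced here only for illustration.

from rest_framework import serializers


def set_fields_to_required(serializer, ignore_fields=None):
    # Helper as shown in the first negative below: force every bound
    # field to required/non-null unless it is explicitly ignored.
    if ignore_fields is None:
        ignore_fields = []
    for field in serializer.fields.values():
        if field.field_name not in ignore_fields:
            field.required = True
            field.allow_null = False
            field.allow_blank = False


class EventSerializer(serializers.Serializer):
    # Hypothetical serializer; the field names are assumptions.
    name = serializers.CharField(required=False)
    start_date = serializers.DateField(required=False)
    end_date = serializers.DateField(required=False)

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Every field except end_date now rejects missing or null input;
        # end_date keeps its original optional behaviour.
        set_fields_to_required(self, ['end_date'])

This mirrors the document's approach of tightening field requirements in __init__ rather than overriding serializer_field_mapping itself.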
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def set_fields_to_required(serializer, ignore_fields=None):\n if ignore_fields is None:\n ignore_fields = []\n for field in serializer.fields.values():\n if field.field_name not in ignore_fields:\n field.required = True\n field.allow_null = False\n field.allow_blank = False", "def required_dict_validator(self, dict_fields, model_name, erp_required=[]):\n required_fields = self.env['settings.field'].sudo().search([('model_id.model', '=', model_name)])\n\n if required_fields:\n erp_required.extend(required_fields.required_field_ids.filtered(lambda x: x.id not in [er.id for er in erp_required]))\n\n for field in erp_required:\n if field.name in dict_fields and 'required' not in dict_fields[field.name]:\n dict_fields[field.name]['required'] = True\n dict_fields[field.name]['empty'] = False\n\n return dict_fields", "def get_fields(self):\n fields = super(GeoModelSerializer, self).get_fields()\n # Set the geometry field name when it's undeclared.\n if not self.Meta.geom_field:\n for name, field in fields.items():\n if isinstance(field, GeometryField):\n self.Meta.geom_field = name\n break\n return fields", "def serialize_field(self, payload, model, field, mapping):\n try:\n if field in model.fk_field_names():\n payload[field] = self.serialize_related_field(\n model, field, mapping\n )\n else:\n payload[field] = getattr(model, field)\n except SkipField:\n payload.pop(field, None)", "def __init__(self, *args, **kwargs):\n location_controls = kwargs.pop('location_controls', False)\n\n self.Meta.fields = list(self.Meta.fields)\n\n if location_controls:\n self.Meta.fields += ['next_location_id', 'previous_location_id']\n\n super(LocationFullSerializer, self).__init__(*args, **kwargs)", "def _populate_always_present_fields(self, field):\n defaults = [\n (\"label\", \"\"),\n (\"instructions\", \"\"),\n (\"placeholder\", \"\"),\n (\"defaultValue\", \"\"),\n (\"restrictions\", {}),\n (\"errorMessages\", {}),\n ]\n field.update({\n key: value\n for key, value in defaults if key not in field\n })", "def fields2jsonschema(self, fields, *, ordered=False, partial=None):\n jsonschema = {\"type\": \"object\", \"properties\": OrderedDict() if ordered else {}}\n\n for field_name, field_obj in fields.items():\n observed_field_name = field_obj.data_key or field_name\n prop = self.field2property(field_obj)\n jsonschema[\"properties\"][observed_field_name] = prop\n\n if field_obj.required:\n if not partial or (\n is_collection(partial) and field_name not in partial\n ):\n jsonschema.setdefault(\"required\", []).append(observed_field_name)\n\n if \"required\" in jsonschema:\n jsonschema[\"required\"].sort()\n\n return jsonschema", "def setRequiredValues(self, instance):\n for key in instance.__slots__:\n if key in instance.requiredFields:\n value = self.getTypicalValue(type(instance), key)\n setattr(instance, key, value)", "def remove_null_fields(self):\n with open(self.schema_path, 'r') as file_obj:\n schema_data = yaml.safe_load(file_obj)\n schema_fields = schema_data.get('mapping').keys()\n for field in schema_fields:\n # We want to keep 'false' and 0 values, and avoid removing fields that are required in the schema.\n if field in self.data and self.data[field] in (None, '', [], {}) and \\\n not schema_data.get('mapping', {}).get(field, {}).get('required'):\n self.data.pop(field)", "def intialize_from_fields(self):\n raise NotImplementedError", "def _initFields(self):\n pass", "def requires_mapping(self):", "def _make_reverse_relations_valid(self, data):\n for field_name, (field, related_field) in 
self._get_reverse_fields().items():\n if data.get(field.source) is None:\n continue\n if isinstance(field, serializers.ListSerializer):\n field = field.child\n if isinstance(field, serializers.ModelSerializer):\n # find the serializer field matching the reverse model relation\n for sub_field in field.fields.values():\n if sub_field.source == related_field.name:\n sub_field.required = False\n # found the matching field, move on\n break", "def test_map_field_base_case(self):\n field = 'title'\n mapping = {\n 'type': 'text',\n 'index': True\n }\n\n actual = mapper._map_field(mapping, field)\n expected = {\n 'properties': {\n 'title': {\n 'type': 'text',\n 'index': True\n }\n }\n }\n self.assertEqual(actual, expected)", "def api_field_from_django_field(cls, f, default=CharField):\n if isinstance(f, JSONField):\n return JSONApiField\n \n return super(PandaModelResource, cls).api_field_from_django_field(f, default)", "def _validate(mapping):\n missing_fields = _MANDATORY_FIELDS - set(mapping)\n if missing_fields:\n raise ValueError(\n \"Missing mandatory fields: {0}\".format(\n \", \".join(repr(field) for field in sorted(missing_fields))\n )\n )", "def test_create_enforces_required_fields(self):\n serializer = ServiceSerializer(data = {}, context = dict(project = self.project))\n self.assertFalse(serializer.is_valid())\n required_fields = {'name', 'category'}\n self.assertCountEqual(serializer.errors.keys(), required_fields)\n for name in required_fields:\n self.assertEqual(serializer.errors[name][0].code, 'required')", "def __init__(self, **kwargs):\n\n for (k, v) in self._fields:\n if k in kwargs:\n self.__dict__[k] = v.validate(kwargs[k])\n self.__dict__[k] = v.default", "def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n self.fields['first_name'].required = False\n self.fields['last_name'].required = False\n self.fields['institution'].required = False\n self.fields['institution_logo'].required = False\n self.fields['allow_notifications'].required = False", "def required_fields(required_fields=[]):\n def decorator(func):\n \"\"\" The decorator applied to the obj_create method\"\"\"\n def wrapper(resource, bundle=None, **kwargs):\n \"\"\" wraps the decorated method and verifies a list of required\n fields when a new object is being created.\n\n \"\"\"\n if not isinstance(bundle, Bundle):\n request = bundle\n data = resource.deserialize(\n request, request.body,\n format=request.META.get('CONTENT_TYPE', 'application/json')\n )\n bundle = resource.build_bundle(request=request, data=data)\n else:\n request = None\n\n for required_field in required_fields:\n if required_field not in bundle.data:\n response = HttpBadRequest(\n json.dumps(\"missing %s field\" % required_field),\n content_type=bundle.request.META['CONTENT_TYPE'])\n raise ImmediateHttpResponse(response=response)\n return func(resource, bundle=bundle, **kwargs)\n return wrapper\n return decorator", "def convert_fields(fields, _fields):\n mapper = {\n \"id\": \"local_id\",\n \"local_id\": \"id\"\n }\n fields = deepcopy(fields)\n for field in fields:\n if field['name'] in _fields:\n field['name'] = mapper[field['name']]\n return fields", "def _update_fields_with_default(annotation_fields, defaults_dict):\n all_fields = OrderedDict()\n all_filed_keys = _merge_field_keys(annotation_fields, defaults_dict)\n for name in all_filed_keys:\n # Get or create annotation\n annotation = (\n annotation_fields[name]\n if name in annotation_fields\n else _get_annotation_by_value(defaults_dict.get(name, Input._EMPTY))\n )\n # 
Create annotation if is class type and update default\n annotation = _update_annotation_with_default(annotation, name, defaults_dict.get(name, Input._EMPTY))\n all_fields[name] = annotation\n return all_fields", "def get_required_fields(self) -> Iterable[fields.Field]:\n for model_field in self.get_fields():\n if model_field.required:\n yield model_field", "def _get_simple_fields(cls) -> dict:\n return {\n name: tp\n for name, tp in cls._annotations().items()\n if AnnotationWrapper(tp).is_simple_in_opt_and_not_opt\n }", "def _assign_fields_to_params(cls, fields, params):\n if fields is None:\n fields = cls.get_default_read_fields()\n if fields:\n params['fields'] = ','.join(fields)", "def test_set_non_dictionary_based_field(self):\n self.assertRaises(TypeError, self._p.set_fields, '')", "def test_deserialize_required_fields():\n data = {}\n serializer = DogSerializer(data=data)\n assert not serializer.is_valid()\n assert len(serializer.errors) == 1\n\n data = {'name': 'bruce'}\n serializer = DogSerializer(data=data)\n assert serializer.is_valid()\n\n serializer.save()\n assert Dog.objects.count() == 1\n\n dog = Dog.objects.first()\n assert dog.name == 'bruce'", "def enforce_required_fields(self, attrs):\n if self.instance is not None:\n return\n # missing_items = {\n # field_name: self.missing_message\n # for field_name in self.fields\n # if field_name not in attrs\n # }\n # if missing_items:\n # raise ValidationError(missing_items, code='required')", "def test_defining_only_or_defer_on_nonexistant_fields_fails(self):", "def get_fields(self):\n fields = super(RelationSerializer, self).get_fields()\n\n if self.request.method == \"GET\":\n fields['type'] = serializers.CharField(source='type.name')\n else:\n fields['type'] = serializers.PrimaryKeyRelatedField(queryset=RelationType.objects.all())\n\n return fields", "def jsonable(self, *args, **options):\n d = {}\n for field_name, field in self.schema.normal_fields.items():\n field_val = getattr(self, field_name, None)\n field_val = field.jsonable(self, field_val)\n if field_val is not None:\n d[field_name] = field_val\n\n return d", "def update(self, instance, validated_data):\n assert hasattr(self.Meta, 'allowed_update_fields'), \"Serializers that are used for update must set Meta.allowed_update_fields\"\n if set(validated_data.keys()) - set(self.Meta.allowed_update_fields):\n raise serializers.ValidationError('Only updates on these fields are allowed: %s' % ', '.join(self.Meta.allowed_update_fields))\n return super(BaseSerializer, self).update(instance, validated_data)", "def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n ignore_fields = (\n 'about_me',\n 'romanized_first_name',\n 'romanized_last_name',\n 'postal_code',\n )\n set_fields_to_required(self, ignore_fields=ignore_fields)", "def _populate(self, fields):\n schema = self.schema\n for k, v in fields.items():\n fields[k] = schema.fields[k].iget(self, v)\n\n self.modify(fields)\n self.reset_modified()", "def test_alright_when_non_required_field_is_missing():\n\n model_definition = {'language': {'type': 'fixed',\n 'required': True,\n 'persisted': True},\n 'source': {'type': 'list',\n 'required': False,\n 'persisted': True},\n 'resources.title': {'type': 'text',\n 'required': False,\n 'persisted': True}}\n product1 = {'language': 'english'}\n factory = ProductModelFactory(model_definition)\n factory.build('product1', product1)\n # Ok. 
No exceptions were raised.", "def clean_fields(self, instance, exclude=None):\n errors = {}\n exclude = exclude or []\n for name, f in self.properties.items():\n raw_value = getattr(instance, name, None)\n is_blank = not bool(raw_value)\n is_nullable = f.null\n is_defaulted = f.column.default or f.column.server_default\n is_required = f.required\n\n is_skippable = is_blank and (is_nullable or is_defaulted or not is_required)\n\n if name in exclude or is_skippable:\n continue\n try:\n setattr(instance, name, f.clean(raw_value, instance))\n except ValidationError as e:\n errors[name] = e.error_list\n if errors:\n raise NestedValidationError(errors)", "def _validate_fields(self, change_fields):\n pass", "def _update_fields_with_default(\n annotation_fields: Dict[str, Union[Annotation, Input, Output]], defaults_dict: Dict[str, Any]\n ) -> Dict[str, Union[Annotation, Input, Output]]:\n all_fields = OrderedDict()\n all_filed_keys = _merge_field_keys(annotation_fields, defaults_dict)\n for name in all_filed_keys:\n # Get or create annotation\n annotation = (\n annotation_fields[name]\n if name in annotation_fields\n else _get_annotation_by_value(defaults_dict.get(name, Input._EMPTY))\n )\n # Create annotation if is class type and update default\n annotation = _update_annotation_with_default(annotation, name, defaults_dict.get(name, Input._EMPTY))\n all_fields[name] = annotation\n return all_fields", "def test_missing_mandatory_attributes():\n model_definition = {'source': {'type': 'list',\n 'required': True,\n 'persisted': True},\n 'resources.title': {'type': 'text',\n 'required': True,\n 'persisted': True}}\n # missing language in the model\n _ = ProductModelFactory(model_definition)", "def test_swagger_field_is_required():\n raw_schema = RawSchemaFactory()\n raw_schema.pop('swagger', None)\n\n assert 'swagger' not in raw_schema\n\n with pytest.raises(ValidationError) as err:\n swagger_schema_validator(raw_schema)\n\n assert_message_in_errors(\n MESSAGES['required']['required'],\n err.value.detail,\n 'required.swagger',\n )", "def is_required(self, field):\n return field.scheme.is_required and not field.scheme.is_pk", "def test_alright_when_required_field_is_missing_but_default_is_given():\n\n model_definition = {'language': {'type': 'fixed',\n 'required': True,\n 'persisted': True,\n 'default': 'portuguese'},\n 'source': {'type': 'list',\n 'required': False,\n 'persisted': True}}\n product1 = {'source': ['Whatever']}\n factory = ProductModelFactory(model_definition)\n factory.build('product1', product1)\n # Ok. 
No exceptions were raised.", "def clean_fields(self, exclude=None):\n obj = self._obj\n if obj is None:\n return None\n\n self.event = self.clean_event(self.event)\n self.resource_name = self.clean_resource_name(obj.__class__.__name__)\n self.resource_id = obj.id\n self.site = self.clean_site(obj)\n\n serializer_class = self.get_serializer_for_resource(self.resource_name)\n serializer = serializer_class(obj)\n self._resource = serializer.data", "def field_mapping(self):\n fields = self.fields\n if self.target_field is not None:\n del fields[self.target_field.get('name')]\n field_labels = list(self.fields.keys())\n\n field_mapping = {\n name: (\n field_labels.index(name),\n lambda value, e=e: self.parse_type(value, e)\n )\n for name, e in fields.items()\n if e.tag == f'{{{self.namespace}}}DataField'\n }\n\n field_mapping.update({\n name: (\n field_labels.index(self.find(e, 'FieldRef').get('field')),\n lambda value, e=e: self.parse_type(value, e)\n )\n for name, e in fields.items()\n if e.tag == f'{{{self.namespace}}}DerivedField'\n })\n\n return field_mapping", "def required_fields(model, values):\n if values:\n for k in list(values):\n if k not in model.__table__.columns.keys():\n values.pop(k)\n return values", "def check_for_required_fields(cls, fields=[], dataDict={}):\n\n validateRequired = Validate.required(fields=fields, dataDict=dataDict)\n if validateRequired['status'] == False:\n res = jsonify(\n {'status': 400, 'error': validateRequired['message'], 'data': []})\n return abort(make_response(res, 400))\n return True", "def get_field_deserializers(self,) -> Dict[str, Callable[[ParseNode], None]]:\n from .schedule_change_request import ScheduleChangeRequest\n\n from .schedule_change_request import ScheduleChangeRequest\n\n fields: Dict[str, Callable[[Any], None]] = {\n \"endDateTime\": lambda n : setattr(self, 'end_date_time', n.get_datetime_value()),\n \"startDateTime\": lambda n : setattr(self, 'start_date_time', n.get_datetime_value()),\n \"timeOffReasonId\": lambda n : setattr(self, 'time_off_reason_id', n.get_str_value()),\n }\n super_fields = super().get_field_deserializers()\n fields.update(super_fields)\n return fields", "def set_additional_fields(cls, model, data):\n for k, v in data.items():\n if not hasattr(model, k):\n setattr(model, k, v)", "def test_fields(self):\n\n class Foo(Model):\n field1 = StringField()\n field2 = IntegralField()\n\n assert hasattr(Foo, \"_fields\")\n assert type(Foo._fields) is dict\n\n assert not hasattr(Foo, \"field1\")\n assert \"field1\" in Foo._fields\n assert type(Foo._fields[\"field1\"]) is StringField\n\n assert not hasattr(Foo, \"field2\")\n assert \"field2\" in Foo._fields\n assert type(Foo._fields[\"field2\"]) is IntegralField", "def test_to_dict(self):\n\n class Person(Model):\n name = StringField()\n age = IntegralField(bounds = (0, None))\n siblings = ListField(of = StringField())\n\n data1 = {\n \"name\": \"Joe Shmoe\",\n \"age\": 21,\n \"siblings\": [\"Dick Shmoe\", \"Jane Shmoe\"]\n }\n person1 = Person(**data1)\n assert person1.to_dict() == data1\n\n # The defined but unset fields should still be present, but set to none\n data2 = {\"notaname\": 2, \"age\": \"lots\"}\n person2 = Person.from_dict(data2)\n assert person2.to_dict() == {\n \"notaname\": 2,\n \"age\": \"lots\",\n \"name\": None,\n \"siblings\": None\n }", "def set_fields(self, fields: FieldDict):\n super().set_fields(fields)\n nested_field: NestedField = self.fields[self.nested]\n if not isinstance(nested_field, NestedField):\n raise TypeError(\n f'The field 
\"{self.nested}\" must be a NestedField instance, not \"{nested_field}\".')\n if nested_field.many:\n raise ValueError(f'The field \"{self.nested}\" can not be set as \"many=True\".')\n self.nested_field = nested_field\n # create partial methods\n self._do_dump = partial(\n getattr(self, self.dump_method),\n target=nested_field.dump_target,\n method=nested_field.dump,\n )\n self._do_load = partial(\n getattr(self, self.load_method),\n target=nested_field.load_target,\n method=nested_field.load,\n )", "def get_field_deserializers(self,) -> Dict[str, Callable[[ParseNode], None]]:\n fields: Dict[str, Callable[[Any], None]] = {\n \"allowedToCreateApps\": lambda n : setattr(self, 'allowed_to_create_apps', n.get_bool_value()),\n \"allowedToCreateSecurityGroups\": lambda n : setattr(self, 'allowed_to_create_security_groups', n.get_bool_value()),\n \"allowedToCreateTenants\": lambda n : setattr(self, 'allowed_to_create_tenants', n.get_bool_value()),\n \"allowedToReadBitlockerKeysForOwnedDevice\": lambda n : setattr(self, 'allowed_to_read_bitlocker_keys_for_owned_device', n.get_bool_value()),\n \"allowedToReadOtherUsers\": lambda n : setattr(self, 'allowed_to_read_other_users', n.get_bool_value()),\n \"@odata.type\": lambda n : setattr(self, 'odata_type', n.get_str_value()),\n \"permissionGrantPoliciesAssigned\": lambda n : setattr(self, 'permission_grant_policies_assigned', n.get_collection_of_primitive_values(str)),\n }\n return fields", "def test_reader_serializer(self):\n transaction = TransactionFactory.build()\n serializer = TransactionSerializer(transaction)\n serialized_data = serializer.data\n\n for (\n serializer_field,\n model_field,\n ) in transaction_reader_fields_mapping.items():\n self.assertIn(serializer_field, serialized_data)\n\n if serializer_field == \"estabelecimento\":\n value = transaction.company.cnpj\n else:\n value = getattr(transaction, model_field)\n\n self.assertEqual(serialized_data[serializer_field], value)\n\n # tests if serializer validates\n serializer = TransactionSerializer(data=serialized_data)\n self.assertTrue(serializer.is_valid())", "def fields(self, fields):\n\n self._fields = fields", "def _patch_schema(self):\n fields = get_json()['data']['attributes'].keys()\n return make_entity_schema(\n self.SCHEMA, self.RESOURCE_NAME,\n make_data_schema(\n self.SCHEMA, id_required=True,\n only=fields, partial=True\n )\n )", "def test_required_field_values_are_present():\n\n model_definition = {'language': {'type': 'fixed',\n 'required': True,\n 'persisted': True},\n 'source': {'type': 'list',\n 'required': False,\n 'persisted': True},\n 'resources.title': {'type': 'text',\n 'required': True,\n 'persisted': True}}\n product1 = {'language': 'english'}\n factory = ProductModelFactory(model_definition)\n factory.build('product1', product1)", "def _save_direct_relations(self, kwargs):\n for field_name, field in self.fields.items():\n if field.read_only:\n continue\n if isinstance(self._validated_data, dict) and self._validated_data.get(field.source) is None:\n continue\n if not isinstance(field, serializers.BaseSerializer):\n continue\n if hasattr(self, 'Meta') and hasattr(self.Meta, 'model'):\n # ModelSerializer (or similar) so we need to exclude reverse relations\n try:\n _, direct = self._get_related_field(field)\n except FieldDoesNotExist:\n continue\n if not direct:\n continue\n\n # reinject validated_data\n field._validated_data = self._validated_data[field_name]\n self._validated_data[field_name] = field.save(**kwargs.pop(field_name, {}))", "def get_fields(cls, 
fields=None, excludes=None):\r\n\r\n final_fields = {}\r\n fields = fields or []\r\n excludes = excludes or []\r\n\r\n if not cls._meta.object_class:\r\n return final_fields\r\n\r\n for name, f in cls._meta.object_class._fields.iteritems():\r\n # If the field name is already present, skip\r\n if name in cls.base_fields:\r\n continue\r\n\r\n # If field is not present in explicit field listing, skip\r\n if fields and name not in fields:\r\n continue\r\n\r\n # If field is in exclude list, skip\r\n if excludes and name in excludes:\r\n continue\r\n\r\n # TODO: Might need it in the future\r\n # if cls.should_skip_field(f):\r\n # continue\r\n\r\n api_field_class = cls.api_field_from_mongo_field(f)\r\n\r\n primary_key = f.primary_key or name == getattr(cls._meta, 'id_field', 'id')\r\n\r\n kwargs = {\r\n 'attribute': name,\r\n 'unique': f.unique or primary_key,\r\n 'null': not f.required and not primary_key,\r\n 'help_text': f.help_text,\r\n }\r\n\r\n # If field is not required, it does not matter if set default value,\r\n # so we do\r\n if not f.required:\r\n kwargs['default'] = f.default\r\n else:\r\n # MongoEngine does not really differ between user-specified default\r\n # and its default, so we try to guess\r\n if isinstance(f, mongoengine.ListField):\r\n if not callable(f.default) or f.default() != []: # If not MongoEngine's default\r\n kwargs['default'] = f.default\r\n elif isinstance(f, mongoengine.DictField):\r\n if not callable(f.default) or f.default() != {}: # If not MongoEngine's default\r\n kwargs['default'] = f.default\r\n else:\r\n if f.default is not None: # If not MongoEngine's default\r\n kwargs['default'] = f.default\r\n\r\n kwargs = cls.api_field_options(name, f, kwargs)\r\n\r\n final_fields[name] = api_field_class(**kwargs)\r\n final_fields[name].instance_name = name\r\n final_fields[name]._primary_key = primary_key\r\n\r\n # We store MongoEngine field so that schema output can show\r\n # to which content the list is limited to (if any)\r\n if isinstance(f, mongoengine.ListField):\r\n final_fields[name].field = f.field\r\n\r\n return final_fields", "def _make_field_map(fields):\n field_map = {}\n for field in fields:\n if field.name in field_map:\n raise SchemaParseException(\n 'Duplicate record field name %r.' 
% field.name)\n field_map[field.name] = field\n return field_map", "def set_specific_fields(self):\n raise NotImplementedError(\"Must be defined by subclass!\")", "def get_field_deserializers(self,) -> Dict[str, Callable[[ParseNode], None]]:\n fields: Dict[str, Callable[[Any], None]] = {\n \"assignedDateTime\": lambda n : setattr(self, 'assigned_date_time', n.get_datetime_value()),\n \"capabilityStatus\": lambda n : setattr(self, 'capability_status', n.get_str_value()),\n \"@odata.type\": lambda n : setattr(self, 'odata_type', n.get_str_value()),\n \"service\": lambda n : setattr(self, 'service', n.get_str_value()),\n \"servicePlanId\": lambda n : setattr(self, 'service_plan_id', n.get_uuid_value()),\n }\n return fields", "def add_base_fields(self) -> None:\n for field in get_model_fields(self.model, foreign=False, m2m=False):\n if hasattr(self.factory, field.name) or field.has_default() or field.blank:\n continue\n setattr(\n self.factory, field.name, self._get_decl_for_model_field(field)\n )", "def depopulate(self, is_update):\n fields = {}\n schema = self.schema\n for k, field in schema.fields.items():\n is_modified = k in self.modified_fields\n orig_v = getattr(self, k)\n v = field.iset(\n self,\n orig_v,\n is_update=is_update,\n is_modified=is_modified\n )\n\n if is_modified or v is not None:\n if is_update and field.is_pk() and v == orig_v:\n continue\n\n else:\n fields[k] = v\n\n if not is_update:\n for field_name in schema.required_fields.keys():\n if field_name not in fields:\n raise KeyError(\"Missing required field {}\".format(field_name))\n\n return fields", "def __createFields(self):\n fields = self.updateFields\n for field in fields:\n self.__createField(field)", "def test_field_nullable(self):\n node_dict = {\n 'host_name': 'abc',\n 'local_router_id': '1.1.1.1',\n 'as_num': 100,\n 'bgpls_id': '0.0.0.0',\n 'igp_id': '0.0.0.0'\n }\n node = Node(**node_dict)\n for name, field in node_dict.items():\n self.assertEqual(field, node.__dict__[name])", "def _get_not_simple_fields(cls) -> Dict[str, str]:\n\n return {\n name: anno\n for name, anno in cls._annotations().items()\n if not AnnotationWrapper(anno).is_simple_in_opt_and_not_opt\n }", "def _validate(self):\n fields, schema = self.__dict__, self._def.default\n extra_fields = fields.viewkeys() - schema.viewkeys()\n if len(extra_fields) > 0:\n raise AttributeError('Fields found that are not in the schema: %r' % (list(extra_fields)))\n for key in fields.iterkeys():\n if type(fields[key]) is not type(schema[key]):\n raise AttributeError('Invalid %s for field \"%s\", should be %s' %\n (type(fields[key]), key, type(schema[key])))", "def validate_fields_for_magento(self,data):\n for field in data:\n if data[field] == None :\n del data[field]\n if data[field] == True:\n data[field] = 1\n if data[field] == False :\n data[field] = 0", "def clean(self):\n cleaned_data = super().clean()\n cleaned_data = {key: field for key, field in cleaned_data.items()\n if field is not None}\n return cleaned_data", "def _initializeRequestField(self,field,referenceField):\n\t\tvaluesDict = referenceField.values\n\t\tfield.initialize_values(valuesDict)\n\t\t\n\t\tpass", "def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n\n for field in self.fields:\n self.fields[field].label = False", "def _update_allowed_fields(self) -> list:\n raise NotImplementedError('Each model has to have its list of update allowed fields')", "def __init__(self):\n super(ObjectSchema, self).__init__()\n self.is_allow_undefined = False", "def 
test_prep_fields(self):\n pass", "def required_fields(self, gid):\n r = self.get(\"/groups/{g:d}/fields\".format(g=gid))\n return r.json()", "def writeRequiredFields(self, outputFile):\n fields = []\n for field in self.getFields():\n if not field.has_default:\n fields.append(field)\n if len(fields) < 1:\n self._writeWithIndent('requiredFields = set([])', outputFile)\n else:\n self._writeWithIndent('requiredFields = set([', outputFile)\n for field in fields:\n string_ = '\"{0}\",'.format(field.name)\n self._writeWithIndent(string_, outputFile, 2)\n self._writeWithIndent('])', outputFile)", "def get_field_deserializers(self,) -> Dict[str, Callable[[ParseNode], None]]:\n from .app_management_configuration import AppManagementConfiguration\n from .policy_base import PolicyBase\n\n from .app_management_configuration import AppManagementConfiguration\n from .policy_base import PolicyBase\n\n fields: Dict[str, Callable[[Any], None]] = {\n \"applicationRestrictions\": lambda n : setattr(self, 'application_restrictions', n.get_object_value(AppManagementConfiguration)),\n \"isEnabled\": lambda n : setattr(self, 'is_enabled', n.get_bool_value()),\n \"servicePrincipalRestrictions\": lambda n : setattr(self, 'service_principal_restrictions', n.get_object_value(AppManagementConfiguration)),\n }\n super_fields = super().get_field_deserializers()\n fields.update(super_fields)\n return fields", "def namespaced_fields(self):\n ...", "def __call__(self, func):\n # Set or extend the function's \"custom_fields\" attribute\n func.required_fields = getattr(func, \"required_fields\", {})\n func.required_fields[self.fieldname] = self.input_type\n # The decorated function is unchanged\n return func", "def update(self, mapping):\n if not ismapping(mapping):\n raise TypeError(\"mapping type required\")\n field_names = getpyattr(type(self), 'field_names')\n for key, value in mapping.items():\n if key in field_names:\n setattr(self, key, value)", "def set_default_read_fields(cls, fields):\n cls._default_read_fields = fields", "def validation_required(self, validation_required):\n self._validation_required = validation_required", "def extract_fields(self, json_dict):\n raise NotImplementedError()", "def test_map_field_recursive_case(self):\n field = 'content.title'\n mapping = {\n 'type': 'text',\n 'index': True\n }\n\n actual = mapper._map_field(mapping, field)\n expected = {\n 'properties': {\n 'content': {\n 'properties': {\n 'title': {\n 'type': 'text',\n 'index': True\n }\n }\n }\n }\n }\n self.assertEqual(actual, expected)", "def field_mappings(self) -> Optional[Sequence['outputs.FieldMappingResponse']]:\n return pulumi.get(self, \"field_mappings\")", "def set_fields(self, **kwargs):\n for key, value in kwargs.items():\n if key in self.fields.keys():\n if type(value) != bool:\n raise TypeError('Expecting Bool passed {}'\n .format(type(value)))\n self.fields[key] = value\n else:\n raise KeyError", "def _validate_default_fields() -> None:\n default_fields = [\n field\n for field in fields\n if field[\"name\"] in DEFAULT_PREDICTIONS_TABLE_FIELDS\n ]\n if len(DEFAULT_PREDICTIONS_TABLE_FIELDS) != len(default_fields):\n raise NotFilledDefaultFields", "def update_json(self):\n self.set_version_to_default()\n self.remove_null_fields()\n self.remove_unnecessary_keys()\n self.set_fromVersion(from_version=self.from_version)", "def validate(self, data):\n # calling subserializer validate method (fields, and presets)\n data = super(FormidableSerializer, self).validate(data)\n # we check every field define in presets are define 
inside the form.\n if 'fields' in data and 'presets' in data:\n data = self.check_presets_cohesion(data)\n return data", "def set_fields(self, upstream_obj, nonparam_fields=None):\n default_data = upstream_obj.default_data(start_year=self.start_year,\n metadata=True)\n\n if self.raw_input_fields is None:\n self.raw_input_fields = {}\n for field in self._meta.fields:\n if (getattr(self, field.attname, None) and\n field.name not in nonparam_fields):\n raw_val = getattr(self, field.attname)\n if field.name.endswith(\"cpi\") and isinstance(raw_val, bool):\n raw_val = str(raw_val)\n self.raw_input_fields[field.name] = raw_val\n\n input_fields, failed_lookups = param_formatters.parse_fields(\n self.raw_input_fields,\n default_data\n )\n\n if failed_lookups:\n # distinct elements\n potential_failed_lookups = set(failed_lookups)\n # only keep parameters that used to be in the upstream package\n set_failed_lookups = potential_failed_lookups - nonparam_fields\n if self.deprecated_fields is None:\n self.deprecated_fields = []\n # drop parameters that we already know are deprecated\n set_failed_lookups.difference_update(self.deprecated_fields)\n self.deprecated_fields += list(set_failed_lookups)\n\n self.input_fields = input_fields", "def check_mandatory_props(klass: pyorient.ogm.declarative.DeclarativeMeta, obj: Dict):\n missing = []\n props = klass.objects.g.props_from_db[klass](Graph.compute_all_properties(klass))\n for k, v in props.items():\n prop = getattr(klass, k)\n if hasattr(prop, 'mandatory'):\n if prop.mandatory and k not in obj:\n # Fix values if default set \n if k == \"revoked\":\n obj[k] = False\n continue\n if k == \"spec_version\":\n obj[k] = \"2.1\"\n continue\n missing.append(k)\n if isinstance(prop, odbproperty.String):\n obj[k] = 'added_default'\n elif isinstance(prop, (odbproperty.Date, odbproperty.DateTime)):\n obj[k] = get_datetime()\n elif isinstance(prop, odbproperty.EmbeddedList):\n obj[k] = ['added_default']\n elif isinstance(prop, odbproperty.Integer):\n obj[k] = 0\n elif isinstance(prop, odbproperty.Float):\n obj[k] = 0.0\n elif isinstance(prop, odbproperty.Binary):\n obj[k] = 0\n elif isinstance(prop, odbproperty.Byte):\n obj[k] = 0\n elif isinstance(prop, odbproperty.Decimal):\n obj[k] = 0.0\n elif isinstance(prop, odbproperty.Long):\n obj[k] = 0\n elif isinstance(prop, odbproperty.Short):\n obj[k] = 0\n elif isinstance(prop, odbproperty.Boolean):\n obj[k] = True\n else:\n logging.info(f'What to do with missing mandatory field {k} of type {v.__class__}?')\n if missing:\n logging.info(f'missing mandatory fields for {obj[\"id_\"]}: {missing}')\n return obj", "def test_prep_country_fields_flat(self):\n original_flag = self.form.country_optional\n self.form.country_optional = True\n original_fields = self.form.fields\n original_removed = getattr(self.form, 'removed_fields', None)\n original_computed = getattr(self.form, 'computed_fields', None)\n self.form.fields = original_fields.copy()\n if original_removed is not None:\n self.form.removed_fields = original_removed.copy()\n if original_computed is not None:\n self.form.computed_fields = original_computed.copy()\n remaining = original_fields.copy()\n opts, field_rows = {'fake_opts': 'fake', 'fields': ['nope']}, [{'name': 'assigned_field'}]\n args = ['arbitrary', 'input', 'args']\n kwargs = {'test_1': 'data_1', 'test_2': 'data_2'}\n field_names = (self.form.country_field_name, 'country_flag', )\n if not any(remaining.get(name, None) for name in field_names):\n fix_fields = {name: self.get_missing_field(name) for name in 
field_names if name not in remaining}\n remaining.update(fix_fields)\n expected_add = {name: remaining[name] for name in field_names if name in remaining}\n expected_field_rows = field_rows.copy()\n expected_field_rows.append(expected_add)\n expected_remaining = {name: field for name, field in remaining.items() if name not in expected_add}\n expected_opts = deepcopy(opts)\n # expected_opts['fields'].append(field_names)\n kwargs['flat_fields'] = True\n expected_remaining.update(expected_add)\n\n sent = (opts, field_rows, remaining, *args)\n r_opts, r_rows, r_remaining, *r_args, r_kwargs = self.form.prep_country_fields(*sent, **kwargs)\n self.assertEqual(expected_opts, r_opts)\n self.assertEqual(expected_field_rows, r_rows)\n self.assertEqual(expected_remaining, r_remaining)\n self.assertEqual(args, r_args)\n self.assertEqual(kwargs, r_kwargs)\n\n self.form.country_optional = original_flag\n self.form.fields = original_fields\n if original_removed is not None:\n self.form.removed_fields = original_removed\n if original_computed is not None:\n self.form.computed_fields = original_computed\n pass", "def set_fields(self, fields: FieldDict):\n super().set_fields(fields)\n # bind fields to attrs\n for attr in ('a', 'b'):\n setattr(self, f'field_{attr}', self.fields[getattr(self, attr)])\n # get error messages\n dump_error = self.error_cls(self.get_error_message(\n self.op, a=self.field_a.dump_source, b=self.field_b.dump_source))\n load_error = self.error_cls(self.get_error_message(\n self.op, a=self.field_a.load_source, b=self.field_b.load_source))\n # set partial arguments for `validate`\n self.validate_dump = partial(\n self.validate,\n a_key=self.field_a.dump_target,\n b_key=self.field_b.dump_target,\n error=dump_error)\n self.validate_load = partial(\n self.validate,\n a_key=self.field_a.load_target,\n b_key=self.field_b.load_target,\n error=load_error)", "def remove_read_only_fields(self):\n self.fields = XML_List(Elements.FIELDS, [field for field in self.fields if\n not field.read_only or not str_to_bool(field.read_only)])", "def build_standard_field(self, field_name, model_field_type):\n field_mapping = self.serializer_field_mapping\n field_class = field_mapping[model_field_type]\n field_kwargs = get_field_kwargs(field_name, model_field_type)\n\n if \"choices\" in field_kwargs:\n # Fields with choices get coerced into `ChoiceField`\n # instead of using their regular typed field.\n field_class = self.serializer_choice_field\n # Some model fields may introduce kwargs that would not be valid\n # for the choice field. We need to strip these out.\n # Eg. 
models.DecimalField(max_digits=3, decimal_places=1, choices=DECIMAL_CHOICES)\n valid_kwargs = {\n \"read_only\",\n \"write_only\",\n \"required\",\n \"default\",\n \"initial\",\n \"source\",\n \"label\",\n \"help_text\",\n \"style\",\n \"error_messages\",\n \"validators\",\n \"allow_null\",\n \"allow_blank\",\n \"choices\",\n }\n for key in list(field_kwargs):\n if key not in valid_kwargs:\n field_kwargs.pop(key)\n\n if not issubclass(field_class, fields.CharField) and not issubclass(\n field_class, fields.ChoiceField\n ):\n # `allow_blank` is only valid for textual fields.\n field_kwargs.pop(\"allow_blank\", None)\n\n return field_class, field_kwargs", "def objectFields(self):\n raise NotImplementedError", "def get_field_deserializers(self,) -> Dict[str, Callable[[ParseNode], None]]:\n from .delegated_admin_relationship_request_action import DelegatedAdminRelationshipRequestAction\n from .delegated_admin_relationship_request_status import DelegatedAdminRelationshipRequestStatus\n from .entity import Entity\n\n from .delegated_admin_relationship_request_action import DelegatedAdminRelationshipRequestAction\n from .delegated_admin_relationship_request_status import DelegatedAdminRelationshipRequestStatus\n from .entity import Entity\n\n fields: Dict[str, Callable[[Any], None]] = {\n \"action\": lambda n : setattr(self, 'action', n.get_enum_value(DelegatedAdminRelationshipRequestAction)),\n \"createdDateTime\": lambda n : setattr(self, 'created_date_time', n.get_datetime_value()),\n \"lastModifiedDateTime\": lambda n : setattr(self, 'last_modified_date_time', n.get_datetime_value()),\n \"status\": lambda n : setattr(self, 'status', n.get_enum_value(DelegatedAdminRelationshipRequestStatus)),\n }\n super_fields = super().get_field_deserializers()\n fields.update(super_fields)\n return fields", "def get_field_deserializers(self,) -> Dict[str, Callable[[ParseNode], None]]:\n from .app_identity import AppIdentity\n from .entity import Entity\n from .print_task import PrintTask\n\n from .app_identity import AppIdentity\n from .entity import Entity\n from .print_task import PrintTask\n\n fields: Dict[str, Callable[[Any], None]] = {\n \"createdBy\": lambda n : setattr(self, 'created_by', n.get_object_value(AppIdentity)),\n \"displayName\": lambda n : setattr(self, 'display_name', n.get_str_value()),\n \"tasks\": lambda n : setattr(self, 'tasks', n.get_collection_of_object_values(PrintTask)),\n }\n super_fields = super().get_field_deserializers()\n fields.update(super_fields)\n return fields", "def get_field_deserializers(self,) -> Dict[str, Callable[[ParseNode], None]]:\n from .container_filter import ContainerFilter\n from .group_filter import GroupFilter\n from .object_mapping import ObjectMapping\n from .string_key_string_value_pair import StringKeyStringValuePair\n\n from .container_filter import ContainerFilter\n from .group_filter import GroupFilter\n from .object_mapping import ObjectMapping\n from .string_key_string_value_pair import StringKeyStringValuePair\n\n fields: Dict[str, Callable[[Any], None]] = {\n \"containerFilter\": lambda n : setattr(self, 'container_filter', n.get_object_value(ContainerFilter)),\n \"editable\": lambda n : setattr(self, 'editable', n.get_bool_value()),\n \"groupFilter\": lambda n : setattr(self, 'group_filter', n.get_object_value(GroupFilter)),\n \"id\": lambda n : setattr(self, 'id', n.get_str_value()),\n \"metadata\": lambda n : setattr(self, 'metadata', n.get_collection_of_object_values(StringKeyStringValuePair)),\n \"name\": lambda n : setattr(self, 'name', 
n.get_str_value()),\n \"objectMappings\": lambda n : setattr(self, 'object_mappings', n.get_collection_of_object_values(ObjectMapping)),\n \"@odata.type\": lambda n : setattr(self, 'odata_type', n.get_str_value()),\n \"priority\": lambda n : setattr(self, 'priority', n.get_int_value()),\n \"sourceDirectoryName\": lambda n : setattr(self, 'source_directory_name', n.get_str_value()),\n \"targetDirectoryName\": lambda n : setattr(self, 'target_directory_name', n.get_str_value()),\n }\n return fields", "def to_dict(self):\n _dict = self.dict(by_alias=True,\n exclude={\n },\n exclude_none=True)\n # set to None if billing_cycle_start (nullable) is None\n # and __fields_set__ contains the field\n if self.billing_cycle_start is None and \"billing_cycle_start\" in self.__fields_set__:\n _dict['billing_cycle_start'] = None\n\n # set to None if billing_cycle_end (nullable) is None\n # and __fields_set__ contains the field\n if self.billing_cycle_end is None and \"billing_cycle_end\" in self.__fields_set__:\n _dict['billing_cycle_end'] = None\n\n # set to None if canceled_at (nullable) is None\n # and __fields_set__ contains the field\n if self.canceled_at is None and \"canceled_at\" in self.__fields_set__:\n _dict['canceled_at'] = None\n\n # set to None if charge_id (nullable) is None\n # and __fields_set__ contains the field\n if self.charge_id is None and \"charge_id\" in self.__fields_set__:\n _dict['charge_id'] = None\n\n # set to None if paused_at (nullable) is None\n # and __fields_set__ contains the field\n if self.paused_at is None and \"paused_at\" in self.__fields_set__:\n _dict['paused_at'] = None\n\n # set to None if trial_start (nullable) is None\n # and __fields_set__ contains the field\n if self.trial_start is None and \"trial_start\" in self.__fields_set__:\n _dict['trial_start'] = None\n\n # set to None if trial_end (nullable) is None\n # and __fields_set__ contains the field\n if self.trial_end is None and \"trial_end\" in self.__fields_set__:\n _dict['trial_end'] = None\n\n return _dict", "def add_required_properties(self, p: str):\n # TODO: Deprecate\n for k in p.keys():\n try:\n self._properties[k].set_required(True)\n except KeyError:\n self._properties.define_property(name=k, supported=False, required=True)" ]
[ "0.73499066", "0.59110147", "0.5813631", "0.5798847", "0.57730836", "0.5717525", "0.5697753", "0.56940204", "0.56850916", "0.5684714", "0.5658657", "0.56166375", "0.5562546", "0.5559823", "0.55570066", "0.5553048", "0.5540108", "0.5510436", "0.546832", "0.5458542", "0.5449583", "0.5412325", "0.539837", "0.538003", "0.5378419", "0.5373643", "0.53728694", "0.5365694", "0.53516424", "0.53432953", "0.5338093", "0.53177", "0.5313075", "0.53011745", "0.5277172", "0.52687126", "0.5267163", "0.5265927", "0.5257691", "0.5253287", "0.5244607", "0.5225675", "0.5201452", "0.5180719", "0.5170421", "0.5163651", "0.5158553", "0.5154661", "0.5133564", "0.51289105", "0.51255214", "0.51206934", "0.5115963", "0.51145935", "0.5112749", "0.51080555", "0.5102547", "0.5101465", "0.5101133", "0.50927114", "0.50899", "0.5070939", "0.5068621", "0.5063792", "0.50629044", "0.5049571", "0.5049191", "0.5048", "0.5032366", "0.50246173", "0.5024494", "0.5023711", "0.5023599", "0.5011825", "0.50095683", "0.5000383", "0.49987996", "0.49949083", "0.49909762", "0.4990734", "0.49857712", "0.49792814", "0.4977994", "0.49756882", "0.4963428", "0.4953853", "0.49538064", "0.49515435", "0.49489567", "0.49448028", "0.4944325", "0.49292135", "0.49225643", "0.4918645", "0.49073404", "0.49015024", "0.49009657", "0.49004465", "0.48980913", "0.48950574", "0.4887455" ]
0.0
-1
Update serializer_field_mapping to use fields setting required=True
def __init__(self, *args, **kwargs):
    super().__init__(*args, **kwargs)
    set_fields_to_required(self, ['field_of_study'])
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def set_fields_to_required(serializer, ignore_fields=None):\n if ignore_fields is None:\n ignore_fields = []\n for field in serializer.fields.values():\n if field.field_name not in ignore_fields:\n field.required = True\n field.allow_null = False\n field.allow_blank = False", "def required_dict_validator(self, dict_fields, model_name, erp_required=[]):\n required_fields = self.env['settings.field'].sudo().search([('model_id.model', '=', model_name)])\n\n if required_fields:\n erp_required.extend(required_fields.required_field_ids.filtered(lambda x: x.id not in [er.id for er in erp_required]))\n\n for field in erp_required:\n if field.name in dict_fields and 'required' not in dict_fields[field.name]:\n dict_fields[field.name]['required'] = True\n dict_fields[field.name]['empty'] = False\n\n return dict_fields", "def get_fields(self):\n fields = super(GeoModelSerializer, self).get_fields()\n # Set the geometry field name when it's undeclared.\n if not self.Meta.geom_field:\n for name, field in fields.items():\n if isinstance(field, GeometryField):\n self.Meta.geom_field = name\n break\n return fields", "def serialize_field(self, payload, model, field, mapping):\n try:\n if field in model.fk_field_names():\n payload[field] = self.serialize_related_field(\n model, field, mapping\n )\n else:\n payload[field] = getattr(model, field)\n except SkipField:\n payload.pop(field, None)", "def __init__(self, *args, **kwargs):\n location_controls = kwargs.pop('location_controls', False)\n\n self.Meta.fields = list(self.Meta.fields)\n\n if location_controls:\n self.Meta.fields += ['next_location_id', 'previous_location_id']\n\n super(LocationFullSerializer, self).__init__(*args, **kwargs)", "def _populate_always_present_fields(self, field):\n defaults = [\n (\"label\", \"\"),\n (\"instructions\", \"\"),\n (\"placeholder\", \"\"),\n (\"defaultValue\", \"\"),\n (\"restrictions\", {}),\n (\"errorMessages\", {}),\n ]\n field.update({\n key: value\n for key, value in defaults if key not in field\n })", "def fields2jsonschema(self, fields, *, ordered=False, partial=None):\n jsonschema = {\"type\": \"object\", \"properties\": OrderedDict() if ordered else {}}\n\n for field_name, field_obj in fields.items():\n observed_field_name = field_obj.data_key or field_name\n prop = self.field2property(field_obj)\n jsonschema[\"properties\"][observed_field_name] = prop\n\n if field_obj.required:\n if not partial or (\n is_collection(partial) and field_name not in partial\n ):\n jsonschema.setdefault(\"required\", []).append(observed_field_name)\n\n if \"required\" in jsonschema:\n jsonschema[\"required\"].sort()\n\n return jsonschema", "def setRequiredValues(self, instance):\n for key in instance.__slots__:\n if key in instance.requiredFields:\n value = self.getTypicalValue(type(instance), key)\n setattr(instance, key, value)", "def intialize_from_fields(self):\n raise NotImplementedError", "def remove_null_fields(self):\n with open(self.schema_path, 'r') as file_obj:\n schema_data = yaml.safe_load(file_obj)\n schema_fields = schema_data.get('mapping').keys()\n for field in schema_fields:\n # We want to keep 'false' and 0 values, and avoid removing fields that are required in the schema.\n if field in self.data and self.data[field] in (None, '', [], {}) and \\\n not schema_data.get('mapping', {}).get(field, {}).get('required'):\n self.data.pop(field)", "def _initFields(self):\n pass", "def requires_mapping(self):", "def _make_reverse_relations_valid(self, data):\n for field_name, (field, related_field) in 
self._get_reverse_fields().items():\n if data.get(field.source) is None:\n continue\n if isinstance(field, serializers.ListSerializer):\n field = field.child\n if isinstance(field, serializers.ModelSerializer):\n # find the serializer field matching the reverse model relation\n for sub_field in field.fields.values():\n if sub_field.source == related_field.name:\n sub_field.required = False\n # found the matching field, move on\n break", "def test_map_field_base_case(self):\n field = 'title'\n mapping = {\n 'type': 'text',\n 'index': True\n }\n\n actual = mapper._map_field(mapping, field)\n expected = {\n 'properties': {\n 'title': {\n 'type': 'text',\n 'index': True\n }\n }\n }\n self.assertEqual(actual, expected)", "def api_field_from_django_field(cls, f, default=CharField):\n if isinstance(f, JSONField):\n return JSONApiField\n \n return super(PandaModelResource, cls).api_field_from_django_field(f, default)", "def _validate(mapping):\n missing_fields = _MANDATORY_FIELDS - set(mapping)\n if missing_fields:\n raise ValueError(\n \"Missing mandatory fields: {0}\".format(\n \", \".join(repr(field) for field in sorted(missing_fields))\n )\n )", "def test_create_enforces_required_fields(self):\n serializer = ServiceSerializer(data = {}, context = dict(project = self.project))\n self.assertFalse(serializer.is_valid())\n required_fields = {'name', 'category'}\n self.assertCountEqual(serializer.errors.keys(), required_fields)\n for name in required_fields:\n self.assertEqual(serializer.errors[name][0].code, 'required')", "def __init__(self, **kwargs):\n\n for (k, v) in self._fields:\n if k in kwargs:\n self.__dict__[k] = v.validate(kwargs[k])\n self.__dict__[k] = v.default", "def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n self.fields['first_name'].required = False\n self.fields['last_name'].required = False\n self.fields['institution'].required = False\n self.fields['institution_logo'].required = False\n self.fields['allow_notifications'].required = False", "def required_fields(required_fields=[]):\n def decorator(func):\n \"\"\" The decorator applied to the obj_create method\"\"\"\n def wrapper(resource, bundle=None, **kwargs):\n \"\"\" wraps the decorated method and verifies a list of required\n fields when a new object is being created.\n\n \"\"\"\n if not isinstance(bundle, Bundle):\n request = bundle\n data = resource.deserialize(\n request, request.body,\n format=request.META.get('CONTENT_TYPE', 'application/json')\n )\n bundle = resource.build_bundle(request=request, data=data)\n else:\n request = None\n\n for required_field in required_fields:\n if required_field not in bundle.data:\n response = HttpBadRequest(\n json.dumps(\"missing %s field\" % required_field),\n content_type=bundle.request.META['CONTENT_TYPE'])\n raise ImmediateHttpResponse(response=response)\n return func(resource, bundle=bundle, **kwargs)\n return wrapper\n return decorator", "def convert_fields(fields, _fields):\n mapper = {\n \"id\": \"local_id\",\n \"local_id\": \"id\"\n }\n fields = deepcopy(fields)\n for field in fields:\n if field['name'] in _fields:\n field['name'] = mapper[field['name']]\n return fields", "def _update_fields_with_default(annotation_fields, defaults_dict):\n all_fields = OrderedDict()\n all_filed_keys = _merge_field_keys(annotation_fields, defaults_dict)\n for name in all_filed_keys:\n # Get or create annotation\n annotation = (\n annotation_fields[name]\n if name in annotation_fields\n else _get_annotation_by_value(defaults_dict.get(name, Input._EMPTY))\n )\n # 
Create annotation if is class type and update default\n annotation = _update_annotation_with_default(annotation, name, defaults_dict.get(name, Input._EMPTY))\n all_fields[name] = annotation\n return all_fields", "def get_required_fields(self) -> Iterable[fields.Field]:\n for model_field in self.get_fields():\n if model_field.required:\n yield model_field", "def _get_simple_fields(cls) -> dict:\n return {\n name: tp\n for name, tp in cls._annotations().items()\n if AnnotationWrapper(tp).is_simple_in_opt_and_not_opt\n }", "def _assign_fields_to_params(cls, fields, params):\n if fields is None:\n fields = cls.get_default_read_fields()\n if fields:\n params['fields'] = ','.join(fields)", "def test_set_non_dictionary_based_field(self):\n self.assertRaises(TypeError, self._p.set_fields, '')", "def test_deserialize_required_fields():\n data = {}\n serializer = DogSerializer(data=data)\n assert not serializer.is_valid()\n assert len(serializer.errors) == 1\n\n data = {'name': 'bruce'}\n serializer = DogSerializer(data=data)\n assert serializer.is_valid()\n\n serializer.save()\n assert Dog.objects.count() == 1\n\n dog = Dog.objects.first()\n assert dog.name == 'bruce'", "def enforce_required_fields(self, attrs):\n if self.instance is not None:\n return\n # missing_items = {\n # field_name: self.missing_message\n # for field_name in self.fields\n # if field_name not in attrs\n # }\n # if missing_items:\n # raise ValidationError(missing_items, code='required')", "def test_defining_only_or_defer_on_nonexistant_fields_fails(self):", "def get_fields(self):\n fields = super(RelationSerializer, self).get_fields()\n\n if self.request.method == \"GET\":\n fields['type'] = serializers.CharField(source='type.name')\n else:\n fields['type'] = serializers.PrimaryKeyRelatedField(queryset=RelationType.objects.all())\n\n return fields", "def jsonable(self, *args, **options):\n d = {}\n for field_name, field in self.schema.normal_fields.items():\n field_val = getattr(self, field_name, None)\n field_val = field.jsonable(self, field_val)\n if field_val is not None:\n d[field_name] = field_val\n\n return d", "def update(self, instance, validated_data):\n assert hasattr(self.Meta, 'allowed_update_fields'), \"Serializers that are used for update must set Meta.allowed_update_fields\"\n if set(validated_data.keys()) - set(self.Meta.allowed_update_fields):\n raise serializers.ValidationError('Only updates on these fields are allowed: %s' % ', '.join(self.Meta.allowed_update_fields))\n return super(BaseSerializer, self).update(instance, validated_data)", "def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n ignore_fields = (\n 'about_me',\n 'romanized_first_name',\n 'romanized_last_name',\n 'postal_code',\n )\n set_fields_to_required(self, ignore_fields=ignore_fields)", "def _populate(self, fields):\n schema = self.schema\n for k, v in fields.items():\n fields[k] = schema.fields[k].iget(self, v)\n\n self.modify(fields)\n self.reset_modified()", "def test_alright_when_non_required_field_is_missing():\n\n model_definition = {'language': {'type': 'fixed',\n 'required': True,\n 'persisted': True},\n 'source': {'type': 'list',\n 'required': False,\n 'persisted': True},\n 'resources.title': {'type': 'text',\n 'required': False,\n 'persisted': True}}\n product1 = {'language': 'english'}\n factory = ProductModelFactory(model_definition)\n factory.build('product1', product1)\n # Ok. 
No exceptions were raised.", "def _validate_fields(self, change_fields):\n pass", "def clean_fields(self, instance, exclude=None):\n errors = {}\n exclude = exclude or []\n for name, f in self.properties.items():\n raw_value = getattr(instance, name, None)\n is_blank = not bool(raw_value)\n is_nullable = f.null\n is_defaulted = f.column.default or f.column.server_default\n is_required = f.required\n\n is_skippable = is_blank and (is_nullable or is_defaulted or not is_required)\n\n if name in exclude or is_skippable:\n continue\n try:\n setattr(instance, name, f.clean(raw_value, instance))\n except ValidationError as e:\n errors[name] = e.error_list\n if errors:\n raise NestedValidationError(errors)", "def _update_fields_with_default(\n annotation_fields: Dict[str, Union[Annotation, Input, Output]], defaults_dict: Dict[str, Any]\n ) -> Dict[str, Union[Annotation, Input, Output]]:\n all_fields = OrderedDict()\n all_filed_keys = _merge_field_keys(annotation_fields, defaults_dict)\n for name in all_filed_keys:\n # Get or create annotation\n annotation = (\n annotation_fields[name]\n if name in annotation_fields\n else _get_annotation_by_value(defaults_dict.get(name, Input._EMPTY))\n )\n # Create annotation if is class type and update default\n annotation = _update_annotation_with_default(annotation, name, defaults_dict.get(name, Input._EMPTY))\n all_fields[name] = annotation\n return all_fields", "def test_missing_mandatory_attributes():\n model_definition = {'source': {'type': 'list',\n 'required': True,\n 'persisted': True},\n 'resources.title': {'type': 'text',\n 'required': True,\n 'persisted': True}}\n # missing language in the model\n _ = ProductModelFactory(model_definition)", "def test_swagger_field_is_required():\n raw_schema = RawSchemaFactory()\n raw_schema.pop('swagger', None)\n\n assert 'swagger' not in raw_schema\n\n with pytest.raises(ValidationError) as err:\n swagger_schema_validator(raw_schema)\n\n assert_message_in_errors(\n MESSAGES['required']['required'],\n err.value.detail,\n 'required.swagger',\n )", "def is_required(self, field):\n return field.scheme.is_required and not field.scheme.is_pk", "def test_alright_when_required_field_is_missing_but_default_is_given():\n\n model_definition = {'language': {'type': 'fixed',\n 'required': True,\n 'persisted': True,\n 'default': 'portuguese'},\n 'source': {'type': 'list',\n 'required': False,\n 'persisted': True}}\n product1 = {'source': ['Whatever']}\n factory = ProductModelFactory(model_definition)\n factory.build('product1', product1)\n # Ok. 
No exceptions were raised.", "def clean_fields(self, exclude=None):\n obj = self._obj\n if obj is None:\n return None\n\n self.event = self.clean_event(self.event)\n self.resource_name = self.clean_resource_name(obj.__class__.__name__)\n self.resource_id = obj.id\n self.site = self.clean_site(obj)\n\n serializer_class = self.get_serializer_for_resource(self.resource_name)\n serializer = serializer_class(obj)\n self._resource = serializer.data", "def field_mapping(self):\n fields = self.fields\n if self.target_field is not None:\n del fields[self.target_field.get('name')]\n field_labels = list(self.fields.keys())\n\n field_mapping = {\n name: (\n field_labels.index(name),\n lambda value, e=e: self.parse_type(value, e)\n )\n for name, e in fields.items()\n if e.tag == f'{{{self.namespace}}}DataField'\n }\n\n field_mapping.update({\n name: (\n field_labels.index(self.find(e, 'FieldRef').get('field')),\n lambda value, e=e: self.parse_type(value, e)\n )\n for name, e in fields.items()\n if e.tag == f'{{{self.namespace}}}DerivedField'\n })\n\n return field_mapping", "def required_fields(model, values):\n if values:\n for k in list(values):\n if k not in model.__table__.columns.keys():\n values.pop(k)\n return values", "def check_for_required_fields(cls, fields=[], dataDict={}):\n\n validateRequired = Validate.required(fields=fields, dataDict=dataDict)\n if validateRequired['status'] == False:\n res = jsonify(\n {'status': 400, 'error': validateRequired['message'], 'data': []})\n return abort(make_response(res, 400))\n return True", "def get_field_deserializers(self,) -> Dict[str, Callable[[ParseNode], None]]:\n from .schedule_change_request import ScheduleChangeRequest\n\n from .schedule_change_request import ScheduleChangeRequest\n\n fields: Dict[str, Callable[[Any], None]] = {\n \"endDateTime\": lambda n : setattr(self, 'end_date_time', n.get_datetime_value()),\n \"startDateTime\": lambda n : setattr(self, 'start_date_time', n.get_datetime_value()),\n \"timeOffReasonId\": lambda n : setattr(self, 'time_off_reason_id', n.get_str_value()),\n }\n super_fields = super().get_field_deserializers()\n fields.update(super_fields)\n return fields", "def set_additional_fields(cls, model, data):\n for k, v in data.items():\n if not hasattr(model, k):\n setattr(model, k, v)", "def test_fields(self):\n\n class Foo(Model):\n field1 = StringField()\n field2 = IntegralField()\n\n assert hasattr(Foo, \"_fields\")\n assert type(Foo._fields) is dict\n\n assert not hasattr(Foo, \"field1\")\n assert \"field1\" in Foo._fields\n assert type(Foo._fields[\"field1\"]) is StringField\n\n assert not hasattr(Foo, \"field2\")\n assert \"field2\" in Foo._fields\n assert type(Foo._fields[\"field2\"]) is IntegralField", "def test_to_dict(self):\n\n class Person(Model):\n name = StringField()\n age = IntegralField(bounds = (0, None))\n siblings = ListField(of = StringField())\n\n data1 = {\n \"name\": \"Joe Shmoe\",\n \"age\": 21,\n \"siblings\": [\"Dick Shmoe\", \"Jane Shmoe\"]\n }\n person1 = Person(**data1)\n assert person1.to_dict() == data1\n\n # The defined but unset fields should still be present, but set to none\n data2 = {\"notaname\": 2, \"age\": \"lots\"}\n person2 = Person.from_dict(data2)\n assert person2.to_dict() == {\n \"notaname\": 2,\n \"age\": \"lots\",\n \"name\": None,\n \"siblings\": None\n }", "def set_fields(self, fields: FieldDict):\n super().set_fields(fields)\n nested_field: NestedField = self.fields[self.nested]\n if not isinstance(nested_field, NestedField):\n raise TypeError(\n f'The field 
\"{self.nested}\" must be a NestedField instance, not \"{nested_field}\".')\n if nested_field.many:\n raise ValueError(f'The field \"{self.nested}\" can not be set as \"many=True\".')\n self.nested_field = nested_field\n # create partial methods\n self._do_dump = partial(\n getattr(self, self.dump_method),\n target=nested_field.dump_target,\n method=nested_field.dump,\n )\n self._do_load = partial(\n getattr(self, self.load_method),\n target=nested_field.load_target,\n method=nested_field.load,\n )", "def get_field_deserializers(self,) -> Dict[str, Callable[[ParseNode], None]]:\n fields: Dict[str, Callable[[Any], None]] = {\n \"allowedToCreateApps\": lambda n : setattr(self, 'allowed_to_create_apps', n.get_bool_value()),\n \"allowedToCreateSecurityGroups\": lambda n : setattr(self, 'allowed_to_create_security_groups', n.get_bool_value()),\n \"allowedToCreateTenants\": lambda n : setattr(self, 'allowed_to_create_tenants', n.get_bool_value()),\n \"allowedToReadBitlockerKeysForOwnedDevice\": lambda n : setattr(self, 'allowed_to_read_bitlocker_keys_for_owned_device', n.get_bool_value()),\n \"allowedToReadOtherUsers\": lambda n : setattr(self, 'allowed_to_read_other_users', n.get_bool_value()),\n \"@odata.type\": lambda n : setattr(self, 'odata_type', n.get_str_value()),\n \"permissionGrantPoliciesAssigned\": lambda n : setattr(self, 'permission_grant_policies_assigned', n.get_collection_of_primitive_values(str)),\n }\n return fields", "def fields(self, fields):\n\n self._fields = fields", "def test_reader_serializer(self):\n transaction = TransactionFactory.build()\n serializer = TransactionSerializer(transaction)\n serialized_data = serializer.data\n\n for (\n serializer_field,\n model_field,\n ) in transaction_reader_fields_mapping.items():\n self.assertIn(serializer_field, serialized_data)\n\n if serializer_field == \"estabelecimento\":\n value = transaction.company.cnpj\n else:\n value = getattr(transaction, model_field)\n\n self.assertEqual(serialized_data[serializer_field], value)\n\n # tests if serializer validates\n serializer = TransactionSerializer(data=serialized_data)\n self.assertTrue(serializer.is_valid())", "def _patch_schema(self):\n fields = get_json()['data']['attributes'].keys()\n return make_entity_schema(\n self.SCHEMA, self.RESOURCE_NAME,\n make_data_schema(\n self.SCHEMA, id_required=True,\n only=fields, partial=True\n )\n )", "def test_required_field_values_are_present():\n\n model_definition = {'language': {'type': 'fixed',\n 'required': True,\n 'persisted': True},\n 'source': {'type': 'list',\n 'required': False,\n 'persisted': True},\n 'resources.title': {'type': 'text',\n 'required': True,\n 'persisted': True}}\n product1 = {'language': 'english'}\n factory = ProductModelFactory(model_definition)\n factory.build('product1', product1)", "def _make_field_map(fields):\n field_map = {}\n for field in fields:\n if field.name in field_map:\n raise SchemaParseException(\n 'Duplicate record field name %r.' 
% field.name)\n field_map[field.name] = field\n return field_map", "def _save_direct_relations(self, kwargs):\n for field_name, field in self.fields.items():\n if field.read_only:\n continue\n if isinstance(self._validated_data, dict) and self._validated_data.get(field.source) is None:\n continue\n if not isinstance(field, serializers.BaseSerializer):\n continue\n if hasattr(self, 'Meta') and hasattr(self.Meta, 'model'):\n # ModelSerializer (or similar) so we need to exclude reverse relations\n try:\n _, direct = self._get_related_field(field)\n except FieldDoesNotExist:\n continue\n if not direct:\n continue\n\n # reinject validated_data\n field._validated_data = self._validated_data[field_name]\n self._validated_data[field_name] = field.save(**kwargs.pop(field_name, {}))", "def get_fields(cls, fields=None, excludes=None):\r\n\r\n final_fields = {}\r\n fields = fields or []\r\n excludes = excludes or []\r\n\r\n if not cls._meta.object_class:\r\n return final_fields\r\n\r\n for name, f in cls._meta.object_class._fields.iteritems():\r\n # If the field name is already present, skip\r\n if name in cls.base_fields:\r\n continue\r\n\r\n # If field is not present in explicit field listing, skip\r\n if fields and name not in fields:\r\n continue\r\n\r\n # If field is in exclude list, skip\r\n if excludes and name in excludes:\r\n continue\r\n\r\n # TODO: Might need it in the future\r\n # if cls.should_skip_field(f):\r\n # continue\r\n\r\n api_field_class = cls.api_field_from_mongo_field(f)\r\n\r\n primary_key = f.primary_key or name == getattr(cls._meta, 'id_field', 'id')\r\n\r\n kwargs = {\r\n 'attribute': name,\r\n 'unique': f.unique or primary_key,\r\n 'null': not f.required and not primary_key,\r\n 'help_text': f.help_text,\r\n }\r\n\r\n # If field is not required, it does not matter if set default value,\r\n # so we do\r\n if not f.required:\r\n kwargs['default'] = f.default\r\n else:\r\n # MongoEngine does not really differ between user-specified default\r\n # and its default, so we try to guess\r\n if isinstance(f, mongoengine.ListField):\r\n if not callable(f.default) or f.default() != []: # If not MongoEngine's default\r\n kwargs['default'] = f.default\r\n elif isinstance(f, mongoengine.DictField):\r\n if not callable(f.default) or f.default() != {}: # If not MongoEngine's default\r\n kwargs['default'] = f.default\r\n else:\r\n if f.default is not None: # If not MongoEngine's default\r\n kwargs['default'] = f.default\r\n\r\n kwargs = cls.api_field_options(name, f, kwargs)\r\n\r\n final_fields[name] = api_field_class(**kwargs)\r\n final_fields[name].instance_name = name\r\n final_fields[name]._primary_key = primary_key\r\n\r\n # We store MongoEngine field so that schema output can show\r\n # to which content the list is limited to (if any)\r\n if isinstance(f, mongoengine.ListField):\r\n final_fields[name].field = f.field\r\n\r\n return final_fields", "def set_specific_fields(self):\n raise NotImplementedError(\"Must be defined by subclass!\")", "def get_field_deserializers(self,) -> Dict[str, Callable[[ParseNode], None]]:\n fields: Dict[str, Callable[[Any], None]] = {\n \"assignedDateTime\": lambda n : setattr(self, 'assigned_date_time', n.get_datetime_value()),\n \"capabilityStatus\": lambda n : setattr(self, 'capability_status', n.get_str_value()),\n \"@odata.type\": lambda n : setattr(self, 'odata_type', n.get_str_value()),\n \"service\": lambda n : setattr(self, 'service', n.get_str_value()),\n \"servicePlanId\": lambda n : setattr(self, 'service_plan_id', n.get_uuid_value()),\n }\n 
return fields", "def add_base_fields(self) -> None:\n for field in get_model_fields(self.model, foreign=False, m2m=False):\n if hasattr(self.factory, field.name) or field.has_default() or field.blank:\n continue\n setattr(\n self.factory, field.name, self._get_decl_for_model_field(field)\n )", "def depopulate(self, is_update):\n fields = {}\n schema = self.schema\n for k, field in schema.fields.items():\n is_modified = k in self.modified_fields\n orig_v = getattr(self, k)\n v = field.iset(\n self,\n orig_v,\n is_update=is_update,\n is_modified=is_modified\n )\n\n if is_modified or v is not None:\n if is_update and field.is_pk() and v == orig_v:\n continue\n\n else:\n fields[k] = v\n\n if not is_update:\n for field_name in schema.required_fields.keys():\n if field_name not in fields:\n raise KeyError(\"Missing required field {}\".format(field_name))\n\n return fields", "def __createFields(self):\n fields = self.updateFields\n for field in fields:\n self.__createField(field)", "def test_field_nullable(self):\n node_dict = {\n 'host_name': 'abc',\n 'local_router_id': '1.1.1.1',\n 'as_num': 100,\n 'bgpls_id': '0.0.0.0',\n 'igp_id': '0.0.0.0'\n }\n node = Node(**node_dict)\n for name, field in node_dict.items():\n self.assertEqual(field, node.__dict__[name])", "def _validate(self):\n fields, schema = self.__dict__, self._def.default\n extra_fields = fields.viewkeys() - schema.viewkeys()\n if len(extra_fields) > 0:\n raise AttributeError('Fields found that are not in the schema: %r' % (list(extra_fields)))\n for key in fields.iterkeys():\n if type(fields[key]) is not type(schema[key]):\n raise AttributeError('Invalid %s for field \"%s\", should be %s' %\n (type(fields[key]), key, type(schema[key])))", "def _get_not_simple_fields(cls) -> Dict[str, str]:\n\n return {\n name: anno\n for name, anno in cls._annotations().items()\n if not AnnotationWrapper(anno).is_simple_in_opt_and_not_opt\n }", "def validate_fields_for_magento(self,data):\n for field in data:\n if data[field] == None :\n del data[field]\n if data[field] == True:\n data[field] = 1\n if data[field] == False :\n data[field] = 0", "def clean(self):\n cleaned_data = super().clean()\n cleaned_data = {key: field for key, field in cleaned_data.items()\n if field is not None}\n return cleaned_data", "def _initializeRequestField(self,field,referenceField):\n\t\tvaluesDict = referenceField.values\n\t\tfield.initialize_values(valuesDict)\n\t\t\n\t\tpass", "def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n\n for field in self.fields:\n self.fields[field].label = False", "def __init__(self):\n super(ObjectSchema, self).__init__()\n self.is_allow_undefined = False", "def _update_allowed_fields(self) -> list:\n raise NotImplementedError('Each model has to have its list of update allowed fields')", "def test_prep_fields(self):\n pass", "def required_fields(self, gid):\n r = self.get(\"/groups/{g:d}/fields\".format(g=gid))\n return r.json()", "def writeRequiredFields(self, outputFile):\n fields = []\n for field in self.getFields():\n if not field.has_default:\n fields.append(field)\n if len(fields) < 1:\n self._writeWithIndent('requiredFields = set([])', outputFile)\n else:\n self._writeWithIndent('requiredFields = set([', outputFile)\n for field in fields:\n string_ = '\"{0}\",'.format(field.name)\n self._writeWithIndent(string_, outputFile, 2)\n self._writeWithIndent('])', outputFile)", "def get_field_deserializers(self,) -> Dict[str, Callable[[ParseNode], None]]:\n from .app_management_configuration import 
AppManagementConfiguration\n from .policy_base import PolicyBase\n\n from .app_management_configuration import AppManagementConfiguration\n from .policy_base import PolicyBase\n\n fields: Dict[str, Callable[[Any], None]] = {\n \"applicationRestrictions\": lambda n : setattr(self, 'application_restrictions', n.get_object_value(AppManagementConfiguration)),\n \"isEnabled\": lambda n : setattr(self, 'is_enabled', n.get_bool_value()),\n \"servicePrincipalRestrictions\": lambda n : setattr(self, 'service_principal_restrictions', n.get_object_value(AppManagementConfiguration)),\n }\n super_fields = super().get_field_deserializers()\n fields.update(super_fields)\n return fields", "def namespaced_fields(self):\n ...", "def update(self, mapping):\n if not ismapping(mapping):\n raise TypeError(\"mapping type required\")\n field_names = getpyattr(type(self), 'field_names')\n for key, value in mapping.items():\n if key in field_names:\n setattr(self, key, value)", "def __call__(self, func):\n # Set or extend the function's \"custom_fields\" attribute\n func.required_fields = getattr(func, \"required_fields\", {})\n func.required_fields[self.fieldname] = self.input_type\n # The decorated function is unchanged\n return func", "def set_default_read_fields(cls, fields):\n cls._default_read_fields = fields", "def validation_required(self, validation_required):\n self._validation_required = validation_required", "def extract_fields(self, json_dict):\n raise NotImplementedError()", "def test_map_field_recursive_case(self):\n field = 'content.title'\n mapping = {\n 'type': 'text',\n 'index': True\n }\n\n actual = mapper._map_field(mapping, field)\n expected = {\n 'properties': {\n 'content': {\n 'properties': {\n 'title': {\n 'type': 'text',\n 'index': True\n }\n }\n }\n }\n }\n self.assertEqual(actual, expected)", "def field_mappings(self) -> Optional[Sequence['outputs.FieldMappingResponse']]:\n return pulumi.get(self, \"field_mappings\")", "def set_fields(self, **kwargs):\n for key, value in kwargs.items():\n if key in self.fields.keys():\n if type(value) != bool:\n raise TypeError('Expecting Bool passed {}'\n .format(type(value)))\n self.fields[key] = value\n else:\n raise KeyError", "def _validate_default_fields() -> None:\n default_fields = [\n field\n for field in fields\n if field[\"name\"] in DEFAULT_PREDICTIONS_TABLE_FIELDS\n ]\n if len(DEFAULT_PREDICTIONS_TABLE_FIELDS) != len(default_fields):\n raise NotFilledDefaultFields", "def update_json(self):\n self.set_version_to_default()\n self.remove_null_fields()\n self.remove_unnecessary_keys()\n self.set_fromVersion(from_version=self.from_version)", "def validate(self, data):\n # calling subserializer validate method (fields, and presets)\n data = super(FormidableSerializer, self).validate(data)\n # we check every field define in presets are define inside the form.\n if 'fields' in data and 'presets' in data:\n data = self.check_presets_cohesion(data)\n return data", "def set_fields(self, upstream_obj, nonparam_fields=None):\n default_data = upstream_obj.default_data(start_year=self.start_year,\n metadata=True)\n\n if self.raw_input_fields is None:\n self.raw_input_fields = {}\n for field in self._meta.fields:\n if (getattr(self, field.attname, None) and\n field.name not in nonparam_fields):\n raw_val = getattr(self, field.attname)\n if field.name.endswith(\"cpi\") and isinstance(raw_val, bool):\n raw_val = str(raw_val)\n self.raw_input_fields[field.name] = raw_val\n\n input_fields, failed_lookups = param_formatters.parse_fields(\n 
self.raw_input_fields,\n default_data\n )\n\n if failed_lookups:\n # distinct elements\n potential_failed_lookups = set(failed_lookups)\n # only keep parameters that used to be in the upstream package\n set_failed_lookups = potential_failed_lookups - nonparam_fields\n if self.deprecated_fields is None:\n self.deprecated_fields = []\n # drop parameters that we already know are deprecated\n set_failed_lookups.difference_update(self.deprecated_fields)\n self.deprecated_fields += list(set_failed_lookups)\n\n self.input_fields = input_fields", "def check_mandatory_props(klass: pyorient.ogm.declarative.DeclarativeMeta, obj: Dict):\n missing = []\n props = klass.objects.g.props_from_db[klass](Graph.compute_all_properties(klass))\n for k, v in props.items():\n prop = getattr(klass, k)\n if hasattr(prop, 'mandatory'):\n if prop.mandatory and k not in obj:\n # Fix values if default set \n if k == \"revoked\":\n obj[k] = False\n continue\n if k == \"spec_version\":\n obj[k] = \"2.1\"\n continue\n missing.append(k)\n if isinstance(prop, odbproperty.String):\n obj[k] = 'added_default'\n elif isinstance(prop, (odbproperty.Date, odbproperty.DateTime)):\n obj[k] = get_datetime()\n elif isinstance(prop, odbproperty.EmbeddedList):\n obj[k] = ['added_default']\n elif isinstance(prop, odbproperty.Integer):\n obj[k] = 0\n elif isinstance(prop, odbproperty.Float):\n obj[k] = 0.0\n elif isinstance(prop, odbproperty.Binary):\n obj[k] = 0\n elif isinstance(prop, odbproperty.Byte):\n obj[k] = 0\n elif isinstance(prop, odbproperty.Decimal):\n obj[k] = 0.0\n elif isinstance(prop, odbproperty.Long):\n obj[k] = 0\n elif isinstance(prop, odbproperty.Short):\n obj[k] = 0\n elif isinstance(prop, odbproperty.Boolean):\n obj[k] = True\n else:\n logging.info(f'What to do with missing mandatory field {k} of type {v.__class__}?')\n if missing:\n logging.info(f'missing mandatory fields for {obj[\"id_\"]}: {missing}')\n return obj", "def test_prep_country_fields_flat(self):\n original_flag = self.form.country_optional\n self.form.country_optional = True\n original_fields = self.form.fields\n original_removed = getattr(self.form, 'removed_fields', None)\n original_computed = getattr(self.form, 'computed_fields', None)\n self.form.fields = original_fields.copy()\n if original_removed is not None:\n self.form.removed_fields = original_removed.copy()\n if original_computed is not None:\n self.form.computed_fields = original_computed.copy()\n remaining = original_fields.copy()\n opts, field_rows = {'fake_opts': 'fake', 'fields': ['nope']}, [{'name': 'assigned_field'}]\n args = ['arbitrary', 'input', 'args']\n kwargs = {'test_1': 'data_1', 'test_2': 'data_2'}\n field_names = (self.form.country_field_name, 'country_flag', )\n if not any(remaining.get(name, None) for name in field_names):\n fix_fields = {name: self.get_missing_field(name) for name in field_names if name not in remaining}\n remaining.update(fix_fields)\n expected_add = {name: remaining[name] for name in field_names if name in remaining}\n expected_field_rows = field_rows.copy()\n expected_field_rows.append(expected_add)\n expected_remaining = {name: field for name, field in remaining.items() if name not in expected_add}\n expected_opts = deepcopy(opts)\n # expected_opts['fields'].append(field_names)\n kwargs['flat_fields'] = True\n expected_remaining.update(expected_add)\n\n sent = (opts, field_rows, remaining, *args)\n r_opts, r_rows, r_remaining, *r_args, r_kwargs = self.form.prep_country_fields(*sent, **kwargs)\n self.assertEqual(expected_opts, r_opts)\n 
self.assertEqual(expected_field_rows, r_rows)\n self.assertEqual(expected_remaining, r_remaining)\n self.assertEqual(args, r_args)\n self.assertEqual(kwargs, r_kwargs)\n\n self.form.country_optional = original_flag\n self.form.fields = original_fields\n if original_removed is not None:\n self.form.removed_fields = original_removed\n if original_computed is not None:\n self.form.computed_fields = original_computed\n pass", "def set_fields(self, fields: FieldDict):\n super().set_fields(fields)\n # bind fields to attrs\n for attr in ('a', 'b'):\n setattr(self, f'field_{attr}', self.fields[getattr(self, attr)])\n # get error messages\n dump_error = self.error_cls(self.get_error_message(\n self.op, a=self.field_a.dump_source, b=self.field_b.dump_source))\n load_error = self.error_cls(self.get_error_message(\n self.op, a=self.field_a.load_source, b=self.field_b.load_source))\n # set partial arguments for `validate`\n self.validate_dump = partial(\n self.validate,\n a_key=self.field_a.dump_target,\n b_key=self.field_b.dump_target,\n error=dump_error)\n self.validate_load = partial(\n self.validate,\n a_key=self.field_a.load_target,\n b_key=self.field_b.load_target,\n error=load_error)", "def remove_read_only_fields(self):\n self.fields = XML_List(Elements.FIELDS, [field for field in self.fields if\n not field.read_only or not str_to_bool(field.read_only)])", "def build_standard_field(self, field_name, model_field_type):\n field_mapping = self.serializer_field_mapping\n field_class = field_mapping[model_field_type]\n field_kwargs = get_field_kwargs(field_name, model_field_type)\n\n if \"choices\" in field_kwargs:\n # Fields with choices get coerced into `ChoiceField`\n # instead of using their regular typed field.\n field_class = self.serializer_choice_field\n # Some model fields may introduce kwargs that would not be valid\n # for the choice field. We need to strip these out.\n # Eg. 
models.DecimalField(max_digits=3, decimal_places=1, choices=DECIMAL_CHOICES)\n valid_kwargs = {\n \"read_only\",\n \"write_only\",\n \"required\",\n \"default\",\n \"initial\",\n \"source\",\n \"label\",\n \"help_text\",\n \"style\",\n \"error_messages\",\n \"validators\",\n \"allow_null\",\n \"allow_blank\",\n \"choices\",\n }\n for key in list(field_kwargs):\n if key not in valid_kwargs:\n field_kwargs.pop(key)\n\n if not issubclass(field_class, fields.CharField) and not issubclass(\n field_class, fields.ChoiceField\n ):\n # `allow_blank` is only valid for textual fields.\n field_kwargs.pop(\"allow_blank\", None)\n\n return field_class, field_kwargs", "def objectFields(self):\n raise NotImplementedError", "def get_field_deserializers(self,) -> Dict[str, Callable[[ParseNode], None]]:\n from .delegated_admin_relationship_request_action import DelegatedAdminRelationshipRequestAction\n from .delegated_admin_relationship_request_status import DelegatedAdminRelationshipRequestStatus\n from .entity import Entity\n\n from .delegated_admin_relationship_request_action import DelegatedAdminRelationshipRequestAction\n from .delegated_admin_relationship_request_status import DelegatedAdminRelationshipRequestStatus\n from .entity import Entity\n\n fields: Dict[str, Callable[[Any], None]] = {\n \"action\": lambda n : setattr(self, 'action', n.get_enum_value(DelegatedAdminRelationshipRequestAction)),\n \"createdDateTime\": lambda n : setattr(self, 'created_date_time', n.get_datetime_value()),\n \"lastModifiedDateTime\": lambda n : setattr(self, 'last_modified_date_time', n.get_datetime_value()),\n \"status\": lambda n : setattr(self, 'status', n.get_enum_value(DelegatedAdminRelationshipRequestStatus)),\n }\n super_fields = super().get_field_deserializers()\n fields.update(super_fields)\n return fields", "def get_field_deserializers(self,) -> Dict[str, Callable[[ParseNode], None]]:\n from .app_identity import AppIdentity\n from .entity import Entity\n from .print_task import PrintTask\n\n from .app_identity import AppIdentity\n from .entity import Entity\n from .print_task import PrintTask\n\n fields: Dict[str, Callable[[Any], None]] = {\n \"createdBy\": lambda n : setattr(self, 'created_by', n.get_object_value(AppIdentity)),\n \"displayName\": lambda n : setattr(self, 'display_name', n.get_str_value()),\n \"tasks\": lambda n : setattr(self, 'tasks', n.get_collection_of_object_values(PrintTask)),\n }\n super_fields = super().get_field_deserializers()\n fields.update(super_fields)\n return fields", "def get_field_deserializers(self,) -> Dict[str, Callable[[ParseNode], None]]:\n from .container_filter import ContainerFilter\n from .group_filter import GroupFilter\n from .object_mapping import ObjectMapping\n from .string_key_string_value_pair import StringKeyStringValuePair\n\n from .container_filter import ContainerFilter\n from .group_filter import GroupFilter\n from .object_mapping import ObjectMapping\n from .string_key_string_value_pair import StringKeyStringValuePair\n\n fields: Dict[str, Callable[[Any], None]] = {\n \"containerFilter\": lambda n : setattr(self, 'container_filter', n.get_object_value(ContainerFilter)),\n \"editable\": lambda n : setattr(self, 'editable', n.get_bool_value()),\n \"groupFilter\": lambda n : setattr(self, 'group_filter', n.get_object_value(GroupFilter)),\n \"id\": lambda n : setattr(self, 'id', n.get_str_value()),\n \"metadata\": lambda n : setattr(self, 'metadata', n.get_collection_of_object_values(StringKeyStringValuePair)),\n \"name\": lambda n : setattr(self, 'name', 
n.get_str_value()),\n \"objectMappings\": lambda n : setattr(self, 'object_mappings', n.get_collection_of_object_values(ObjectMapping)),\n \"@odata.type\": lambda n : setattr(self, 'odata_type', n.get_str_value()),\n \"priority\": lambda n : setattr(self, 'priority', n.get_int_value()),\n \"sourceDirectoryName\": lambda n : setattr(self, 'source_directory_name', n.get_str_value()),\n \"targetDirectoryName\": lambda n : setattr(self, 'target_directory_name', n.get_str_value()),\n }\n return fields", "def to_dict(self):\n _dict = self.dict(by_alias=True,\n exclude={\n },\n exclude_none=True)\n # set to None if billing_cycle_start (nullable) is None\n # and __fields_set__ contains the field\n if self.billing_cycle_start is None and \"billing_cycle_start\" in self.__fields_set__:\n _dict['billing_cycle_start'] = None\n\n # set to None if billing_cycle_end (nullable) is None\n # and __fields_set__ contains the field\n if self.billing_cycle_end is None and \"billing_cycle_end\" in self.__fields_set__:\n _dict['billing_cycle_end'] = None\n\n # set to None if canceled_at (nullable) is None\n # and __fields_set__ contains the field\n if self.canceled_at is None and \"canceled_at\" in self.__fields_set__:\n _dict['canceled_at'] = None\n\n # set to None if charge_id (nullable) is None\n # and __fields_set__ contains the field\n if self.charge_id is None and \"charge_id\" in self.__fields_set__:\n _dict['charge_id'] = None\n\n # set to None if paused_at (nullable) is None\n # and __fields_set__ contains the field\n if self.paused_at is None and \"paused_at\" in self.__fields_set__:\n _dict['paused_at'] = None\n\n # set to None if trial_start (nullable) is None\n # and __fields_set__ contains the field\n if self.trial_start is None and \"trial_start\" in self.__fields_set__:\n _dict['trial_start'] = None\n\n # set to None if trial_end (nullable) is None\n # and __fields_set__ contains the field\n if self.trial_end is None and \"trial_end\" in self.__fields_set__:\n _dict['trial_end'] = None\n\n return _dict", "def add_required_properties(self, p: str):\n # TODO: Deprecate\n for k in p.keys():\n try:\n self._properties[k].set_required(True)\n except KeyError:\n self._properties.define_property(name=k, supported=False, required=True)" ]
[ "0.7349095", "0.59120345", "0.58142334", "0.57975155", "0.5773961", "0.5717595", "0.56959385", "0.5695273", "0.568627", "0.56845975", "0.5660791", "0.5620639", "0.5562065", "0.5561373", "0.5556391", "0.555515", "0.55402875", "0.5511495", "0.546896", "0.5459124", "0.54497963", "0.54110634", "0.5398385", "0.5380121", "0.53782773", "0.53751194", "0.5372808", "0.536668", "0.5352254", "0.5342452", "0.533807", "0.53176105", "0.531521", "0.53018355", "0.5276986", "0.5269212", "0.52681553", "0.52649176", "0.5257536", "0.5252648", "0.52448726", "0.5225241", "0.52013415", "0.5182229", "0.5170967", "0.5165028", "0.51587635", "0.5155698", "0.5134422", "0.51295984", "0.5126346", "0.5120603", "0.51159614", "0.5115571", "0.51124763", "0.5108347", "0.5102396", "0.5102034", "0.509969", "0.5094964", "0.50898844", "0.50701815", "0.50684476", "0.50646734", "0.5063732", "0.5051295", "0.5049662", "0.5048179", "0.50332874", "0.50258726", "0.50250536", "0.50249416", "0.5024757", "0.5012598", "0.501024", "0.49996844", "0.49987686", "0.49954683", "0.49933034", "0.49918306", "0.4985915", "0.49804753", "0.49792653", "0.497623", "0.49646452", "0.49552575", "0.4954946", "0.4951771", "0.49498212", "0.49449262", "0.49443778", "0.49286342", "0.49244466", "0.49187472", "0.49071637", "0.49025235", "0.49007615", "0.49001327", "0.4897843", "0.48955244", "0.48887408" ]
0.0
-1
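The `build_standard_field` negative above shows DRF coercing choice-bearing model fields into `ChoiceField` and stripping kwargs the choice field cannot accept. A minimal, self-contained sketch of that kwarg-filtering step follows; the function name and the sample kwargs are illustrative, not DRF's own API.

```python
# Sketch of the kwarg-filtering step from the build_standard_field snippet
# above: keep only the kwargs a ChoiceField accepts, drop the rest.
VALID_CHOICE_KWARGS = {
    "read_only", "write_only", "required", "default", "initial", "source",
    "label", "help_text", "style", "error_messages", "validators",
    "allow_null", "allow_blank", "choices",
}

def filter_choice_field_kwargs(field_kwargs):
    """Drop model-level kwargs (e.g. max_digits) that ChoiceField rejects."""
    return {k: v for k, v in field_kwargs.items() if k in VALID_CHOICE_KWARGS}

print(filter_choice_field_kwargs(
    {"max_digits": 3, "decimal_places": 1, "choices": [(1, "one")]}
))  # -> {'choices': [(1, 'one')]}
```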
Getter for the username field
def get_username(self, obj): return obj.user.username
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_username(self):\n return str(getattr(self, self.USERNAME_FIELD))", "def get_username(self):\r\n return self.username", "def get_username(self):\n return self.username", "def username(self) -> undefined.UndefinedOr[str]:", "def get_username(self):\n raise NotImplementedError('get_username')", "def GetUsername(self):\n return self._username", "def GetUsername(self):\n pass", "def get_username(self):\r\n raise NotImplementedError", "def username(self) -> str:", "def username(self) -> str:", "def get_username(self) -> str:\n return self._username", "def username(self) :\n\t\ttry :\n\t\t\treturn self._username\n\t\texcept Exception as e:\n\t\t\traise e", "def clean_username (self):\n return self.instance.username", "def getUsername(self):\n\t\treturn self.Username.lower()", "def get(self, username):\n return username", "def username(self):\n return self._username()", "def username(self, instance):\r\n return instance.user.username", "def username(self) -> str:\n raise NotImplementedError", "def username(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"username\")", "def username(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"username\")", "def username(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"username\")", "def username(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"username\")", "def username(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"username\")", "def username(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"username\")", "def username(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"username\")", "def username(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"username\")", "def username(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"username\")", "def username(self):\n return self._username", "def username(self):\n return self._username", "def username(self):\n return self._username", "def username(self):\n return json_loads(self.user_json).get('username')", "def username(self) -> str:\n return self._username", "def username_field(self):\n\n if 'phone' in self.initial_data:\n return 'phone'\n if 'user_name' in self.initial_data:\n return 'user_name'\n return get_username_field()", "def username(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"username\")", "def username(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"username\")", "def username(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"username\")", "def username(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"username\")", "def username(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"username\")", "def username(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"username\")", "def username(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"username\")", "def username(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"username\")", "def username(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"username\")", "def username(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"username\")", "def username(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"username\")", "def username(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"username\")", "def username(self) -> str:\n return pulumi.get(self, \"username\")", "def username(self) -> str:\n return pulumi.get(self, \"username\")", "def username(self) -> str:\n return pulumi.get(self, \"username\")", "def get_username(self, 
request):\r\n try:\r\n return request.user.username\r\n except AttributeError:\r\n return ''", "def username(self):\n return self.user.username", "def usernameFind(self):\r\n return self.username()", "def username(self) -> Optional[str]:\n return self._state.get(\"username\", None)", "def username(self):\n if self._username is not None:\n return self._username\n # Try to get a username from the userprofile\n try:\n self._username = self.userprofile.user.username\n except UserProfile.DoesNotExist:\n # User profile does not exist\n return None\n return self._username", "def username(self, inst):\r\n return inst.user.username", "def get_username(self):\n return self.browser.find_element(*locators.USER_NAME_TEXT).text", "def get_full_name(self):\n return self.username", "def get_full_name(self):\n return self.username", "def get_name(self):\n return self.user.username if self.user.username else self.user.email", "def git_username_user_attribute(self):\n return self._git_username_user_attribute", "def __str__(self):\r\n return self.username", "def username(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"username\")", "def _get_username():\n username = request.args.get(\"username\")\n if not username:\n raise NoUserError()\n else:\n return username", "def get_username(self):\n if not self.is_valid():\n return None\n try:\n # NOTE: all emails stored in lower-case\n email = self.clean_email().lower()\n return User.objects.get(email=email).username\n except User.DoesNotExist:\n pass\n return None", "def get_username(self):\n full_name = '%s %s' % (self.user.first_name.strip(), self.user.last_name.strip()[0:1])\n if len(full_name.strip()) == 0:\n full_name = self.user.username\n return full_name.strip()", "def __str__(self):\n return self.username", "def __str__(self):\n return self.username", "def _get_username(self):\n name = self._get_username_from_cookies()\n if name:\n return name\n if self._oauth and self._login_info[0]:\n return self._login_info[0]\n return self._get_username_from_api()", "def getName(self):\n return self.__username", "def username(self):\n return self._query_config()['username']", "def auth_username(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"auth_username\")", "def log_in_username(self):\n username_elem = waiter.find_element(self.driver, 'username', by=NAME)\n return username_elem.get_attribute('value')", "def __str__(self):\n if self.username == None:\n return \"User does not exist\"\n return self.username", "def set_username(self, value):\n raise NotImplementedError('set_username')", "def username(self):\n return self._authenticator.username()", "def ro_username(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"ro_username\")", "def set_username(self, value):\n self.username = value", "def user_name(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"user_name\")", "def _username(self):\n if 'username' not in self._config:\n self._config['username'] = self._UI.get_input(\"Please enter your trac username: \")\n self._config._write_config()\n return self._config['username']", "def get_short_name(self):\n return self.username", "def get_short_name(self):\n return self.username", "def get_short_name(self):\n return self.username", "def test_users_username_get(self):\n pass", "def user_name(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"user_name\")", "def user_name(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"user_name\")", "def user_name(self) -> pulumi.Input[str]:\n return pulumi.get(self, 
\"user_name\")", "def get_username_by_id(self, id):\n return User.query.get(id).username", "def username(self):\n log.warning(\"username property deprecated. Use boundjid.user\")\n return self.boundjid.user", "def username(self, repository):\r\n return self._username(repository)", "def get_username():\n\n if session.get(\"user_id\") is None:\n username = \"\"\n else:\n user_id = session.get(\"user_id\")\n user = User.query.filter(User.id==user_id).first()\n username = user.username\n\n return username", "def user_name(self) -> str:\n return pulumi.get(self, \"user_name\")", "def user_name(self):\n return self._user_name", "def _get_username_from_api(self):\n result = self.api_query(action=\"query\", meta=\"userinfo\")\n return result[\"query\"][\"userinfo\"][\"name\"]", "def clean_username(self):\n username = self.cleaned_data['username']\n\n try:\n User.objects.get(email=username)\n except ObjectDoesNotExist:\n raise forms.ValidationError('Selected user does not exist.')\n\n return username", "def __str__(self):\n return self.user.username", "def __str__(self):\n return self.user.username", "def __str__(self):\n return self.user.username", "def __str__(self):\n return self.user.username", "def __str__(self):\n return self.user.username", "def getUserName(self):\n user = User.by_id(self.user_id)\n return user.name" ]
[ "0.8819286", "0.8542671", "0.84036976", "0.8369547", "0.8357543", "0.8345958", "0.83210427", "0.82775146", "0.8162549", "0.8162549", "0.8135109", "0.8098483", "0.80932707", "0.8029489", "0.8025117", "0.7972525", "0.79250383", "0.7903688", "0.7901838", "0.7901838", "0.7901838", "0.7901838", "0.7901838", "0.7901838", "0.7901838", "0.7901838", "0.7901838", "0.78961444", "0.78961444", "0.78961444", "0.7893794", "0.7884272", "0.7881781", "0.7877699", "0.7877699", "0.7877699", "0.7877699", "0.7877699", "0.7877699", "0.7877699", "0.7877699", "0.7877699", "0.7877699", "0.7877699", "0.7877699", "0.7865014", "0.7865014", "0.7865014", "0.7840565", "0.78402543", "0.775206", "0.7751875", "0.76991856", "0.76297754", "0.75410986", "0.7502848", "0.7502848", "0.7487611", "0.74469125", "0.7417137", "0.7415221", "0.73924893", "0.73804826", "0.736287", "0.7340982", "0.7340982", "0.7307024", "0.7245849", "0.721643", "0.72031885", "0.7193379", "0.7184313", "0.7159272", "0.7151855", "0.7126765", "0.70969826", "0.70877796", "0.70865536", "0.7076797", "0.7076797", "0.7076797", "0.7050057", "0.70456743", "0.70456743", "0.70456743", "0.7033136", "0.70314556", "0.70233697", "0.7023205", "0.6989999", "0.69894266", "0.69784886", "0.69701743", "0.6967362", "0.6967362", "0.6967362", "0.6967362", "0.6967362", "0.69593537" ]
0.7931724
16
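The record above pairs the "Getter for the username field" query with a one-line serializer method. A runnable, plain-Python sketch of the same getter pattern follows; in DRF this method would back a `SerializerMethodField`, and the `User`/`Profile` classes here are illustrative stubs, not the project's models.

```python
# Stand-ins for the related objects the getter walks.
class User:
    def __init__(self, username):
        self.username = username

class Profile:
    def __init__(self, user):
        self.user = user

def get_username(obj):
    # Mirrors the serializer method: read the username off the related user.
    return obj.user.username

print(get_username(Profile(User("alice"))))  # -> alice
```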
Update serializer_field_mapping to use fields that set required=True
def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) ignore_fields = ( 'about_me', 'romanized_first_name', 'romanized_last_name', 'postal_code', ) set_fields_to_required(self, ignore_fields=ignore_fields)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def set_fields_to_required(serializer, ignore_fields=None):\n if ignore_fields is None:\n ignore_fields = []\n for field in serializer.fields.values():\n if field.field_name not in ignore_fields:\n field.required = True\n field.allow_null = False\n field.allow_blank = False", "def required_dict_validator(self, dict_fields, model_name, erp_required=[]):\n required_fields = self.env['settings.field'].sudo().search([('model_id.model', '=', model_name)])\n\n if required_fields:\n erp_required.extend(required_fields.required_field_ids.filtered(lambda x: x.id not in [er.id for er in erp_required]))\n\n for field in erp_required:\n if field.name in dict_fields and 'required' not in dict_fields[field.name]:\n dict_fields[field.name]['required'] = True\n dict_fields[field.name]['empty'] = False\n\n return dict_fields", "def get_fields(self):\n fields = super(GeoModelSerializer, self).get_fields()\n # Set the geometry field name when it's undeclared.\n if not self.Meta.geom_field:\n for name, field in fields.items():\n if isinstance(field, GeometryField):\n self.Meta.geom_field = name\n break\n return fields", "def serialize_field(self, payload, model, field, mapping):\n try:\n if field in model.fk_field_names():\n payload[field] = self.serialize_related_field(\n model, field, mapping\n )\n else:\n payload[field] = getattr(model, field)\n except SkipField:\n payload.pop(field, None)", "def __init__(self, *args, **kwargs):\n location_controls = kwargs.pop('location_controls', False)\n\n self.Meta.fields = list(self.Meta.fields)\n\n if location_controls:\n self.Meta.fields += ['next_location_id', 'previous_location_id']\n\n super(LocationFullSerializer, self).__init__(*args, **kwargs)", "def _populate_always_present_fields(self, field):\n defaults = [\n (\"label\", \"\"),\n (\"instructions\", \"\"),\n (\"placeholder\", \"\"),\n (\"defaultValue\", \"\"),\n (\"restrictions\", {}),\n (\"errorMessages\", {}),\n ]\n field.update({\n key: value\n for key, value in defaults if key not in field\n })", "def fields2jsonschema(self, fields, *, ordered=False, partial=None):\n jsonschema = {\"type\": \"object\", \"properties\": OrderedDict() if ordered else {}}\n\n for field_name, field_obj in fields.items():\n observed_field_name = field_obj.data_key or field_name\n prop = self.field2property(field_obj)\n jsonschema[\"properties\"][observed_field_name] = prop\n\n if field_obj.required:\n if not partial or (\n is_collection(partial) and field_name not in partial\n ):\n jsonschema.setdefault(\"required\", []).append(observed_field_name)\n\n if \"required\" in jsonschema:\n jsonschema[\"required\"].sort()\n\n return jsonschema", "def setRequiredValues(self, instance):\n for key in instance.__slots__:\n if key in instance.requiredFields:\n value = self.getTypicalValue(type(instance), key)\n setattr(instance, key, value)", "def intialize_from_fields(self):\n raise NotImplementedError", "def remove_null_fields(self):\n with open(self.schema_path, 'r') as file_obj:\n schema_data = yaml.safe_load(file_obj)\n schema_fields = schema_data.get('mapping').keys()\n for field in schema_fields:\n # We want to keep 'false' and 0 values, and avoid removing fields that are required in the schema.\n if field in self.data and self.data[field] in (None, '', [], {}) and \\\n not schema_data.get('mapping', {}).get(field, {}).get('required'):\n self.data.pop(field)", "def _initFields(self):\n pass", "def requires_mapping(self):", "def _make_reverse_relations_valid(self, data):\n for field_name, (field, related_field) in 
self._get_reverse_fields().items():\n if data.get(field.source) is None:\n continue\n if isinstance(field, serializers.ListSerializer):\n field = field.child\n if isinstance(field, serializers.ModelSerializer):\n # find the serializer field matching the reverse model relation\n for sub_field in field.fields.values():\n if sub_field.source == related_field.name:\n sub_field.required = False\n # found the matching field, move on\n break", "def test_map_field_base_case(self):\n field = 'title'\n mapping = {\n 'type': 'text',\n 'index': True\n }\n\n actual = mapper._map_field(mapping, field)\n expected = {\n 'properties': {\n 'title': {\n 'type': 'text',\n 'index': True\n }\n }\n }\n self.assertEqual(actual, expected)", "def api_field_from_django_field(cls, f, default=CharField):\n if isinstance(f, JSONField):\n return JSONApiField\n \n return super(PandaModelResource, cls).api_field_from_django_field(f, default)", "def _validate(mapping):\n missing_fields = _MANDATORY_FIELDS - set(mapping)\n if missing_fields:\n raise ValueError(\n \"Missing mandatory fields: {0}\".format(\n \", \".join(repr(field) for field in sorted(missing_fields))\n )\n )", "def test_create_enforces_required_fields(self):\n serializer = ServiceSerializer(data = {}, context = dict(project = self.project))\n self.assertFalse(serializer.is_valid())\n required_fields = {'name', 'category'}\n self.assertCountEqual(serializer.errors.keys(), required_fields)\n for name in required_fields:\n self.assertEqual(serializer.errors[name][0].code, 'required')", "def __init__(self, **kwargs):\n\n for (k, v) in self._fields:\n if k in kwargs:\n self.__dict__[k] = v.validate(kwargs[k])\n self.__dict__[k] = v.default", "def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n self.fields['first_name'].required = False\n self.fields['last_name'].required = False\n self.fields['institution'].required = False\n self.fields['institution_logo'].required = False\n self.fields['allow_notifications'].required = False", "def required_fields(required_fields=[]):\n def decorator(func):\n \"\"\" The decorator applied to the obj_create method\"\"\"\n def wrapper(resource, bundle=None, **kwargs):\n \"\"\" wraps the decorated method and verifies a list of required\n fields when a new object is being created.\n\n \"\"\"\n if not isinstance(bundle, Bundle):\n request = bundle\n data = resource.deserialize(\n request, request.body,\n format=request.META.get('CONTENT_TYPE', 'application/json')\n )\n bundle = resource.build_bundle(request=request, data=data)\n else:\n request = None\n\n for required_field in required_fields:\n if required_field not in bundle.data:\n response = HttpBadRequest(\n json.dumps(\"missing %s field\" % required_field),\n content_type=bundle.request.META['CONTENT_TYPE'])\n raise ImmediateHttpResponse(response=response)\n return func(resource, bundle=bundle, **kwargs)\n return wrapper\n return decorator", "def convert_fields(fields, _fields):\n mapper = {\n \"id\": \"local_id\",\n \"local_id\": \"id\"\n }\n fields = deepcopy(fields)\n for field in fields:\n if field['name'] in _fields:\n field['name'] = mapper[field['name']]\n return fields", "def _update_fields_with_default(annotation_fields, defaults_dict):\n all_fields = OrderedDict()\n all_filed_keys = _merge_field_keys(annotation_fields, defaults_dict)\n for name in all_filed_keys:\n # Get or create annotation\n annotation = (\n annotation_fields[name]\n if name in annotation_fields\n else _get_annotation_by_value(defaults_dict.get(name, Input._EMPTY))\n )\n # 
Create annotation if is class type and update default\n annotation = _update_annotation_with_default(annotation, name, defaults_dict.get(name, Input._EMPTY))\n all_fields[name] = annotation\n return all_fields", "def get_required_fields(self) -> Iterable[fields.Field]:\n for model_field in self.get_fields():\n if model_field.required:\n yield model_field", "def _get_simple_fields(cls) -> dict:\n return {\n name: tp\n for name, tp in cls._annotations().items()\n if AnnotationWrapper(tp).is_simple_in_opt_and_not_opt\n }", "def _assign_fields_to_params(cls, fields, params):\n if fields is None:\n fields = cls.get_default_read_fields()\n if fields:\n params['fields'] = ','.join(fields)", "def test_set_non_dictionary_based_field(self):\n self.assertRaises(TypeError, self._p.set_fields, '')", "def test_deserialize_required_fields():\n data = {}\n serializer = DogSerializer(data=data)\n assert not serializer.is_valid()\n assert len(serializer.errors) == 1\n\n data = {'name': 'bruce'}\n serializer = DogSerializer(data=data)\n assert serializer.is_valid()\n\n serializer.save()\n assert Dog.objects.count() == 1\n\n dog = Dog.objects.first()\n assert dog.name == 'bruce'", "def enforce_required_fields(self, attrs):\n if self.instance is not None:\n return\n # missing_items = {\n # field_name: self.missing_message\n # for field_name in self.fields\n # if field_name not in attrs\n # }\n # if missing_items:\n # raise ValidationError(missing_items, code='required')", "def test_defining_only_or_defer_on_nonexistant_fields_fails(self):", "def get_fields(self):\n fields = super(RelationSerializer, self).get_fields()\n\n if self.request.method == \"GET\":\n fields['type'] = serializers.CharField(source='type.name')\n else:\n fields['type'] = serializers.PrimaryKeyRelatedField(queryset=RelationType.objects.all())\n\n return fields", "def jsonable(self, *args, **options):\n d = {}\n for field_name, field in self.schema.normal_fields.items():\n field_val = getattr(self, field_name, None)\n field_val = field.jsonable(self, field_val)\n if field_val is not None:\n d[field_name] = field_val\n\n return d", "def update(self, instance, validated_data):\n assert hasattr(self.Meta, 'allowed_update_fields'), \"Serializers that are used for update must set Meta.allowed_update_fields\"\n if set(validated_data.keys()) - set(self.Meta.allowed_update_fields):\n raise serializers.ValidationError('Only updates on these fields are allowed: %s' % ', '.join(self.Meta.allowed_update_fields))\n return super(BaseSerializer, self).update(instance, validated_data)", "def _populate(self, fields):\n schema = self.schema\n for k, v in fields.items():\n fields[k] = schema.fields[k].iget(self, v)\n\n self.modify(fields)\n self.reset_modified()", "def test_alright_when_non_required_field_is_missing():\n\n model_definition = {'language': {'type': 'fixed',\n 'required': True,\n 'persisted': True},\n 'source': {'type': 'list',\n 'required': False,\n 'persisted': True},\n 'resources.title': {'type': 'text',\n 'required': False,\n 'persisted': True}}\n product1 = {'language': 'english'}\n factory = ProductModelFactory(model_definition)\n factory.build('product1', product1)\n # Ok. 
No exceptions were raised.", "def _validate_fields(self, change_fields):\n pass", "def clean_fields(self, instance, exclude=None):\n errors = {}\n exclude = exclude or []\n for name, f in self.properties.items():\n raw_value = getattr(instance, name, None)\n is_blank = not bool(raw_value)\n is_nullable = f.null\n is_defaulted = f.column.default or f.column.server_default\n is_required = f.required\n\n is_skippable = is_blank and (is_nullable or is_defaulted or not is_required)\n\n if name in exclude or is_skippable:\n continue\n try:\n setattr(instance, name, f.clean(raw_value, instance))\n except ValidationError as e:\n errors[name] = e.error_list\n if errors:\n raise NestedValidationError(errors)", "def _update_fields_with_default(\n annotation_fields: Dict[str, Union[Annotation, Input, Output]], defaults_dict: Dict[str, Any]\n ) -> Dict[str, Union[Annotation, Input, Output]]:\n all_fields = OrderedDict()\n all_filed_keys = _merge_field_keys(annotation_fields, defaults_dict)\n for name in all_filed_keys:\n # Get or create annotation\n annotation = (\n annotation_fields[name]\n if name in annotation_fields\n else _get_annotation_by_value(defaults_dict.get(name, Input._EMPTY))\n )\n # Create annotation if is class type and update default\n annotation = _update_annotation_with_default(annotation, name, defaults_dict.get(name, Input._EMPTY))\n all_fields[name] = annotation\n return all_fields", "def test_missing_mandatory_attributes():\n model_definition = {'source': {'type': 'list',\n 'required': True,\n 'persisted': True},\n 'resources.title': {'type': 'text',\n 'required': True,\n 'persisted': True}}\n # missing language in the model\n _ = ProductModelFactory(model_definition)", "def test_swagger_field_is_required():\n raw_schema = RawSchemaFactory()\n raw_schema.pop('swagger', None)\n\n assert 'swagger' not in raw_schema\n\n with pytest.raises(ValidationError) as err:\n swagger_schema_validator(raw_schema)\n\n assert_message_in_errors(\n MESSAGES['required']['required'],\n err.value.detail,\n 'required.swagger',\n )", "def is_required(self, field):\n return field.scheme.is_required and not field.scheme.is_pk", "def test_alright_when_required_field_is_missing_but_default_is_given():\n\n model_definition = {'language': {'type': 'fixed',\n 'required': True,\n 'persisted': True,\n 'default': 'portuguese'},\n 'source': {'type': 'list',\n 'required': False,\n 'persisted': True}}\n product1 = {'source': ['Whatever']}\n factory = ProductModelFactory(model_definition)\n factory.build('product1', product1)\n # Ok. 
No exceptions were raised.", "def clean_fields(self, exclude=None):\n obj = self._obj\n if obj is None:\n return None\n\n self.event = self.clean_event(self.event)\n self.resource_name = self.clean_resource_name(obj.__class__.__name__)\n self.resource_id = obj.id\n self.site = self.clean_site(obj)\n\n serializer_class = self.get_serializer_for_resource(self.resource_name)\n serializer = serializer_class(obj)\n self._resource = serializer.data", "def field_mapping(self):\n fields = self.fields\n if self.target_field is not None:\n del fields[self.target_field.get('name')]\n field_labels = list(self.fields.keys())\n\n field_mapping = {\n name: (\n field_labels.index(name),\n lambda value, e=e: self.parse_type(value, e)\n )\n for name, e in fields.items()\n if e.tag == f'{{{self.namespace}}}DataField'\n }\n\n field_mapping.update({\n name: (\n field_labels.index(self.find(e, 'FieldRef').get('field')),\n lambda value, e=e: self.parse_type(value, e)\n )\n for name, e in fields.items()\n if e.tag == f'{{{self.namespace}}}DerivedField'\n })\n\n return field_mapping", "def required_fields(model, values):\n if values:\n for k in list(values):\n if k not in model.__table__.columns.keys():\n values.pop(k)\n return values", "def check_for_required_fields(cls, fields=[], dataDict={}):\n\n validateRequired = Validate.required(fields=fields, dataDict=dataDict)\n if validateRequired['status'] == False:\n res = jsonify(\n {'status': 400, 'error': validateRequired['message'], 'data': []})\n return abort(make_response(res, 400))\n return True", "def get_field_deserializers(self,) -> Dict[str, Callable[[ParseNode], None]]:\n from .schedule_change_request import ScheduleChangeRequest\n\n from .schedule_change_request import ScheduleChangeRequest\n\n fields: Dict[str, Callable[[Any], None]] = {\n \"endDateTime\": lambda n : setattr(self, 'end_date_time', n.get_datetime_value()),\n \"startDateTime\": lambda n : setattr(self, 'start_date_time', n.get_datetime_value()),\n \"timeOffReasonId\": lambda n : setattr(self, 'time_off_reason_id', n.get_str_value()),\n }\n super_fields = super().get_field_deserializers()\n fields.update(super_fields)\n return fields", "def set_additional_fields(cls, model, data):\n for k, v in data.items():\n if not hasattr(model, k):\n setattr(model, k, v)", "def test_fields(self):\n\n class Foo(Model):\n field1 = StringField()\n field2 = IntegralField()\n\n assert hasattr(Foo, \"_fields\")\n assert type(Foo._fields) is dict\n\n assert not hasattr(Foo, \"field1\")\n assert \"field1\" in Foo._fields\n assert type(Foo._fields[\"field1\"]) is StringField\n\n assert not hasattr(Foo, \"field2\")\n assert \"field2\" in Foo._fields\n assert type(Foo._fields[\"field2\"]) is IntegralField", "def test_to_dict(self):\n\n class Person(Model):\n name = StringField()\n age = IntegralField(bounds = (0, None))\n siblings = ListField(of = StringField())\n\n data1 = {\n \"name\": \"Joe Shmoe\",\n \"age\": 21,\n \"siblings\": [\"Dick Shmoe\", \"Jane Shmoe\"]\n }\n person1 = Person(**data1)\n assert person1.to_dict() == data1\n\n # The defined but unset fields should still be present, but set to none\n data2 = {\"notaname\": 2, \"age\": \"lots\"}\n person2 = Person.from_dict(data2)\n assert person2.to_dict() == {\n \"notaname\": 2,\n \"age\": \"lots\",\n \"name\": None,\n \"siblings\": None\n }", "def set_fields(self, fields: FieldDict):\n super().set_fields(fields)\n nested_field: NestedField = self.fields[self.nested]\n if not isinstance(nested_field, NestedField):\n raise TypeError(\n f'The field 
\"{self.nested}\" must be a NestedField instance, not \"{nested_field}\".')\n if nested_field.many:\n raise ValueError(f'The field \"{self.nested}\" can not be set as \"many=True\".')\n self.nested_field = nested_field\n # create partial methods\n self._do_dump = partial(\n getattr(self, self.dump_method),\n target=nested_field.dump_target,\n method=nested_field.dump,\n )\n self._do_load = partial(\n getattr(self, self.load_method),\n target=nested_field.load_target,\n method=nested_field.load,\n )", "def get_field_deserializers(self,) -> Dict[str, Callable[[ParseNode], None]]:\n fields: Dict[str, Callable[[Any], None]] = {\n \"allowedToCreateApps\": lambda n : setattr(self, 'allowed_to_create_apps', n.get_bool_value()),\n \"allowedToCreateSecurityGroups\": lambda n : setattr(self, 'allowed_to_create_security_groups', n.get_bool_value()),\n \"allowedToCreateTenants\": lambda n : setattr(self, 'allowed_to_create_tenants', n.get_bool_value()),\n \"allowedToReadBitlockerKeysForOwnedDevice\": lambda n : setattr(self, 'allowed_to_read_bitlocker_keys_for_owned_device', n.get_bool_value()),\n \"allowedToReadOtherUsers\": lambda n : setattr(self, 'allowed_to_read_other_users', n.get_bool_value()),\n \"@odata.type\": lambda n : setattr(self, 'odata_type', n.get_str_value()),\n \"permissionGrantPoliciesAssigned\": lambda n : setattr(self, 'permission_grant_policies_assigned', n.get_collection_of_primitive_values(str)),\n }\n return fields", "def fields(self, fields):\n\n self._fields = fields", "def test_reader_serializer(self):\n transaction = TransactionFactory.build()\n serializer = TransactionSerializer(transaction)\n serialized_data = serializer.data\n\n for (\n serializer_field,\n model_field,\n ) in transaction_reader_fields_mapping.items():\n self.assertIn(serializer_field, serialized_data)\n\n if serializer_field == \"estabelecimento\":\n value = transaction.company.cnpj\n else:\n value = getattr(transaction, model_field)\n\n self.assertEqual(serialized_data[serializer_field], value)\n\n # tests if serializer validates\n serializer = TransactionSerializer(data=serialized_data)\n self.assertTrue(serializer.is_valid())", "def _patch_schema(self):\n fields = get_json()['data']['attributes'].keys()\n return make_entity_schema(\n self.SCHEMA, self.RESOURCE_NAME,\n make_data_schema(\n self.SCHEMA, id_required=True,\n only=fields, partial=True\n )\n )", "def test_required_field_values_are_present():\n\n model_definition = {'language': {'type': 'fixed',\n 'required': True,\n 'persisted': True},\n 'source': {'type': 'list',\n 'required': False,\n 'persisted': True},\n 'resources.title': {'type': 'text',\n 'required': True,\n 'persisted': True}}\n product1 = {'language': 'english'}\n factory = ProductModelFactory(model_definition)\n factory.build('product1', product1)", "def _make_field_map(fields):\n field_map = {}\n for field in fields:\n if field.name in field_map:\n raise SchemaParseException(\n 'Duplicate record field name %r.' 
% field.name)\n field_map[field.name] = field\n return field_map", "def _save_direct_relations(self, kwargs):\n for field_name, field in self.fields.items():\n if field.read_only:\n continue\n if isinstance(self._validated_data, dict) and self._validated_data.get(field.source) is None:\n continue\n if not isinstance(field, serializers.BaseSerializer):\n continue\n if hasattr(self, 'Meta') and hasattr(self.Meta, 'model'):\n # ModelSerializer (or similar) so we need to exclude reverse relations\n try:\n _, direct = self._get_related_field(field)\n except FieldDoesNotExist:\n continue\n if not direct:\n continue\n\n # reinject validated_data\n field._validated_data = self._validated_data[field_name]\n self._validated_data[field_name] = field.save(**kwargs.pop(field_name, {}))", "def get_fields(cls, fields=None, excludes=None):\r\n\r\n final_fields = {}\r\n fields = fields or []\r\n excludes = excludes or []\r\n\r\n if not cls._meta.object_class:\r\n return final_fields\r\n\r\n for name, f in cls._meta.object_class._fields.iteritems():\r\n # If the field name is already present, skip\r\n if name in cls.base_fields:\r\n continue\r\n\r\n # If field is not present in explicit field listing, skip\r\n if fields and name not in fields:\r\n continue\r\n\r\n # If field is in exclude list, skip\r\n if excludes and name in excludes:\r\n continue\r\n\r\n # TODO: Might need it in the future\r\n # if cls.should_skip_field(f):\r\n # continue\r\n\r\n api_field_class = cls.api_field_from_mongo_field(f)\r\n\r\n primary_key = f.primary_key or name == getattr(cls._meta, 'id_field', 'id')\r\n\r\n kwargs = {\r\n 'attribute': name,\r\n 'unique': f.unique or primary_key,\r\n 'null': not f.required and not primary_key,\r\n 'help_text': f.help_text,\r\n }\r\n\r\n # If field is not required, it does not matter if set default value,\r\n # so we do\r\n if not f.required:\r\n kwargs['default'] = f.default\r\n else:\r\n # MongoEngine does not really differ between user-specified default\r\n # and its default, so we try to guess\r\n if isinstance(f, mongoengine.ListField):\r\n if not callable(f.default) or f.default() != []: # If not MongoEngine's default\r\n kwargs['default'] = f.default\r\n elif isinstance(f, mongoengine.DictField):\r\n if not callable(f.default) or f.default() != {}: # If not MongoEngine's default\r\n kwargs['default'] = f.default\r\n else:\r\n if f.default is not None: # If not MongoEngine's default\r\n kwargs['default'] = f.default\r\n\r\n kwargs = cls.api_field_options(name, f, kwargs)\r\n\r\n final_fields[name] = api_field_class(**kwargs)\r\n final_fields[name].instance_name = name\r\n final_fields[name]._primary_key = primary_key\r\n\r\n # We store MongoEngine field so that schema output can show\r\n # to which content the list is limited to (if any)\r\n if isinstance(f, mongoengine.ListField):\r\n final_fields[name].field = f.field\r\n\r\n return final_fields", "def set_specific_fields(self):\n raise NotImplementedError(\"Must be defined by subclass!\")", "def get_field_deserializers(self,) -> Dict[str, Callable[[ParseNode], None]]:\n fields: Dict[str, Callable[[Any], None]] = {\n \"assignedDateTime\": lambda n : setattr(self, 'assigned_date_time', n.get_datetime_value()),\n \"capabilityStatus\": lambda n : setattr(self, 'capability_status', n.get_str_value()),\n \"@odata.type\": lambda n : setattr(self, 'odata_type', n.get_str_value()),\n \"service\": lambda n : setattr(self, 'service', n.get_str_value()),\n \"servicePlanId\": lambda n : setattr(self, 'service_plan_id', n.get_uuid_value()),\n }\n 
return fields", "def add_base_fields(self) -> None:\n for field in get_model_fields(self.model, foreign=False, m2m=False):\n if hasattr(self.factory, field.name) or field.has_default() or field.blank:\n continue\n setattr(\n self.factory, field.name, self._get_decl_for_model_field(field)\n )", "def depopulate(self, is_update):\n fields = {}\n schema = self.schema\n for k, field in schema.fields.items():\n is_modified = k in self.modified_fields\n orig_v = getattr(self, k)\n v = field.iset(\n self,\n orig_v,\n is_update=is_update,\n is_modified=is_modified\n )\n\n if is_modified or v is not None:\n if is_update and field.is_pk() and v == orig_v:\n continue\n\n else:\n fields[k] = v\n\n if not is_update:\n for field_name in schema.required_fields.keys():\n if field_name not in fields:\n raise KeyError(\"Missing required field {}\".format(field_name))\n\n return fields", "def __createFields(self):\n fields = self.updateFields\n for field in fields:\n self.__createField(field)", "def test_field_nullable(self):\n node_dict = {\n 'host_name': 'abc',\n 'local_router_id': '1.1.1.1',\n 'as_num': 100,\n 'bgpls_id': '0.0.0.0',\n 'igp_id': '0.0.0.0'\n }\n node = Node(**node_dict)\n for name, field in node_dict.items():\n self.assertEqual(field, node.__dict__[name])", "def _validate(self):\n fields, schema = self.__dict__, self._def.default\n extra_fields = fields.viewkeys() - schema.viewkeys()\n if len(extra_fields) > 0:\n raise AttributeError('Fields found that are not in the schema: %r' % (list(extra_fields)))\n for key in fields.iterkeys():\n if type(fields[key]) is not type(schema[key]):\n raise AttributeError('Invalid %s for field \"%s\", should be %s' %\n (type(fields[key]), key, type(schema[key])))", "def _get_not_simple_fields(cls) -> Dict[str, str]:\n\n return {\n name: anno\n for name, anno in cls._annotations().items()\n if not AnnotationWrapper(anno).is_simple_in_opt_and_not_opt\n }", "def validate_fields_for_magento(self,data):\n for field in data:\n if data[field] == None :\n del data[field]\n if data[field] == True:\n data[field] = 1\n if data[field] == False :\n data[field] = 0", "def clean(self):\n cleaned_data = super().clean()\n cleaned_data = {key: field for key, field in cleaned_data.items()\n if field is not None}\n return cleaned_data", "def _initializeRequestField(self,field,referenceField):\n\t\tvaluesDict = referenceField.values\n\t\tfield.initialize_values(valuesDict)\n\t\t\n\t\tpass", "def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n\n for field in self.fields:\n self.fields[field].label = False", "def __init__(self):\n super(ObjectSchema, self).__init__()\n self.is_allow_undefined = False", "def _update_allowed_fields(self) -> list:\n raise NotImplementedError('Each model has to have its list of update allowed fields')", "def test_prep_fields(self):\n pass", "def required_fields(self, gid):\n r = self.get(\"/groups/{g:d}/fields\".format(g=gid))\n return r.json()", "def writeRequiredFields(self, outputFile):\n fields = []\n for field in self.getFields():\n if not field.has_default:\n fields.append(field)\n if len(fields) < 1:\n self._writeWithIndent('requiredFields = set([])', outputFile)\n else:\n self._writeWithIndent('requiredFields = set([', outputFile)\n for field in fields:\n string_ = '\"{0}\",'.format(field.name)\n self._writeWithIndent(string_, outputFile, 2)\n self._writeWithIndent('])', outputFile)", "def get_field_deserializers(self,) -> Dict[str, Callable[[ParseNode], None]]:\n from .app_management_configuration import 
AppManagementConfiguration\n from .policy_base import PolicyBase\n\n from .app_management_configuration import AppManagementConfiguration\n from .policy_base import PolicyBase\n\n fields: Dict[str, Callable[[Any], None]] = {\n \"applicationRestrictions\": lambda n : setattr(self, 'application_restrictions', n.get_object_value(AppManagementConfiguration)),\n \"isEnabled\": lambda n : setattr(self, 'is_enabled', n.get_bool_value()),\n \"servicePrincipalRestrictions\": lambda n : setattr(self, 'service_principal_restrictions', n.get_object_value(AppManagementConfiguration)),\n }\n super_fields = super().get_field_deserializers()\n fields.update(super_fields)\n return fields", "def namespaced_fields(self):\n ...", "def update(self, mapping):\n if not ismapping(mapping):\n raise TypeError(\"mapping type required\")\n field_names = getpyattr(type(self), 'field_names')\n for key, value in mapping.items():\n if key in field_names:\n setattr(self, key, value)", "def __call__(self, func):\n # Set or extend the function's \"custom_fields\" attribute\n func.required_fields = getattr(func, \"required_fields\", {})\n func.required_fields[self.fieldname] = self.input_type\n # The decorated function is unchanged\n return func", "def set_default_read_fields(cls, fields):\n cls._default_read_fields = fields", "def validation_required(self, validation_required):\n self._validation_required = validation_required", "def extract_fields(self, json_dict):\n raise NotImplementedError()", "def test_map_field_recursive_case(self):\n field = 'content.title'\n mapping = {\n 'type': 'text',\n 'index': True\n }\n\n actual = mapper._map_field(mapping, field)\n expected = {\n 'properties': {\n 'content': {\n 'properties': {\n 'title': {\n 'type': 'text',\n 'index': True\n }\n }\n }\n }\n }\n self.assertEqual(actual, expected)", "def field_mappings(self) -> Optional[Sequence['outputs.FieldMappingResponse']]:\n return pulumi.get(self, \"field_mappings\")", "def set_fields(self, **kwargs):\n for key, value in kwargs.items():\n if key in self.fields.keys():\n if type(value) != bool:\n raise TypeError('Expecting Bool passed {}'\n .format(type(value)))\n self.fields[key] = value\n else:\n raise KeyError", "def _validate_default_fields() -> None:\n default_fields = [\n field\n for field in fields\n if field[\"name\"] in DEFAULT_PREDICTIONS_TABLE_FIELDS\n ]\n if len(DEFAULT_PREDICTIONS_TABLE_FIELDS) != len(default_fields):\n raise NotFilledDefaultFields", "def update_json(self):\n self.set_version_to_default()\n self.remove_null_fields()\n self.remove_unnecessary_keys()\n self.set_fromVersion(from_version=self.from_version)", "def validate(self, data):\n # calling subserializer validate method (fields, and presets)\n data = super(FormidableSerializer, self).validate(data)\n # we check every field define in presets are define inside the form.\n if 'fields' in data and 'presets' in data:\n data = self.check_presets_cohesion(data)\n return data", "def set_fields(self, upstream_obj, nonparam_fields=None):\n default_data = upstream_obj.default_data(start_year=self.start_year,\n metadata=True)\n\n if self.raw_input_fields is None:\n self.raw_input_fields = {}\n for field in self._meta.fields:\n if (getattr(self, field.attname, None) and\n field.name not in nonparam_fields):\n raw_val = getattr(self, field.attname)\n if field.name.endswith(\"cpi\") and isinstance(raw_val, bool):\n raw_val = str(raw_val)\n self.raw_input_fields[field.name] = raw_val\n\n input_fields, failed_lookups = param_formatters.parse_fields(\n 
self.raw_input_fields,\n default_data\n )\n\n if failed_lookups:\n # distinct elements\n potential_failed_lookups = set(failed_lookups)\n # only keep parameters that used to be in the upstream package\n set_failed_lookups = potential_failed_lookups - nonparam_fields\n if self.deprecated_fields is None:\n self.deprecated_fields = []\n # drop parameters that we already know are deprecated\n set_failed_lookups.difference_update(self.deprecated_fields)\n self.deprecated_fields += list(set_failed_lookups)\n\n self.input_fields = input_fields", "def check_mandatory_props(klass: pyorient.ogm.declarative.DeclarativeMeta, obj: Dict):\n missing = []\n props = klass.objects.g.props_from_db[klass](Graph.compute_all_properties(klass))\n for k, v in props.items():\n prop = getattr(klass, k)\n if hasattr(prop, 'mandatory'):\n if prop.mandatory and k not in obj:\n # Fix values if default set \n if k == \"revoked\":\n obj[k] = False\n continue\n if k == \"spec_version\":\n obj[k] = \"2.1\"\n continue\n missing.append(k)\n if isinstance(prop, odbproperty.String):\n obj[k] = 'added_default'\n elif isinstance(prop, (odbproperty.Date, odbproperty.DateTime)):\n obj[k] = get_datetime()\n elif isinstance(prop, odbproperty.EmbeddedList):\n obj[k] = ['added_default']\n elif isinstance(prop, odbproperty.Integer):\n obj[k] = 0\n elif isinstance(prop, odbproperty.Float):\n obj[k] = 0.0\n elif isinstance(prop, odbproperty.Binary):\n obj[k] = 0\n elif isinstance(prop, odbproperty.Byte):\n obj[k] = 0\n elif isinstance(prop, odbproperty.Decimal):\n obj[k] = 0.0\n elif isinstance(prop, odbproperty.Long):\n obj[k] = 0\n elif isinstance(prop, odbproperty.Short):\n obj[k] = 0\n elif isinstance(prop, odbproperty.Boolean):\n obj[k] = True\n else:\n logging.info(f'What to do with missing mandatory field {k} of type {v.__class__}?')\n if missing:\n logging.info(f'missing mandatory fields for {obj[\"id_\"]}: {missing}')\n return obj", "def test_prep_country_fields_flat(self):\n original_flag = self.form.country_optional\n self.form.country_optional = True\n original_fields = self.form.fields\n original_removed = getattr(self.form, 'removed_fields', None)\n original_computed = getattr(self.form, 'computed_fields', None)\n self.form.fields = original_fields.copy()\n if original_removed is not None:\n self.form.removed_fields = original_removed.copy()\n if original_computed is not None:\n self.form.computed_fields = original_computed.copy()\n remaining = original_fields.copy()\n opts, field_rows = {'fake_opts': 'fake', 'fields': ['nope']}, [{'name': 'assigned_field'}]\n args = ['arbitrary', 'input', 'args']\n kwargs = {'test_1': 'data_1', 'test_2': 'data_2'}\n field_names = (self.form.country_field_name, 'country_flag', )\n if not any(remaining.get(name, None) for name in field_names):\n fix_fields = {name: self.get_missing_field(name) for name in field_names if name not in remaining}\n remaining.update(fix_fields)\n expected_add = {name: remaining[name] for name in field_names if name in remaining}\n expected_field_rows = field_rows.copy()\n expected_field_rows.append(expected_add)\n expected_remaining = {name: field for name, field in remaining.items() if name not in expected_add}\n expected_opts = deepcopy(opts)\n # expected_opts['fields'].append(field_names)\n kwargs['flat_fields'] = True\n expected_remaining.update(expected_add)\n\n sent = (opts, field_rows, remaining, *args)\n r_opts, r_rows, r_remaining, *r_args, r_kwargs = self.form.prep_country_fields(*sent, **kwargs)\n self.assertEqual(expected_opts, r_opts)\n 
self.assertEqual(expected_field_rows, r_rows)\n self.assertEqual(expected_remaining, r_remaining)\n self.assertEqual(args, r_args)\n self.assertEqual(kwargs, r_kwargs)\n\n self.form.country_optional = original_flag\n self.form.fields = original_fields\n if original_removed is not None:\n self.form.removed_fields = original_removed\n if original_computed is not None:\n self.form.computed_fields = original_computed\n pass", "def set_fields(self, fields: FieldDict):\n super().set_fields(fields)\n # bind fields to attrs\n for attr in ('a', 'b'):\n setattr(self, f'field_{attr}', self.fields[getattr(self, attr)])\n # get error messages\n dump_error = self.error_cls(self.get_error_message(\n self.op, a=self.field_a.dump_source, b=self.field_b.dump_source))\n load_error = self.error_cls(self.get_error_message(\n self.op, a=self.field_a.load_source, b=self.field_b.load_source))\n # set partial arguments for `validate`\n self.validate_dump = partial(\n self.validate,\n a_key=self.field_a.dump_target,\n b_key=self.field_b.dump_target,\n error=dump_error)\n self.validate_load = partial(\n self.validate,\n a_key=self.field_a.load_target,\n b_key=self.field_b.load_target,\n error=load_error)", "def remove_read_only_fields(self):\n self.fields = XML_List(Elements.FIELDS, [field for field in self.fields if\n not field.read_only or not str_to_bool(field.read_only)])", "def build_standard_field(self, field_name, model_field_type):\n field_mapping = self.serializer_field_mapping\n field_class = field_mapping[model_field_type]\n field_kwargs = get_field_kwargs(field_name, model_field_type)\n\n if \"choices\" in field_kwargs:\n # Fields with choices get coerced into `ChoiceField`\n # instead of using their regular typed field.\n field_class = self.serializer_choice_field\n # Some model fields may introduce kwargs that would not be valid\n # for the choice field. We need to strip these out.\n # Eg. 
models.DecimalField(max_digits=3, decimal_places=1, choices=DECIMAL_CHOICES)\n valid_kwargs = {\n \"read_only\",\n \"write_only\",\n \"required\",\n \"default\",\n \"initial\",\n \"source\",\n \"label\",\n \"help_text\",\n \"style\",\n \"error_messages\",\n \"validators\",\n \"allow_null\",\n \"allow_blank\",\n \"choices\",\n }\n for key in list(field_kwargs):\n if key not in valid_kwargs:\n field_kwargs.pop(key)\n\n if not issubclass(field_class, fields.CharField) and not issubclass(\n field_class, fields.ChoiceField\n ):\n # `allow_blank` is only valid for textual fields.\n field_kwargs.pop(\"allow_blank\", None)\n\n return field_class, field_kwargs", "def objectFields(self):\n raise NotImplementedError", "def get_field_deserializers(self,) -> Dict[str, Callable[[ParseNode], None]]:\n from .delegated_admin_relationship_request_action import DelegatedAdminRelationshipRequestAction\n from .delegated_admin_relationship_request_status import DelegatedAdminRelationshipRequestStatus\n from .entity import Entity\n\n from .delegated_admin_relationship_request_action import DelegatedAdminRelationshipRequestAction\n from .delegated_admin_relationship_request_status import DelegatedAdminRelationshipRequestStatus\n from .entity import Entity\n\n fields: Dict[str, Callable[[Any], None]] = {\n \"action\": lambda n : setattr(self, 'action', n.get_enum_value(DelegatedAdminRelationshipRequestAction)),\n \"createdDateTime\": lambda n : setattr(self, 'created_date_time', n.get_datetime_value()),\n \"lastModifiedDateTime\": lambda n : setattr(self, 'last_modified_date_time', n.get_datetime_value()),\n \"status\": lambda n : setattr(self, 'status', n.get_enum_value(DelegatedAdminRelationshipRequestStatus)),\n }\n super_fields = super().get_field_deserializers()\n fields.update(super_fields)\n return fields", "def get_field_deserializers(self,) -> Dict[str, Callable[[ParseNode], None]]:\n from .app_identity import AppIdentity\n from .entity import Entity\n from .print_task import PrintTask\n\n from .app_identity import AppIdentity\n from .entity import Entity\n from .print_task import PrintTask\n\n fields: Dict[str, Callable[[Any], None]] = {\n \"createdBy\": lambda n : setattr(self, 'created_by', n.get_object_value(AppIdentity)),\n \"displayName\": lambda n : setattr(self, 'display_name', n.get_str_value()),\n \"tasks\": lambda n : setattr(self, 'tasks', n.get_collection_of_object_values(PrintTask)),\n }\n super_fields = super().get_field_deserializers()\n fields.update(super_fields)\n return fields", "def get_field_deserializers(self,) -> Dict[str, Callable[[ParseNode], None]]:\n from .container_filter import ContainerFilter\n from .group_filter import GroupFilter\n from .object_mapping import ObjectMapping\n from .string_key_string_value_pair import StringKeyStringValuePair\n\n from .container_filter import ContainerFilter\n from .group_filter import GroupFilter\n from .object_mapping import ObjectMapping\n from .string_key_string_value_pair import StringKeyStringValuePair\n\n fields: Dict[str, Callable[[Any], None]] = {\n \"containerFilter\": lambda n : setattr(self, 'container_filter', n.get_object_value(ContainerFilter)),\n \"editable\": lambda n : setattr(self, 'editable', n.get_bool_value()),\n \"groupFilter\": lambda n : setattr(self, 'group_filter', n.get_object_value(GroupFilter)),\n \"id\": lambda n : setattr(self, 'id', n.get_str_value()),\n \"metadata\": lambda n : setattr(self, 'metadata', n.get_collection_of_object_values(StringKeyStringValuePair)),\n \"name\": lambda n : setattr(self, 'name', 
n.get_str_value()),\n \"objectMappings\": lambda n : setattr(self, 'object_mappings', n.get_collection_of_object_values(ObjectMapping)),\n \"@odata.type\": lambda n : setattr(self, 'odata_type', n.get_str_value()),\n \"priority\": lambda n : setattr(self, 'priority', n.get_int_value()),\n \"sourceDirectoryName\": lambda n : setattr(self, 'source_directory_name', n.get_str_value()),\n \"targetDirectoryName\": lambda n : setattr(self, 'target_directory_name', n.get_str_value()),\n }\n return fields", "def to_dict(self):\n _dict = self.dict(by_alias=True,\n exclude={\n },\n exclude_none=True)\n # set to None if billing_cycle_start (nullable) is None\n # and __fields_set__ contains the field\n if self.billing_cycle_start is None and \"billing_cycle_start\" in self.__fields_set__:\n _dict['billing_cycle_start'] = None\n\n # set to None if billing_cycle_end (nullable) is None\n # and __fields_set__ contains the field\n if self.billing_cycle_end is None and \"billing_cycle_end\" in self.__fields_set__:\n _dict['billing_cycle_end'] = None\n\n # set to None if canceled_at (nullable) is None\n # and __fields_set__ contains the field\n if self.canceled_at is None and \"canceled_at\" in self.__fields_set__:\n _dict['canceled_at'] = None\n\n # set to None if charge_id (nullable) is None\n # and __fields_set__ contains the field\n if self.charge_id is None and \"charge_id\" in self.__fields_set__:\n _dict['charge_id'] = None\n\n # set to None if paused_at (nullable) is None\n # and __fields_set__ contains the field\n if self.paused_at is None and \"paused_at\" in self.__fields_set__:\n _dict['paused_at'] = None\n\n # set to None if trial_start (nullable) is None\n # and __fields_set__ contains the field\n if self.trial_start is None and \"trial_start\" in self.__fields_set__:\n _dict['trial_start'] = None\n\n # set to None if trial_end (nullable) is None\n # and __fields_set__ contains the field\n if self.trial_end is None and \"trial_end\" in self.__fields_set__:\n _dict['trial_end'] = None\n\n return _dict", "def add_required_properties(self, p: str):\n # TODO: Deprecate\n for k in p.keys():\n try:\n self._properties[k].set_required(True)\n except KeyError:\n self._properties.define_property(name=k, supported=False, required=True)" ]
[ "0.7349095", "0.59120345", "0.58142334", "0.57975155", "0.5773961", "0.5717595", "0.56959385", "0.5695273", "0.568627", "0.56845975", "0.5660791", "0.5620639", "0.5562065", "0.5561373", "0.5556391", "0.555515", "0.55402875", "0.5511495", "0.546896", "0.5459124", "0.54497963", "0.54110634", "0.5398385", "0.5380121", "0.53782773", "0.53751194", "0.5372808", "0.536668", "0.5352254", "0.5342452", "0.533807", "0.53176105", "0.53018355", "0.5276986", "0.5269212", "0.52681553", "0.52649176", "0.5257536", "0.5252648", "0.52448726", "0.5225241", "0.52013415", "0.5182229", "0.5170967", "0.5165028", "0.51587635", "0.5155698", "0.5134422", "0.51295984", "0.5126346", "0.5120603", "0.51159614", "0.5115571", "0.51124763", "0.5108347", "0.5102396", "0.5102034", "0.509969", "0.5094964", "0.50898844", "0.50701815", "0.50684476", "0.50646734", "0.5063732", "0.5051295", "0.5049662", "0.5048179", "0.50332874", "0.50258726", "0.50250536", "0.50249416", "0.5024757", "0.5012598", "0.501024", "0.49996844", "0.49987686", "0.49954683", "0.49933034", "0.49918306", "0.4985915", "0.49804753", "0.49792653", "0.497623", "0.49646452", "0.49552575", "0.4954946", "0.4951771", "0.49498212", "0.49449262", "0.49443778", "0.49286342", "0.49244466", "0.49187472", "0.49071637", "0.49025235", "0.49007615", "0.49001327", "0.4897843", "0.48955244", "0.48887408" ]
0.531521
32
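The positive document in the record above delegates to a `set_fields_to_required` helper whose body appears verbatim in the first negative. A runnable sketch of that helper follows; the `StubField`/`StubSerializer` classes are test doubles standing in for DRF's field and serializer objects.

```python
class StubField:
    def __init__(self, field_name):
        self.field_name = field_name
        self.required = False
        self.allow_null = True
        self.allow_blank = True

class StubSerializer:
    def __init__(self, names):
        # DRF exposes a dict-like `fields` mapping of name -> field object.
        self.fields = {name: StubField(name) for name in names}

def set_fields_to_required(serializer, ignore_fields=None):
    # Flip every field to required except those explicitly ignored.
    if ignore_fields is None:
        ignore_fields = []
    for field in serializer.fields.values():
        if field.field_name not in ignore_fields:
            field.required = True
            field.allow_null = False
            field.allow_blank = False

s = StubSerializer(["first_name", "about_me"])
set_fields_to_required(s, ignore_fields=["about_me"])
print(s.fields["first_name"].required, s.fields["about_me"].required)  # True False
```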
Assert that filled_out can't be turned off and that agreed_to_terms_of_service is true
def validate(self, attrs): if 'filled_out' in attrs and not attrs['filled_out']: raise ValidationError("filled_out cannot be set to false") if 'agreed_to_terms_of_service' in attrs and not attrs['agreed_to_terms_of_service']: raise ValidationError("agreed_to_terms_of_service cannot be set to false") # Postal code is only required in United States and Canada country = attrs.get("country", "") postal_code = attrs.get("postal_code", "") if country in ("US", "CA") and not postal_code: raise ValidationError("postal_code may not be blank") return super().validate(attrs)
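A stand-alone behavior sketch of the `validate` rules in the document above; `ValidationError` is stubbed here so the snippet runs without DRF installed.

```python
class ValidationError(Exception):
    pass

def validate(attrs):
    if 'filled_out' in attrs and not attrs['filled_out']:
        raise ValidationError("filled_out cannot be set to false")
    if 'agreed_to_terms_of_service' in attrs and not attrs['agreed_to_terms_of_service']:
        raise ValidationError("agreed_to_terms_of_service cannot be set to false")
    # Postal code is only required in the United States and Canada.
    if attrs.get("country", "") in ("US", "CA") and not attrs.get("postal_code", ""):
        raise ValidationError("postal_code may not be blank")
    return attrs

validate({"filled_out": True, "agreed_to_terms_of_service": True, "country": "GB"})  # ok
try:
    validate({"country": "US", "postal_code": ""})
except ValidationError as exc:
    print(exc)  # -> postal_code may not be blank
```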
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_reject_agreement(self):\n pass", "def test_terminate_agreement(self):\n pass", "def test_dont_cancel_if_advance_payment_not_required(self, mock_tz):\n mock_tz.now.return_value = datetime(\n 2015, 2, 11, 10, tzinfo=dt_timezone.utc\n )\n # set payment_due_date to None, otherwise advance_payment_required is\n # automatically set to True\n self.ticketed_event.payment_due_date = None\n self.ticketed_event.advance_payment_required = False\n self.ticketed_event.save()\n self.assertFalse(self.unpaid.cancelled)\n self.assertFalse(self.paid.cancelled)\n\n management.call_command('cancel_unpaid_ticket_bookings')\n # emails are sent to user per cancelled booking and studio once for all\n # cancelled bookings\n self.unpaid.refresh_from_db()\n self.paid.refresh_from_db()\n self.assertEqual(len(mail.outbox), 0)\n self.assertFalse(self.unpaid.cancelled)\n self.assertFalse(self.paid.cancelled)", "def test_busy_cook(cook_busy, product_for_cook):\n with pytest.raises(CustomWarning):\n assert cook_busy.cook_dish(product_for_cook)", "def test_cook_set_free(cook_busy, product_for_cook):\n cook_busy.set_free(True)\n # if product needs to be cooked\n assert product_for_cook.get_need_cook_status() is True\n cook_busy.cook_dish(product_for_cook)\n assert product_for_cook.get_need_cook_status() is False", "def test_dont_cancel_if_advance_payment_not_required(self, mock_tz):\n mock_tz.now.return_value = datetime(\n 2015, 2, 10, 10, tzinfo=dt_timezone.utc\n )\n # set payment_due_date to None, otherwise advance_payment_required is\n # automatically set to True\n self.event.payment_due_date = None\n self.event.advance_payment_required = False\n self.event.save()\n self.assertEqual(\n self.unpaid.status, 'OPEN', self.unpaid.status\n )\n self.assertEqual(\n self.paid.status, 'OPEN', self.paid.status\n )\n management.call_command('cancel_unpaid_bookings')\n # emails are sent to user per cancelled booking and studio once for all\n # cancelled bookings\n unpaid_booking = Booking.objects.get(id=self.unpaid.id)\n paid_booking = Booking.objects.get(id=self.paid.id)\n self.assertEqual(len(mail.outbox), 0)\n self.assertEqual(\n unpaid_booking.status, 'OPEN', unpaid_booking.status\n )\n self.assertEqual(\n paid_booking.status, 'OPEN', paid_booking.status\n )\n # auto_cancelled set to True only on cancelled bookings\n self.assertFalse(unpaid_booking.auto_cancelled)\n self.assertFalse(paid_booking.auto_cancelled)", "def clean_agrees_to_tos(field_name):\n\n @check_field_is_empty(field_name)\n def wrapper(self):\n \"\"\"Decorator wrapper method.\n \"\"\"\n agrees_to_tos = self.cleaned_data.get(field_name)\n\n if not site_logic.getToS(site_logic.getSingleton()):\n return agrees_to_tos\n\n # Site settings specify a site-wide ToS, so agreement is *required*\n if agrees_to_tos:\n return True\n\n # there was no agreement made so raise an error\n raise forms.ValidationError(\n 'The site-wide Terms of Service must be accepted to participate'\n ' on this site.')\n\n return wrapper", "def test_optout_course(self):\r\n url = reverse('change_email_settings')\r\n # This is a checkbox, so on the post of opting out (that is, an Un-check of the box),\r\n # the Post that is sent will not contain 'receive_emails'\r\n response = self.client.post(url, {'course_id': self.course.id.to_deprecated_string()})\r\n self.assertEquals(json.loads(response.content), {'success': True})\r\n\r\n self.client.logout()\r\n\r\n self.client.login(username=self.instructor.username, password=\"test\")\r\n self.navigate_to_email_view()\r\n\r\n test_email = {\r\n 
'action': 'Send email',\r\n 'send_to': 'all',\r\n 'subject': 'test subject for all',\r\n 'message': 'test message for all'\r\n }\r\n response = self.client.post(self.send_mail_url, test_email)\r\n self.assertEquals(json.loads(response.content), self.success_content)\r\n\r\n # Assert that self.student.email not in mail.to, outbox should be empty\r\n self.assertEqual(len(mail.outbox), 0)", "def soft_assert_cannot_make_proposal(info_page, soft_assert):\n soft_assert.expect(not info_page.is_propose_changes_btn_exists,\n \"'Propose Changes' button should not be displayed.\")", "def test_accept_all_terms_optional(api, account, given_terms):\n api.terms.get_all_terms.return_value = given_terms\n api.terms.accept_terms.reset_mock()\n account.accept_all_terms(optional=True)\n api.terms.get_required_terms.assert_not_called()\n api.terms.get_all_terms.assert_called()\n\n call_count = custom_st.count_terms(given_terms)\n assert api.terms.accept_terms.call_count == call_count", "def test_approve_agreement(self):\n pass", "def test_create_warranty(self):\n pass", "def test_not_accept(mocker, client, application, decision, should_send_email):\n order = create_test_order(application, 123, fulfilled=False)\n\n data = {\"req_reference_number\": make_reference_id(order), \"decision\": decision}\n mocker.patch(\n \"ecommerce.views.IsSignedByCyberSource.has_permission\", return_value=True\n )\n send_email = mocker.patch(\"ecommerce.api.MailgunClient.send_individual_email\")\n resp = client.post(reverse(\"order-fulfillment\"), data=data)\n assert resp.status_code == statuses.HTTP_200_OK\n assert len(resp.content) == 0\n order.refresh_from_db()\n assert Order.objects.count() == 1\n assert order.status == Order.FAILED\n\n if should_send_email:\n assert send_email.call_count == 1\n assert send_email.call_args[0] == (\n \"Order fulfillment failed, decision={decision}\".format(\n decision=\"something else\"\n ),\n \"Order fulfillment failed for order {order}\".format(order=order),\n \"ecommerce@example.com\",\n )\n else:\n assert send_email.call_count == 0", "def test_out_of_date(self):\n self.assertTrue(update_available(0.0))", "def test_open_no_setup(restaurant_only, hall_only, kitchen_only, delivery_only):\n # Here checks not all variants, cause restaurant_only is not isolated\n # object. 
They were previously check and working alongside\n # but affects result if together.\n\n # no setups\n with pytest.raises(CustomWarning):\n restaurant_only.open()\n assert restaurant_only.is_working is False, \"You need to setup Kitchen, Delivery and Hall\"\n\n # only kitchen\n with pytest.raises(CustomWarning):\n restaurant_only.set_kitchen(kitchen_only)\n restaurant_only.open()\n assert restaurant_only.is_working is False, \"You need to setup Kitchen, Delivery and Hall\"\n\n # only delivery and kitchen\n with pytest.raises(CustomWarning):\n restaurant_only.set_delivery(delivery_only)\n restaurant_only.set_kitchen(kitchen_only)\n restaurant_only.open()\n assert restaurant_only.is_working is False, \"You need to setup Kitchen, Delivery and Hall\"", "def test_negative_is_active_of_homework():\n assert not expired_hw.is_active()", "def violated(self) -> bool:\n ...", "def pops_agree(x):\n return len(x.all_open_closed) == 1", "def test_kyc_post_legal(self):\n pass", "def should_ask_if_examiner_want_to_give_another_chance(self):\n if self.assignment.is_electronic:\n return (self.delivery_status == \"corrected\" and not self.feedback.is_passing_grade) \\\n or self.delivery_status == 'closed-without-feedback'\n else:\n return False", "def test_individual_requirements(self):\n form_data = self.form_data(\n clear=['payer_name'], organization_name='Big Corporation',\n organization_contact='Mr A. Suit')\n form = DonationPaymentForm(data=form_data)\n self.assertFalse(form.is_valid())\n\n form_data['payer_name'] = 'Bob'\n form = DonationPaymentForm(data=form_data)\n self.assertTrue(form.is_valid())\n self.assertFalse('organization_name' in form.cleaned_data)\n self.assertFalse('organization_contact' in form.cleaned_data)", "def test_email_warnings_only_sent_for_unpaid(self, mock_tz):\n mock_tz.now.return_value = datetime(\n 2015, 2, 11, 10, 0, tzinfo=dt_timezone.utc\n )\n\n # payment_due_date 2015/2/11 23:59 (within 24hrs - warnings sent)\n ticketed_event = baker.make_recipe(\n 'booking.ticketed_event_max10',\n date=datetime(2015, 2, 14, 18, 0, tzinfo=dt_timezone.utc),\n payment_open=True,\n ticket_cost=10,\n payment_due_date=datetime(2015, 2, 11, tzinfo=dt_timezone.utc),\n )\n\n baker.make(\n TicketBooking, ticketed_event=ticketed_event, paid=False,\n date_booked=datetime(2015, 2, 1, 0, 0, tzinfo=dt_timezone.utc),\n _quantity=5,\n )\n baker.make(\n TicketBooking, ticketed_event=ticketed_event, paid=True,\n date_booked=datetime(2015, 2, 1, 0, 0, tzinfo=dt_timezone.utc),\n _quantity=5,\n )\n _add_user_email_addresses(TicketBooking)\n for ticket_booking in TicketBooking.objects.all():\n baker.make(Ticket, ticket_booking=ticket_booking)\n\n management.call_command('email_ticket_booking_warnings')\n self.assertEqual(len(mail.outbox), 5)\n for ticket_booking in TicketBooking.objects.filter(paid=False):\n self.assertTrue(ticket_booking.warning_sent)\n for ticket_booking in TicketBooking.objects.filter(paid=True):\n self.assertFalse(ticket_booking.warning_sent)", "def test_legal_address_is_complete(empty_field):\n address = LegalAddressFactory.create(country=\"US\")\n if empty_field:\n setattr(address, empty_field, \"\")\n assert address.is_complete is False\n else:\n assert address.is_complete is True", "def test_accept_all_terms_required(api, account, given_terms):\n api.terms.get_required_terms.return_value = given_terms\n api.terms.accept_terms.reset_mock()\n account.accept_all_terms()\n api.terms.get_required_terms.assert_called()\n api.terms.get_all_terms.assert_not_called()\n\n call_count = 
custom_st.count_terms(given_terms)\n assert api.terms.accept_terms.call_count == call_count", "def test_deny_pending_payment(self):\n pass", "def test_reject_proposal_demand(self):\n pass", "def test_dont_cancel_bookings_within_cancellation_period_without_warning_sent(self, mock_tz):\n mock_tz.now.return_value = datetime(2015, 2, 10, 18, 0, tzinfo=dt_timezone.utc)\n # reset warning flags\n self.unpaid.warning_sent = False\n self.unpaid.date_warning_sent = None\n self.unpaid.save()\n\n self.assertEqual(self.unpaid.status, 'OPEN')\n self.assertFalse(self.unpaid.warning_sent)\n self.assertIsNone(self.unpaid.date_warning_sent)\n management.call_command('cancel_unpaid_bookings')\n self.unpaid.refresh_from_db()\n # still open\n self.assertEqual(self.unpaid.status, 'OPEN')\n\n # set the warning sent flag to < 2hrs ago\n self.unpaid.warning_sent = True\n self.unpaid.date_warning_sent = datetime(2015, 2, 10, 17, 0, tzinfo=dt_timezone.utc)\n self.unpaid.save()\n management.call_command('cancel_unpaid_bookings')\n self.unpaid.refresh_from_db()\n # still open\n self.assertEqual(self.unpaid.status, 'OPEN')\n\n # set the warning sent flag to > 2hrs ago\n self.unpaid.warning_sent = True\n self.unpaid.date_warning_sent = datetime(2015, 2, 10, 15, 0, tzinfo=dt_timezone.utc)\n self.unpaid.save()\n management.call_command('cancel_unpaid_bookings')\n self.unpaid.refresh_from_db()\n # now cancelled\n self.assertEqual(self.unpaid.status, 'CANCELLED')", "def test_corporate_approval_allowed(self):\n p = self.make('Prescription')\n self.assertFalse(p.can_corporate_approve)\n self.assertFalse(p.has_corporate_approval)\n\n fields = ['priority', 'location', 'perimeter', 'area', 'last_season',\n 'last_year', 'treatment_percentage', 'allocation']\n for field in fields:\n self.set_cbas_attributes(p, exclude_fields=[field])\n self.assertFalse(p.can_corporate_approve)\n self.set_cbas_attributes(p)\n self.assertTrue(p.can_corporate_approve)", "def test_dont_cancel_bookings_in_cancellation_period_if_warning_not_sent(self, mock_tz):\n mock_tz.now.return_value = datetime(2015, 2, 11, 12, 0, tzinfo=dt_timezone.utc)\n\n # self.ticketed_event payment due date 2015/2/11 23:59\n\n unpaid_no_warning = baker.make(\n TicketBooking,\n ticketed_event=self.ticketed_event,\n paid=False,\n date_booked=datetime(2015, 2, 10, 5, 30, tzinfo=dt_timezone.utc),\n warning_sent=False\n )\n unpaid_warning_within_2_hrs = baker.make(\n TicketBooking,\n ticketed_event=self.ticketed_event,\n paid=False,\n date_booked=datetime(2015, 2, 10, 5, 30, tzinfo=dt_timezone.utc),\n warning_sent=True,\n date_warning_sent=datetime(2015, 2, 11, 10, 30, tzinfo=dt_timezone.utc),\n )\n unpaid_warning_more_than_2_hrs_ago = baker.make(\n TicketBooking,\n ticketed_event=self.ticketed_event,\n paid=False,\n date_booked=datetime(2015, 2, 10, 5, 30, tzinfo=dt_timezone.utc),\n warning_sent=True,\n date_warning_sent=datetime(2015, 2, 11, 9, 30, tzinfo=dt_timezone.utc),\n )\n\n self.assertFalse(unpaid_no_warning.cancelled)\n self.assertFalse(unpaid_warning_within_2_hrs.cancelled)\n self.assertFalse(unpaid_warning_more_than_2_hrs_ago.cancelled)\n\n management.call_command('cancel_unpaid_ticket_bookings')\n unpaid_no_warning.refresh_from_db()\n unpaid_warning_within_2_hrs.refresh_from_db()\n unpaid_warning_more_than_2_hrs_ago.refresh_from_db()\n self.assertFalse(unpaid_no_warning.cancelled)\n self.assertFalse(unpaid_warning_within_2_hrs.cancelled)\n self.assertTrue(unpaid_warning_more_than_2_hrs_ago.cancelled)", "def test_check_inputs_hospital_prefs_all_nonempty(game):\n\n 
hospital = game.hospitals[0]\n hospital.prefs = []\n\n with pytest.warns(PlayerExcludedWarning) as record:\n game._check_inputs_player_prefs_nonempty(\"hospitals\", \"residents\")\n\n assert len(record) == 1\n assert hospital.name in str(record[0].message)\n\n if game.clean:\n assert hospital not in game.hospitals", "def test_non_contractor_acks_receipt(self):\n res = self.client.post(self.url)\n self.assertEqual(res.status_code, 403)", "def test_noTicket():\n assert testUser1.buyTicket(None) == False", "def test_falsepositive(client):\n g.test_authorized_for = []\n res = client.get(\"/v0/falsepositive\" + get_request_args)\n assert \"Thanks! We’ve marked this as a false positive\" in res.data.decode(\"utf-8\")", "def test_ensure_state_untouched_if_not_necessary(self, setState):\n advisory = errata.Advisory(errata_id=123, errata_state='QE')\n advisory.ensure_state('QE')\n setState.assert_not_called()", "def test_get_agreement(self):\n pass", "def test_no_email_to_studio_if_setting_not_on(self, mock_tz):\n mock_tz.now.return_value = datetime(2015, 2, 11, 10, tzinfo=dt_timezone.utc)\n for i in range(5):\n baker.make(\n TicketBooking, ticketed_event=self.ticketed_event,\n cancelled=False, paid=False,\n user__email=\"unpaid_user{}@test.com\".format(i),\n date_booked= datetime(2015, 2, 9, tzinfo=dt_timezone.utc),\n warning_sent=True,\n date_warning_sent= datetime(2015, 2, 9, tzinfo=dt_timezone.utc),\n )\n for booking in TicketBooking.objects.all():\n baker.make(Ticket, ticket_booking=booking)\n\n management.call_command('cancel_unpaid_ticket_bookings')\n # emails are sent to user per cancelled booking (6) (these 5 plus\n # self.unpaid); none to studio\n self.assertEqual(len(mail.outbox), 6)\n cancelled_booking_emails = [\n booking.user.email for booking\n in TicketBooking.objects.filter(cancelled=True)\n ]\n self.assertEqual(\n cancelled_booking_emails, [email.to[0] for email in mail.outbox]\n )", "def test_create_consent_fail_on_incorrect_status(client, session, tokens):\n data = {\n \"type\": \"cookie\",\n \"category\": \"strictly_necessary\",\n \"status\": \"akcepted\",\n }\n response = client.post(\n \"/consent\",\n json=data,\n headers={\"Authorization\": f\"Bearer {tokens['write']}\"},\n )\n assert response.status_code == 422", "def test_buyTicket_NotForSale():\n old_venue_balance = testVenue.wallet\n assert not testUser2.buyTicket(testTicket2)\n assert testTicket2 not in testUser2.inventory\n assert not testTicket1.for_sale\n assert testUser2.wallet == 500\n assert testVenue.wallet == old_venue_balance", "def test_email_warnings_only_sent_for_payment_not_confirmed(self, mock_tz):\n mock_tz.now.return_value = datetime(2015, 2, 10, 10, tzinfo=dt_timezone.utc)\n # cancellation period starts 2015/2/13 17:00\n # payment_due_date 2015/2/11 23:59\n event = baker.make_recipe(\n 'booking.future_EV',\n date=datetime(2015, 2, 13, 18, 0, tzinfo=dt_timezone.utc),\n payment_open=True,\n cost=10,\n payment_due_date=datetime(2015, 2, 11, tzinfo=dt_timezone.utc),\n cancellation_period=1)\n baker.make_recipe(\n 'booking.booking', event=event, paid=False,\n payment_confirmed=False,\n date_booked=datetime(2015, 2, 9, 19, 30, tzinfo=dt_timezone.utc),\n _quantity=3,\n )\n baker.make_recipe(\n 'booking.booking', event=event, paid=True,\n payment_confirmed=True,\n date_booked=datetime(2015, 2, 9, 19, 30, tzinfo=dt_timezone.utc),\n _quantity=3,\n )\n _add_user_email_addresses(Booking)\n \n management.call_command('email_warnings')\n self.assertEqual(len(mail.outbox), 3)\n for booking in 
Booking.objects.filter(payment_confirmed=False):\n self.assertTrue(booking.warning_sent)\n for booking in Booking.objects.filter(payment_confirmed=True):\n self.assertFalse(booking.warning_sent)", "def test_kyc_get_validation_legal(self):\n pass", "def test_may_certify(self):\r\n self.assertTrue(self.past_show_certs.may_certify())\r\n self.assertTrue(self.past_noshow_certs.may_certify())\r\n self.assertTrue(self.future_show_certs.may_certify())\r\n self.assertFalse(self.future_noshow_certs.may_certify())", "def test_verification_status_invisible(self):\r\n self.client.login(username=\"jack\", password=\"test\")\r\n self.check_verification_status_off('verified', 'You\\'re enrolled as a verified student')\r\n self.check_verification_status_off('honor', 'You\\'re enrolled as an honor code student')\r\n self.check_verification_status_off('audit', 'You\\'re auditing this course')", "def test_dont_cancel_rebookings_within_cancellation_period_without_warning_sent(self, mock_tz):\n mock_tz.now.return_value = datetime(2015, 2, 10, 18, 0, tzinfo=dt_timezone.utc)\n # cancel booking to reset warning flags\n self.unpaid.status = \"CANCELLED\"\n self.unpaid.save()\n # rebook\n self.unpaid.status = \"OPEN\"\n self.unpaid.date_rebooked = datetime(2015, 2, 10, 12, 30, tzinfo=dt_timezone.utc)\n self.unpaid.save()\n\n self.assertEqual(self.unpaid.status, 'OPEN')\n self.assertFalse(self.unpaid.warning_sent)\n self.assertIsNone(self.unpaid.date_warning_sent)\n management.call_command('cancel_unpaid_bookings')\n self.unpaid.refresh_from_db()\n # still open\n self.assertEqual(self.unpaid.status, 'OPEN')\n\n # set the warning sent flag to < 2hrs ago\n self.unpaid.warning_sent = True\n self.unpaid.date_warning_sent = datetime(2015, 2, 10, 17, 0, tzinfo=dt_timezone.utc)\n self.unpaid.save()\n management.call_command('cancel_unpaid_bookings')\n self.unpaid.refresh_from_db()\n # still open\n self.assertEqual(self.unpaid.status, 'OPEN')\n\n # set the warning sent flag to > 2hrs ago\n self.unpaid.warning_sent = True\n self.unpaid.date_warning_sent = datetime(2015, 2, 10, 15, 0, tzinfo=dt_timezone.utc)\n self.unpaid.save()\n management.call_command('cancel_unpaid_bookings')\n self.unpaid.refresh_from_db()\n # now cancelled\n self.assertEqual(self.unpaid.status, 'CANCELLED')", "def test_is_active_active_not_between(self) -> None:\n today = date.today()\n start_date = today - timedelta(days=today.weekday() - 2)\n end_date = today - timedelta(days=today.weekday() - 1)\n mode = HolidayMode(True, start_date, end_date, 15)\n self.assertFalse(mode.is_applied)", "def test_unsuccessful_verification(self):\n for i in (-4, -3, 3, 4):\n description = \"TOTP verified for `i={0}`\".format(i)\n calculated = self.algorithm.calculate(self.device.secret, drift=i)\n confirmed = self.relate.verify(calculated, save=False)\n\n self.assertFalse(confirmed, description)\n\n self.relate.confirm = False", "def test_manage_tos_field(self):\n name = self.form.name_for_tos or 'tos_field'\n initial_is_off = self.form.tos_required is False\n found = self.form.fields.get(name, None)\n original_critical = deepcopy(self.form.critical_fields)\n self.form.tos_required = True\n expected = deepcopy(original_critical)\n name = getattr(self.form, 'name_for_tos', None) or ''\n tos_opts = {'names': (name, ), 'alt_field': 'tos_field', 'computed': False}\n tos_opts.update({'name': 'tos_field', 'field': self.form_class.tos_field})\n expected.update(name_for_tos=tos_opts)\n initial_kwargs = {}\n returned_kwargs = self.form.setup_critical_fields(**initial_kwargs)\n 
actual = self.form.critical_fields\n\n self.assertTrue(initial_is_off)\n self.assertIsNone(found)\n self.assertDictEqual(initial_kwargs, returned_kwargs)\n self.assertDictEqual(expected, actual)\n\n self.form.fields.pop('tos_field', None)\n self.form.tos_required = False\n self.form.critical_fields = original_critical\n reset_kwargs = self.form.setup_critical_fields(**initial_kwargs)\n self.assertDictEqual({}, reset_kwargs)", "def test_no_email_to_studio_if_setting_not_on(self, mock_tz):\n mock_tz.now.return_value = datetime(2015, 2, 10, 10, tzinfo=dt_timezone.utc)\n for i in range(5):\n baker.make_recipe(\n 'booking.booking', event=self.event,\n status='OPEN', paid=False,\n payment_confirmed=False,\n user__email=\"unpaid_user{}@test.com\".format(i),\n date_booked= datetime(2015, 2, 9, tzinfo=dt_timezone.utc),\n warning_sent=True,\n date_warning_sent= datetime(2015, 2, 9, 2, tzinfo=dt_timezone.utc),\n )\n\n management.call_command('cancel_unpaid_bookings')\n # emails are sent to user per cancelled booking (6); none to studio\n self.assertEqual(len(mail.outbox), 6)\n cancelled_booking_emails = [\n [booking.user.email] for booking\n in Booking.objects.filter(status='CANCELLED')\n ]\n self.assertEqual(\n sorted(cancelled_booking_emails),\n sorted([email.to for email in mail.outbox])\n )", "def test_ticket_not_consumed(self):\n st = ServiceTicketFactory()\n self.assertFalse(st.is_consumed())", "def test_can_not_exceed_quota(self):\n create_test_booking(self.user, self.first_day, 8, facility='g')\n create_test_booking(self.user, self.first_day, 9, facility='0')\n create_test_booking(self.user, self.first_day, 10, facility='g')\n create_test_booking(self.user, self.first_day, 11, facility='h')\n create_test_booking(self.user, self.first_day, 12, facility='h')\n create_test_booking(self.user, self.first_day, 13, facility='g')\n create_test_booking(self.user, self.first_day, 14, facility='x')\n create_test_booking(self.user, self.first_day, 15, facility='y')\n create_test_booking(self.user, self.first_day, 16, facility='g')\n create_test_booking(self.user, self.first_day, 17, facility='g')\n\n date = datetime(2030, 1, 1, 8)\n\n response = self.client.post(\n reverse('bookings', kwargs={'facility': 'g'}), {'book': str(date.timestamp())})\n\n context = response.context\n bookings = context[\"bookings\"]\n\n self.assertEqual(response.status_code, 403)\n self.assertEqual(context[\"quota\"], 0)\n self.assertEqual(type(context[\"info\"]), QuotaExceededAlert)", "def assert_false(received,message=None):\n if (received):\n if message is None:\n message = 'assert_false: %s evaluates to True' % repr(received)\n quit_with_error(message)", "def test_dont_cancel_for_events_with_no_cost(self, mock_tz):\n mock_tz.now.return_value = datetime(\n 2015, 2, 10, 10, tzinfo=dt_timezone.utc\n )\n self.event.cost = 0\n self.event.save()\n self.assertEqual(\n self.unpaid.status, 'OPEN', self.unpaid.status\n )\n self.assertEqual(\n self.paid.status, 'OPEN', self.paid.status\n )\n management.call_command('cancel_unpaid_bookings')\n # emails are sent to user per cancelled booking and studio once for all\n # cancelled bookings\n unpaid_booking = Booking.objects.get(id=self.unpaid.id)\n paid_booking = Booking.objects.get(id=self.paid.id)\n self.assertEqual(len(mail.outbox), 0)\n self.assertEqual(\n unpaid_booking.status, 'OPEN', unpaid_booking.status\n )\n self.assertEqual(\n paid_booking.status, 'OPEN', paid_booking.status\n )\n\n # auto_cancelled set to True only on cancelled bookings\n 
self.assertFalse(unpaid_booking.auto_cancelled)\n self.assertFalse(paid_booking.auto_cancelled)", "def test_inactive_account(self):", "def test_dont_cancel_for_events_with_no_cost(self, mock_tz):\n mock_tz.now.return_value = datetime(\n 2015, 2, 11, 10, tzinfo=dt_timezone.utc\n )\n self.ticketed_event.ticket_cost = 0\n self.ticketed_event.save()\n self.assertFalse(self.unpaid.cancelled)\n self.assertFalse(self.paid.cancelled)\n\n management.call_command('cancel_unpaid_ticket_bookings')\n # emails are sent to user per cancelled booking and studio once for all\n # cancelled bookings\n self.unpaid.refresh_from_db()\n self.paid.refresh_from_db()\n self.assertEqual(len(mail.outbox), 0)\n self.assertFalse(self.unpaid.cancelled)\n self.assertFalse(self.paid.cancelled)", "def test_validate_ticker_false(self):\n ticker = 'xxx'\n result = stock_helper.validate_ticker(ticker)\n self.assertEqual(result, False)", "def _assert_cases_state(self, closed=False):\n assert_method = self.assertTrue if closed else self.assertFalse\n for model in AttendeeModel.objects.by_domain(self.domain, include_closed=True):\n assert_method(model.case.closed)", "def test_is_active_active_no_dates(self) -> None:\n mode = HolidayMode(True, None, None, None)\n self.assertFalse(mode.is_applied)", "def testConditionChecking(self):\n\n state = State.from_problem(self.prob)\n \n drive = self.dom.get_action(\"drive\")\n with drive.instantiate([\"agent\", \"tru1\", \"apt1\"], self.prob):\n self.assert_(state.is_satisfied(drive.precondition))\n\n with drive.instantiate([\"agent\", \"tru1\", \"apt2\"], self.prob):\n self.assertFalse(state.is_satisfied(drive.precondition))", "def _assert_escalation_email_available(self, available):\n func = self.assertIn if available else self.assertNotIn\n response = self.client.get(self.url)\n func('escalation-email-container', response.content.decode('utf-8'))", "def test_early_out_withdrawal(self):\n with FakeClock(TIME_1):\n response = self.send_post(\"Participant\", self.participant)\n participant_id = response[\"participantId\"]\n response[\"providerLink\"] = [self.provider_link_2]\n response[\"withdrawalStatus\"] = \"EARLY_OUT\"\n response[\"withdrawalTimeStamp\"] = 1563907344169\n response[\"suspensionStatus\"] = \"NOT_SUSPENDED\"\n response[\"withdrawalReason\"] = \"TEST\"\n response[\"withdrawalReasonJustification\"] = \"This was a test account.\"\n path = \"Participant/%s\" % participant_id\n self.send_put(path, response, headers={\"If-Match\": 'W/\"1\"'})\n participant = self.send_get(path)\n self.assertEqual(participant[\"withdrawalStatus\"], \"EARLY_OUT\")\n self.assertEqual(participant[\"withdrawalTime\"], '2018-01-01T00:00:00')\n self.assertEqual(participant[\"withdrawalAuthored\"], '2019-07-23T18:42:24')", "def test_43_terms_of_use_and_data(self):\r\n res = self.app.get('account/signin', follow_redirects=True)\r\n assert \"/help/terms-of-use\" in res.data, res.data\r\n assert \"http://opendatacommons.org/licenses/by/\" in res.data, res.data\r\n\r\n res = self.app.get('account/register', follow_redirects=True)\r\n assert \"http://okfn.org/terms-of-use/\" in res.data, res.data\r\n assert \"http://opendatacommons.org/licenses/by/\" in res.data, res.data", "def test_check_is_required(fake_check):\n assert fake_check.is_required()", "def test_ask_yes_no_2(self, input_mock):\n response = basic.ask_yes_no()\n self.assertFalse(response)", "def test_check_inputs_resident_prefs_all_nonempty(game):\n\n resident = game.residents[0]\n resident.prefs = []\n\n with 
pytest.warns(PlayerExcludedWarning) as record:\n game._check_inputs_player_prefs_nonempty(\"residents\", \"hospitals\")\n\n assert len(record) == 1\n assert resident.name in str(record[0].message)\n\n if game.clean:\n assert resident not in game.residents", "def check(self,):\n self.is_valid_according_policy()", "def test_kyc_get_legal(self):\n pass", "def test_is_approved_no_approval(time_record_factory):\n record = time_record_factory()\n\n assert not record.is_approved", "def test_validate_no_offices():\n conversion_json = copy.deepcopy(BEN_CONVERSION)\n del conversion_json['offices']\n\n is_valid, errors = validate(conversion_json, 'conversion')\n\n if errors:\n for err in errors:\n print(err.message)\n print(errors)\n\n assert not is_valid", "def agreements():\n pass", "def clean_tos(self):\r\n if self.cleaned_data.get('tos', False):\r\n return self.cleaned_data['tos']\r\n raise forms.ValidationError(_(u'You must agree to the terms to register'))", "def test_has_enough_money_handles_insufficient_funds(self):\n # Params\n f_money_collected = 2.00\n f_chocolate_price = 2.25\n\n # Returns\n return_1 = 'Insufficient funds... Dispensing coins inserted.\\n'\n\n # Calls\n string_1 = has_enough_money(f_money_collected, f_chocolate_price)\n\n # Asserts\n self.assertEqual(string_1, return_1)", "def assert_true(received,message=None):\n if (not received):\n if message is None:\n message = 'assert_true: %s evaluates to False' % repr(received)\n quit_with_error(message)", "def test_yes_option_disabled(\n self, wait_tx_settled_mock, confirm_mock, do_transfer_mock\n ):\n password_option = self.get_password_args(self.PASSWORD)\n self.invoke(\n \"transfer\",\n self.LEDGER_ID,\n self.get_address(self.LEDGER_ID, self.PASSWORD),\n \"100000\",\n \"100\",\n *password_option,\n )\n confirm_mock.assert_called_once()", "def test_email_warnings_sent_if_no_payment_due_date(self, mock_tz):\n mock_tz.now.return_value = datetime(\n 2015, 2, 11, 19, 0, tzinfo=dt_timezone.utc\n )\n\n # cancellation period starts 2015/2/13 18:00\n # payment_due_date None\n event = baker.make_recipe(\n 'booking.future_EV',\n date=datetime(2015, 2, 14, 18, 0, tzinfo=dt_timezone.utc),\n payment_open=True,\n cost=10,\n payment_due_date=None,\n cancellation_period=24)\n\n baker.make_recipe(\n 'booking.booking', event=event, paid=False,\n payment_confirmed=False,\n date_booked=datetime(2015, 2, 11, 14, 30, tzinfo=dt_timezone.utc),\n _quantity=5,\n )\n _add_user_email_addresses(Booking)\n management.call_command('email_warnings')\n self.assertEqual(len(mail.outbox), 5)", "def test_show_correctness_never(self, has_staff_access):\n assert not ShowCorrectness.correctness_available(show_correctness=ShowCorrectness.NEVER,\n has_staff_access=has_staff_access)", "def _check(self):\n assert isinstance(self._price, int)\n assert self._price >= 0\n assert isinstance(self._units, int)\n assert self._units > 0\n assert self._side == OrderSide.BUY or self._side == OrderSide.SELL\n assert self._type == OrderType.LIMIT or self._type == OrderType.CANCEL\n assert isinstance(self._market, int)\n assert self._market > 0", "def verify_report_cancellation(self):\n if self.pci_compliance_table_empty:\n return True\n else:\n raise AssertionError(\"ReportsPciCompliancePage: Report generated, cancel did not work. 
Traceback: %s\" %traceback.format_exc())", "def test_organization_requirements(self):\n form_data = self.form_data(is_org=True)\n form = DonationPaymentForm(data=form_data)\n self.assertFalse(form.is_valid())\n\n form_data['organization_name'] = 'Org org'\n form = DonationPaymentForm(data=form_data)\n self.assertFalse(form.is_valid())\n form_data['organization_contact'] = 'Contact'\n form = DonationPaymentForm(data=form_data)\n self.assertTrue(form.is_valid())\n self.assertFalse('payer_name' in form.cleaned_data)", "def test_review_is_done(self):\n self.assertFalse(processes.review_is_done(None))\n self.assertFalse(processes.review_is_done(0))\n self.assertFalse(processes.review_is_done(models.REVIEW_PENDING))\n self.assertFalse(processes.review_is_done(models.REVIEW_ISSUES_OPEN))\n self.assertTrue(processes.review_is_done(models.REVIEW_ISSUES_ADDRESSED))\n self.assertTrue(processes.review_is_done(models.REVIEW_NA))", "def test_environmental_impact_compliance():\n emissions = 12000\n legal_limit = 300\n assert emissions < legal_limit", "def test_update_condition_false(self):\n original_alt_info = getattr(self.form, 'alt_field_info', None)\n expected_label = 'alt_test_feature'\n test_method = getattr(self.form, 'condition_' + expected_label, None)\n alt_info = getattr(self, 'alt_field_info', None)\n expected = {}\n self.form.alt_field_info = alt_info\n self.form.test_condition_response = False\n actual = self.form.get_alt_field_info()\n\n self.assertIsNotNone(alt_info)\n self.assertIsNotNone(test_method)\n self.assertFalse(test_method())\n self.assertIsNotNone(expected)\n self.assertIn(expected_label, alt_info)\n self.assertEqual(expected, actual)\n\n self.form.test_condition_response = False\n self.form.alt_field_info = original_alt_info\n if original_alt_info is None:\n del self.form.alt_field_info", "def test_subscription_transaction_declined_suspend(self):\n\n # We'll create a plan that starts here\n start_date = dt.date(2019, 1, 1)\n\n # And the trial date ends here too\n trial_end_date = dt.date(2019, 1, 1)\n\n # The customer will use some metered features here\n metered_usage_on = dt.date(2019, 1, 10)\n\n # Docs will be generated to bill here.\n prev_billing_date = dt.date(2019, 1, 3)\n\n # So, the customer grace period ends here.\n # First billing interval: dt.date(2019, 2, 1)\n # \n billing_grace_exp = dt.date(2019, 2, 3)\n\n # The next billing check should discover that the subscription\n # is unpaid.\n # Billing due date is: dt.date(2019, 2, 6)\n # With the grace period: dt.date(2019, 2, 9)\n # \n billing_post_grace_check = dt.date(2019, 2, 10)\n\n # Create a customer\n #\n customer = CustomerFactory.create(sales_tax_percent=Decimal('0.00'),\n payment_due_days=3)\n PaymentMethodFactory.create(\n payment_processor=triggered_processor, customer=customer,\n canceled=False,\n verified=True,\n )\n\n # Create a metered feature\n #\n mf_price = Decimal('2.5')\n metered_feature = MeteredFeatureFactory(\n included_units_during_trial=Decimal('0.00'),\n price_per_unit=mf_price)\n currency = 'USD'\n\n # Crate a plan with metered features. 
Generate the invoice after\n # the 5 day trial period, the plan will be billed every 30 days.\n # \n generate_after = 5\n plan = PlanFactory.create(interval=Plan.INTERVALS.DAY,\n interval_count=30,\n generate_after=generate_after,\n enabled=True,\n amount=Decimal('20.00'),\n trial_period_days=1,\n metered_features=[metered_feature],\n currency=currency)\n\n # Subscribe the customer\n #\n subscription = SubscriptionFactory.create(\n plan=plan, start_date=start_date, customer=customer)\n subscription.activate()\n subscription.save()\n\n # Log some metered usage\n consumed_1 = Decimal('5.00')\n consumed_2 = Decimal('5.00')\n mf_log = MeteredFeatureUnitsLogFactory.create(\n subscription=subscription,\n metered_feature=metered_feature,\n start_date=metered_usage_on,\n end_date=subscription.trial_end,\n consumed_units=consumed_1)\n\n # Generate the docs\n call_command('generate_docs',\n billing_date=prev_billing_date,\n stdout=self.output)\n\n proforma = Proforma.objects.first()\n\n assert proforma.proforma_entries.count() != 0\n assert Subscription.objects.all().count() == 1\n assert Invoice.objects.all().count() == 0\n assert Proforma.objects.all()[0].total > Decimal('0.00')\n\n # Consume more units\n mf_log.consumed_units += consumed_2\n mf_log.save()\n\n call_command('generate_docs',\n billing_date=billing_grace_exp,\n stdout=self.output)\n\n assert Proforma.objects.all().count() != 0\n assert Invoice.objects.all().count() == 0\n\n for pf in Proforma.objects.all():\n # # Issue the proforma to generate transactions\n # proforma = Proforma.objects.all()[1]\n pf.issue()\n pf.save()\n\n self.assertEqual(pf.state, Proforma.STATES.ISSUED)\n # Fail the transaction\n for tx in pf.transactions:\n # tx = proforma.transactions[0]\n tx.fail()\n tx.save()\n self.assertEqual(tx.state, Transaction.States.Failed)\n\n assert Transaction.objects.all().count() != 0\n\n call_command('check_subscriptions',\n billing_date=billing_post_grace_check,\n stdout=self.output)\n\n subscr = Subscription.objects.first()\n\n # Scan for subscriptions with unpaid documents\n logging.debug(\"subscr %s\" % subscr)\n self.assertEqual(subscr.state, Subscription.STATES.CANCELED)", "def test_non_thesis(non_thesis):\n assert non_thesis is None", "def test_kyc_post_legal_share_holder(self):\n pass", "def test_email_warnings_only_sent_for_open_bookings(self, mock_tz):\n mock_tz.now.return_value = datetime(\n 2015, 2, 10, 10, tzinfo=dt_timezone.utc\n )\n event = baker.make_recipe(\n 'booking.future_EV',\n date=datetime(2015, 2, 13, 18, 0, tzinfo=dt_timezone.utc),\n payment_open=True,\n cost=10,\n payment_due_date=datetime(2015, 2, 11, tzinfo=dt_timezone.utc),\n cancellation_period=1)\n baker.make_recipe(\n 'booking.booking', event=event, paid=False,\n payment_confirmed=False, status='OPEN',\n date_booked=datetime(2015, 2, 9, 19, 30, tzinfo=dt_timezone.utc),\n _quantity=3,\n )\n baker.make_recipe(\n 'booking.booking', event=event, paid=False,\n payment_confirmed=False, status='CANCELLED',\n date_booked=datetime(2015, 2, 9, 19, 30, tzinfo=dt_timezone.utc),\n _quantity=3,\n )\n _add_user_email_addresses(Booking)\n \n management.call_command('email_warnings')\n self.assertEqual(len(mail.outbox), 3)\n for booking in Booking.objects.filter(status='OPEN'):\n self.assertTrue(booking.warning_sent)\n for booking in Booking.objects.filter(status='CANCELLED'):\n self.assertFalse(booking.warning_sent)", "def test_cancellations(self):\n self.assertEqual(self.meter * self.imeter, 1)\n self.assertEqual(self.second * self.isecond, 1)\n 
self.assertEqual(self.kgram * self.ikgram, 1)", "def test_valid_account_create_flag_off(self):\n ident_choice = UserIdentificationLabel.objects.get(slug=\"ident2\")\n form_data = {\n 'email': 'BamBam@Example.com',\n 'organization_name': 'transhealth',\n 'password1': 'BEDrocks@123',\n 'password2': 'BEDrocks@123',\n 'first_name': 'BamBam',\n 'last_name': 'Rubble',\n 'identification_choice': str(ident_choice.pk),\n }\n response = self.client.post(self.url, form_data, follow=True)\n self.assertEqual(response.status_code, 404)", "def no_payment_required(self):", "def test_is_affordable_off_chain(self):\n tx_message = TransactionMessage(\n performative=TransactionMessage.Performative.PROPOSE_FOR_SETTLEMENT,\n skill_callback_ids=[PublicId(\"author\", \"a_skill\", \"0.1.0\")],\n tx_id=self.tx_id,\n tx_sender_addr=self.tx_sender_addr,\n tx_counterparty_addr=self.tx_counterparty_addr,\n tx_amount_by_currency_id={\"FET\": -20},\n tx_sender_fee=0,\n tx_counterparty_fee=0,\n tx_quantities_by_good_id={\"good_id\": 10},\n ledger_id=\"off_chain\",\n info=self.info,\n tx_nonce=\"Transaction nonce\",\n )\n\n assert self.decision_maker._is_affordable(tx_message)", "def test_wip(self):\n self.assertTrue(not return_true())", "def check_observatory(self):\n assert self.observatory in ALL_OBSERVATORIES, \\\n \"Invalid observatory \" + repr(self.observatory) + \" in \" + repr(self.filename)", "def test_send_mail_unauthorized(self):\r\n\r\n response = self.client.post(\r\n self.url, {\r\n 'action': 'Send email',\r\n 'to_option': 'all',\r\n 'subject': \"Welcome to the course!\",\r\n 'message': \"Lets start with an introduction!\"\r\n }\r\n )\r\n self.assertContains(response, \"Email is not enabled for this course.\")", "def test_dont_cancel_for_events_in_the_past(self, mock_tz):\n mock_tz.now.return_value = datetime(\n 2016, 2, 10, 10, tzinfo=dt_timezone.utc\n )\n self.assertEqual(\n self.unpaid.status, 'OPEN', self.unpaid.status\n )\n self.assertEqual(\n self.paid.status, 'OPEN', self.paid.status\n )\n self.assertTrue(timezone.now() > self.event.date)\n management.call_command('cancel_unpaid_bookings')\n # emails are sent to user per cancelled booking and studio once\n # for all cancelled bookings\n unpaid_booking = Booking.objects.get(id=self.unpaid.id)\n paid_booking = Booking.objects.get(id=self.paid.id)\n self.assertEqual(len(mail.outbox), 0)\n self.assertEqual(\n unpaid_booking.status, 'OPEN', unpaid_booking.status\n )\n self.assertEqual(\n paid_booking.status, 'OPEN', paid_booking.status\n )\n\n # auto_cancelled set to True only on cancelled bookings\n self.assertFalse(unpaid_booking.auto_cancelled)\n self.assertFalse(paid_booking.auto_cancelled)", "def test_dont_cancel_for_already_cancelled(self, mock_tz):\n mock_tz.now.return_value = datetime(\n 2015, 2, 10, 10, tzinfo=dt_timezone.utc\n )\n self.unpaid.status = 'CANCELLED'\n self.unpaid.save()\n self.assertEqual(\n self.unpaid.status, 'CANCELLED', self.unpaid.status\n )\n management.call_command('cancel_unpaid_bookings')\n # emails are sent to user per cancelled booking and studio once\n # for all cancelled bookings\n unpaid_booking = Booking.objects.get(id=self.unpaid.id)\n self.assertEqual(len(mail.outbox), 0)\n self.assertEqual(\n unpaid_booking.status, 'CANCELLED', unpaid_booking.status\n )\n\n # auto_cancelled set to True only on cancelled bookings\n self.assertFalse(unpaid_booking.auto_cancelled)\n self.assertFalse(self.paid.auto_cancelled)", "def test_is_active_without_ops(self):\n\n self.veh.health = 2\n self.veh.operators = []\n 
self.assertFalse(self.veh.is_active)", "def test_available(self):\n feature_guard = _make_requires(True, \"Error text\")\n results = []\n\n @feature_guard\n def inner():\n results.append(True)\n return True\n\n assert inner() is True\n assert [True] == results", "def test_update_condition_not_defined(self):\n original_alt_info = getattr(self.form, 'alt_field_info', None)\n expected_label = 'alt_test_no_method'\n label_for_used_attrs = 'alt_test_feature'\n test_method = getattr(self.form, 'condition_' + expected_label, None)\n alt_info = getattr(self, 'alt_field_info', None)\n expected = alt_info.get(label_for_used_attrs, None)\n self.form.alt_field_info = alt_info\n self.form.test_condition_response = True\n actual = self.form.get_alt_field_info()\n\n self.assertIsNotNone(alt_info)\n self.assertIsNone(test_method)\n self.assertIsNotNone(expected)\n self.assertIn(expected_label, alt_info)\n self.assertEqual(expected, actual)\n\n self.form.test_condition_response = False\n self.form.alt_field_info = original_alt_info\n if original_alt_info is None:\n del self.form.alt_field_info", "def test_dont_cancel_for_already_cancelled(self, mock_tz):\n mock_tz.now.return_value = datetime(\n 2015, 2, 11, 10, tzinfo=dt_timezone.utc\n )\n self.unpaid.cancelled = True\n self.unpaid.save()\n\n management.call_command('cancel_unpaid_ticket_bookings')\n # emails are sent to user per cancelled booking and studio once\n # for all cancelled bookings\n self.unpaid.refresh_from_db()\n self.assertEqual(len(mail.outbox), 0)\n self.assertTrue(self.unpaid.cancelled)", "def test_un_member_status(self):\n\n # Get Hong Kong\n hong_kong = Country.objects.get(iso3=\"HKG\")\n\n # Assert that is_un_member_at is None\n self.assertEqual(hong_kong.is_un_member_at, None)\n\n # Initialize assertRaises block\n with self.assertRaises(ValidationError):\n\n # Set is UN member to True\n hong_kong.is_un_member = True\n\n # Attempt to clean the Hong Kong object\n hong_kong.clean()\n\n # Should fail because no corresponding is UN member at date is set\n\n # Get Thailand\n thailand = Country.objects.get(iso3=\"THA\")\n\n # Assert that Thailand is a UN member state\n self.assertEqual(thailand.is_un_member, True)\n\n # Assert that Thailand is UN member at is not None\n self.assertIsNotNone(thailand.is_un_member_at)\n\n # Set Thailand is UN member at to False\n thailand.is_un_member = False\n\n # Clean Thailand object\n thailand.clean()\n\n # Asser that Thailand is UN member at is now None\n self.assertIsNone(thailand.is_un_member_at)", "def clean_tos(self):\n if self.cleaned_data.get('tos', False):\n return self.cleaned_data['tos']\n raise forms.ValidationError(_(u'You must agree to the terms to register'))", "def testInsufficientCash(self):\n\n bid_move = self._move()\n context = self._context()\n context.players[0].cash = 200\n bfpc = BiddingForPrivateCompany()\n\n self.assertFalse(bfpc.run(bid_move, context), bfpc.errors())" ]
[ "0.66434324", "0.6157988", "0.61146647", "0.610798", "0.60404", "0.6012807", "0.6009148", "0.6007099", "0.5963601", "0.5951551", "0.59262794", "0.5911601", "0.59060943", "0.59055305", "0.58505845", "0.58087033", "0.5791745", "0.57821536", "0.5769893", "0.57652164", "0.5752993", "0.57443887", "0.57422316", "0.5730069", "0.56700164", "0.566059", "0.5654085", "0.5649452", "0.56415224", "0.5639141", "0.5635093", "0.5628686", "0.56268483", "0.5626384", "0.5616487", "0.56052655", "0.5591373", "0.559008", "0.558705", "0.55763054", "0.55684876", "0.55657023", "0.55608475", "0.55392516", "0.55375373", "0.5531905", "0.5528092", "0.5522264", "0.5505805", "0.5500665", "0.54927295", "0.54864365", "0.5482901", "0.54677385", "0.54660416", "0.5461931", "0.54611295", "0.54510754", "0.5450722", "0.54481375", "0.5441636", "0.5427783", "0.5425844", "0.54202574", "0.54149735", "0.54141766", "0.54113615", "0.54092467", "0.54079086", "0.54002804", "0.5398723", "0.53976274", "0.5391908", "0.5389978", "0.53897583", "0.53879076", "0.5383893", "0.5374142", "0.5364039", "0.5357454", "0.5349009", "0.5347134", "0.5341911", "0.5341842", "0.534012", "0.5339026", "0.5337278", "0.5317518", "0.5317209", "0.53130996", "0.5312198", "0.5306424", "0.52967125", "0.5292677", "0.52907544", "0.5286803", "0.5286157", "0.5285514", "0.52782524", "0.5275054" ]
0.54506004
59
Getter for the username field
def get_username(self, obj): return obj.user.username
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_username(self):\n return str(getattr(self, self.USERNAME_FIELD))", "def get_username(self):\r\n return self.username", "def get_username(self):\n return self.username", "def username(self) -> undefined.UndefinedOr[str]:", "def get_username(self):\n raise NotImplementedError('get_username')", "def GetUsername(self):\n return self._username", "def GetUsername(self):\n pass", "def get_username(self):\r\n raise NotImplementedError", "def username(self) -> str:", "def username(self) -> str:", "def get_username(self) -> str:\n return self._username", "def username(self) :\n\t\ttry :\n\t\t\treturn self._username\n\t\texcept Exception as e:\n\t\t\traise e", "def clean_username (self):\n return self.instance.username", "def getUsername(self):\n\t\treturn self.Username.lower()", "def get(self, username):\n return username", "def username(self):\n return self._username()", "def username(self, instance):\r\n return instance.user.username", "def username(self) -> str:\n raise NotImplementedError", "def username(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"username\")", "def username(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"username\")", "def username(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"username\")", "def username(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"username\")", "def username(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"username\")", "def username(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"username\")", "def username(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"username\")", "def username(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"username\")", "def username(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"username\")", "def username(self):\n return self._username", "def username(self):\n return self._username", "def username(self):\n return self._username", "def username(self):\n return json_loads(self.user_json).get('username')", "def username(self) -> str:\n return self._username", "def username_field(self):\n\n if 'phone' in self.initial_data:\n return 'phone'\n if 'user_name' in self.initial_data:\n return 'user_name'\n return get_username_field()", "def username(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"username\")", "def username(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"username\")", "def username(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"username\")", "def username(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"username\")", "def username(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"username\")", "def username(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"username\")", "def username(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"username\")", "def username(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"username\")", "def username(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"username\")", "def username(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"username\")", "def username(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"username\")", "def username(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"username\")", "def username(self) -> str:\n return pulumi.get(self, \"username\")", "def username(self) -> str:\n return pulumi.get(self, \"username\")", "def username(self) -> str:\n return pulumi.get(self, \"username\")", "def get_username(self, 
request):\r\n try:\r\n return request.user.username\r\n except AttributeError:\r\n return ''", "def username(self):\n return self.user.username", "def usernameFind(self):\r\n return self.username()", "def username(self) -> Optional[str]:\n return self._state.get(\"username\", None)", "def username(self):\n if self._username is not None:\n return self._username\n # Try to get a username from the userprofile\n try:\n self._username = self.userprofile.user.username\n except UserProfile.DoesNotExist:\n # User profile does not exist\n return None\n return self._username", "def username(self, inst):\r\n return inst.user.username", "def get_username(self):\n return self.browser.find_element(*locators.USER_NAME_TEXT).text", "def get_full_name(self):\n return self.username", "def get_full_name(self):\n return self.username", "def get_name(self):\n return self.user.username if self.user.username else self.user.email", "def git_username_user_attribute(self):\n return self._git_username_user_attribute", "def __str__(self):\r\n return self.username", "def username(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"username\")", "def _get_username():\n username = request.args.get(\"username\")\n if not username:\n raise NoUserError()\n else:\n return username", "def get_username(self):\n if not self.is_valid():\n return None\n try:\n # NOTE: all emails stored in lower-case\n email = self.clean_email().lower()\n return User.objects.get(email=email).username\n except User.DoesNotExist:\n pass\n return None", "def get_username(self):\n full_name = '%s %s' % (self.user.first_name.strip(), self.user.last_name.strip()[0:1])\n if len(full_name.strip()) == 0:\n full_name = self.user.username\n return full_name.strip()", "def __str__(self):\n return self.username", "def __str__(self):\n return self.username", "def _get_username(self):\n name = self._get_username_from_cookies()\n if name:\n return name\n if self._oauth and self._login_info[0]:\n return self._login_info[0]\n return self._get_username_from_api()", "def getName(self):\n return self.__username", "def username(self):\n return self._query_config()['username']", "def auth_username(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"auth_username\")", "def log_in_username(self):\n username_elem = waiter.find_element(self.driver, 'username', by=NAME)\n return username_elem.get_attribute('value')", "def __str__(self):\n if self.username == None:\n return \"User does not exist\"\n return self.username", "def set_username(self, value):\n raise NotImplementedError('set_username')", "def username(self):\n return self._authenticator.username()", "def ro_username(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"ro_username\")", "def set_username(self, value):\n self.username = value", "def user_name(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"user_name\")", "def _username(self):\n if 'username' not in self._config:\n self._config['username'] = self._UI.get_input(\"Please enter your trac username: \")\n self._config._write_config()\n return self._config['username']", "def get_short_name(self):\n return self.username", "def get_short_name(self):\n return self.username", "def get_short_name(self):\n return self.username", "def test_users_username_get(self):\n pass", "def user_name(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"user_name\")", "def user_name(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"user_name\")", "def user_name(self) -> pulumi.Input[str]:\n return pulumi.get(self, 
\"user_name\")", "def get_username_by_id(self, id):\n return User.query.get(id).username", "def username(self):\n log.warning(\"username property deprecated. Use boundjid.user\")\n return self.boundjid.user", "def username(self, repository):\r\n return self._username(repository)", "def get_username():\n\n if session.get(\"user_id\") is None:\n username = \"\"\n else:\n user_id = session.get(\"user_id\")\n user = User.query.filter(User.id==user_id).first()\n username = user.username\n\n return username", "def user_name(self) -> str:\n return pulumi.get(self, \"user_name\")", "def user_name(self):\n return self._user_name", "def _get_username_from_api(self):\n result = self.api_query(action=\"query\", meta=\"userinfo\")\n return result[\"query\"][\"userinfo\"][\"name\"]", "def clean_username(self):\n username = self.cleaned_data['username']\n\n try:\n User.objects.get(email=username)\n except ObjectDoesNotExist:\n raise forms.ValidationError('Selected user does not exist.')\n\n return username", "def __str__(self):\n return self.user.username", "def __str__(self):\n return self.user.username", "def __str__(self):\n return self.user.username", "def __str__(self):\n return self.user.username", "def __str__(self):\n return self.user.username", "def getUserName(self):\n user = User.by_id(self.user_id)\n return user.name" ]
[ "0.8819286", "0.8542671", "0.84036976", "0.8369547", "0.8357543", "0.8345958", "0.83210427", "0.82775146", "0.8162549", "0.8162549", "0.8135109", "0.8098483", "0.80932707", "0.8029489", "0.8025117", "0.7972525", "0.79250383", "0.7903688", "0.7901838", "0.7901838", "0.7901838", "0.7901838", "0.7901838", "0.7901838", "0.7901838", "0.7901838", "0.7901838", "0.78961444", "0.78961444", "0.78961444", "0.7893794", "0.7884272", "0.7881781", "0.7877699", "0.7877699", "0.7877699", "0.7877699", "0.7877699", "0.7877699", "0.7877699", "0.7877699", "0.7877699", "0.7877699", "0.7877699", "0.7877699", "0.7865014", "0.7865014", "0.7865014", "0.7840565", "0.78402543", "0.775206", "0.7751875", "0.76991856", "0.76297754", "0.75410986", "0.7502848", "0.7502848", "0.7487611", "0.74469125", "0.7417137", "0.7415221", "0.73924893", "0.73804826", "0.736287", "0.7340982", "0.7340982", "0.7307024", "0.7245849", "0.721643", "0.72031885", "0.7193379", "0.7184313", "0.7159272", "0.7151855", "0.7126765", "0.70969826", "0.70877796", "0.70865536", "0.7076797", "0.7076797", "0.7076797", "0.7050057", "0.70456743", "0.70456743", "0.70456743", "0.7033136", "0.70314556", "0.70233697", "0.7023205", "0.6989999", "0.69894266", "0.69784886", "0.69701743", "0.6967362", "0.6967362", "0.6967362", "0.6967362", "0.6967362", "0.69593537" ]
0.7931724
17
Returns an instance for a given Jenkins URL. The returned instance is usually an instance of a PlatformJenkins subclass (this allows switching to a different Jenkins API).
def get_jenkins(cls, url, template_dir=None): return PlatformJenkinsJavaCLI(template_dir, url)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def __init__(self, baseurl, nodename, jenkins_obj):\n self.name = nodename\n self.jenkins = jenkins_obj\n JenkinsBase.__init__(self, baseurl)", "def fromurl(cls, url: str):\n return cls.parse_obj(requests.get(url).json())", "async def from_url(cls) -> \"AocPrivateLeaderboard\":\n api_json = await cls.json_from_url()\n return cls.from_json(api_json)", "def __init__(self, jenkinsurl=None, dburl=None):\n self.logger = get_named_logger('Djinn')\n self.dj = DJenkins(url=jenkinsurl, logger=self.logger)\n self.dburl = dburl\n self.db = PipelineResults(connection_url=dburl, echo=False)", "def get_jenkins_with_http_info(self, **kwargs):\n\n all_params = []\n all_params.append('async')\n all_params.append('_return_http_data_only')\n all_params.append('_preload_content')\n all_params.append('_request_timeout')\n\n params = locals()\n for key, val in iteritems(params['kwargs']):\n if key not in all_params:\n raise TypeError(\n \"Got an unexpected keyword argument '%s'\"\n \" to method get_jenkins\" % key\n )\n params[key] = val\n del params['kwargs']\n\n collection_formats = {}\n\n path_params = {}\n\n query_params = []\n\n header_params = {}\n\n form_params = []\n local_var_files = {}\n\n body_params = None\n # HTTP header `Accept`\n header_params['Accept'] = self.api_client.\\\n select_header_accept(['application/json'])\n\n # Authentication setting\n auth_settings = ['jenkins_auth']\n\n return self.api_client.call_api('/api/json', 'GET',\n path_params,\n query_params,\n header_params,\n body=body_params,\n post_params=form_params,\n files=local_var_files,\n response_type='Hudson',\n auth_settings=auth_settings,\n async=params.get('async'),\n _return_http_data_only=params.get('_return_http_data_only'),\n _preload_content=params.get('_preload_content', True),\n _request_timeout=params.get('_request_timeout'),\n collection_formats=collection_formats)", "def _get_instance(cls, configuration, auth_type):\n if configuration in cls._INSTANCES:\n return cls._INSTANCES[configuration]\n return cls._create_instance(configuration, auth_type)", "def get_builder_instance(self):\n if self.framework == 'standalone':\n builder_instance = BuildStandaloneExecutionScenario(\n scenario=self.scenario,\n testcase_list=self.testcase_list,\n splunk_metadata=self.splunk_metadata\n )\n else:\n builder_instance = BuildOneMapExecutionScenario(\n scenario=self.scenario,\n testcase_list=self.testcase_list,\n splunk_metadata=self.splunk_metadata\n )\n return builder_instance", "def get_instance(driver_info: DriverInfo) -> webdriver:\n if driver_info.get_driver_type() == \"chrome\":\n options = webdriver.ChromeOptions()\n options.add_argument(\"--start-maximized\")\n return webdriver.Chrome(\n executable_path=os.path.join(\n driver_info.get_driver_path(),\n \"chromedriver\"\n ),\n chrome_options=options\n )", "def getinstance() :\n\t\treturn Jikji.instance", "def get_object(self) -> Job:\n project = ProjectPermissionsMixin.get_object(self)\n return project.jobs.get(id=self.kwargs[\"job\"])", "def get_instance(self, name):\n klass = self.get_class(name)\n return klass()", "def getResource(self, url):\n\n res = self.getRequest(url)\n return self._instantiateResource(res)", "def get_browser(self, name, job):\n browser = None\n if 'type' in job and job['type'] == 'traceroute':\n from .traceroute import Traceroute\n browser = Traceroute(self.options, job)\n elif name in self.browsers and 'exe' in self.browsers[name]:\n from .chrome_desktop import ChromeDesktop\n browser = ChromeDesktop(self.browsers[name]['exe'], self.options, 
job)\n return browser", "def getInstance(config):\n return Plugin(config)", "def getInstance(config):\n return Plugin(config)", "def get_plugin(version):\n build_version = get_build_version(current_app, version)\n if build_version:\n pid = request.args.get('id')\n return _get_plugin(current_app, build_version, pid)\n else:\n return invalid_api_version(404)", "def __new__(cls, *_args, **_kwargs):\n if cls._instance is None:\n cls._instance = super(TestJob, cls).__new__(cls)\n return cls._instance", "def get_instance(self, instance):\n return self._get(_instance.Instance, instance)", "def get_instance(instance: Optional[str] = None,\n project: Optional[str] = None,\n opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetInstanceResult:\n __args__ = dict()\n __args__['instance'] = instance\n __args__['project'] = project\n opts = pulumi.InvokeOptions.merge(_utilities.get_invoke_opts_defaults(), opts)\n __ret__ = pulumi.runtime.invoke('google-native:sqladmin/v1:getInstance', __args__, opts=opts, typ=GetInstanceResult).value\n\n return AwaitableGetInstanceResult(\n available_maintenance_versions=pulumi.get(__ret__, 'available_maintenance_versions'),\n backend_type=pulumi.get(__ret__, 'backend_type'),\n connection_name=pulumi.get(__ret__, 'connection_name'),\n create_time=pulumi.get(__ret__, 'create_time'),\n current_disk_size=pulumi.get(__ret__, 'current_disk_size'),\n database_installed_version=pulumi.get(__ret__, 'database_installed_version'),\n database_version=pulumi.get(__ret__, 'database_version'),\n disk_encryption_configuration=pulumi.get(__ret__, 'disk_encryption_configuration'),\n disk_encryption_status=pulumi.get(__ret__, 'disk_encryption_status'),\n etag=pulumi.get(__ret__, 'etag'),\n failover_replica=pulumi.get(__ret__, 'failover_replica'),\n gce_zone=pulumi.get(__ret__, 'gce_zone'),\n instance_type=pulumi.get(__ret__, 'instance_type'),\n ip_addresses=pulumi.get(__ret__, 'ip_addresses'),\n ipv6_address=pulumi.get(__ret__, 'ipv6_address'),\n kind=pulumi.get(__ret__, 'kind'),\n maintenance_version=pulumi.get(__ret__, 'maintenance_version'),\n master_instance_name=pulumi.get(__ret__, 'master_instance_name'),\n max_disk_size=pulumi.get(__ret__, 'max_disk_size'),\n name=pulumi.get(__ret__, 'name'),\n on_premises_configuration=pulumi.get(__ret__, 'on_premises_configuration'),\n out_of_disk_report=pulumi.get(__ret__, 'out_of_disk_report'),\n project=pulumi.get(__ret__, 'project'),\n region=pulumi.get(__ret__, 'region'),\n replica_configuration=pulumi.get(__ret__, 'replica_configuration'),\n replica_names=pulumi.get(__ret__, 'replica_names'),\n root_password=pulumi.get(__ret__, 'root_password'),\n satisfies_pzs=pulumi.get(__ret__, 'satisfies_pzs'),\n scheduled_maintenance=pulumi.get(__ret__, 'scheduled_maintenance'),\n secondary_gce_zone=pulumi.get(__ret__, 'secondary_gce_zone'),\n self_link=pulumi.get(__ret__, 'self_link'),\n server_ca_cert=pulumi.get(__ret__, 'server_ca_cert'),\n service_account_email_address=pulumi.get(__ret__, 'service_account_email_address'),\n settings=pulumi.get(__ret__, 'settings'),\n state=pulumi.get(__ret__, 'state'),\n suspension_reason=pulumi.get(__ret__, 'suspension_reason'))", "def get_jira_instance(use_test_server):\n username = get_username()\n password = get_password()\n\n credentials=(username, password)\n\n if use_test_server:\n cfg.server = cfg.TEST_SERVER\n\n try:\n j = JIRA(cfg.server, basic_auth=credentials), username\n except JIRAError, e:\n if e.text.find('CAPTCHA_CHALLENGE') != -1:\n eprint('Captcha verification has been triggered by '\\\n 
'JIRA - please go to JIRA using your web '\\\n 'browser, log out of JIRA, log back in '\\\n 'entering the captcha; after that is done, '\\\n 'please re-run the script')\n sys.exit(os.EX_NOPERM)\n else:\n raise\n return j", "def get_instance(cls):\n return cls.__new__(cls)", "def get_instance(cls):\n return cls.__new__(cls)", "def get_instance(cls):\n return cls.__new__(cls)", "def _create_instance(cls, configuration, auth_type):\n auth = ClientAuthFactory.get(\n username=configuration.username,\n password=configuration.password,\n auth_type=auth_type\n )\n instance = HttpClient(configuration.url, auth)\n cls._INSTANCES[configuration] = instance\n return instance", "def from_url(cls, url):\n query_params = _get_query_params_from_url(url)\n\n if _get_param(query_params, \"SERVICE\") == \"WMS\":\n layer = _get_param(query_params, \"LAYERS\")\n elif _get_param(query_params, \"SERVICE\") == \"WCS\":\n layer = _get_param(query_params, \"COVERAGE\")\n\n d = None\n if layer.startswith(\"https://\"):\n d = _get_from_url(layer).json()\n elif layer.startswith(\"s3://\"):\n parts = layer.split(\"/\")\n bucket = parts[2]\n key = \"/\".join(parts[3:])\n s3 = S3CacheStore(s3_bucket=bucket)\n s = s3._load(key)\n elif layer == \"%PARAMS%\":\n s = _get_param(query_params, \"PARAMS\")\n else:\n p = _get_param(query_params, \"PARAMS\")\n if p is None:\n p = \"{}\"\n if not isinstance(p, dict):\n p = json.loads(p)\n return cls.from_name_params(layer, p)\n\n if d is None:\n d = json.loads(s, object_pairs_hook=OrderedDict)\n\n return cls.from_definition(d)", "def from_url(cls, url):\n query_params = _get_query_params_from_url(url)\n\n if _get_param(query_params, \"SERVICE\") == \"WMS\":\n layer = _get_param(query_params, \"LAYERS\")\n elif _get_param(query_params, \"SERVICE\") == \"WCS\":\n layer = _get_param(query_params, \"COVERAGE\")\n\n d = None\n if layer.startswith(\"https://\"):\n d = _get_from_url(layer).json()\n elif layer.startswith(\"s3://\"):\n parts = layer.split(\"/\")\n bucket = parts[2]\n key = \"/\".join(parts[3:])\n s3 = S3CacheStore(s3_bucket=bucket)\n s = s3._load(key)\n elif layer == \"%PARAMS%\":\n s = _get_param(query_params, \"PARAMS\")\n else:\n p = _get_param(query_params, \"PARAMS\")\n if p is None:\n p = \"{}\"\n if not isinstance(p, dict):\n p = json.loads(p)\n return cls.from_name_params(layer, p)\n\n if d is None:\n d = json.loads(s, object_pairs_hook=OrderedDict)\n\n return cls.from_definition(d)", "def get_instance(self, name):\n return self.website.instance.id", "def get_instance(cls, galaxy_instance=None):\n if not WorkflowLoader._instance:\n cls._logger.debug(\"Creating a new WorflowLoader instance...\")\n WorkflowLoader._instance = WorkflowLoader(galaxy_instance)\n elif galaxy_instance:\n cls._logger.debug(\"Initializing the existing WorkflowLoader instance...\")\n WorkflowLoader._instance._initialize(galaxy_instance)\n return WorkflowLoader._instance", "def loadComponentFromURL( cUrl, tProperties=() ):\n StarDesktop = getDesktop()\n oDocument = StarDesktop.loadComponentFromURL( cUrl, \"_blank\", 0, tProperties )\n return oDocument", "def loadComponentFromURL( cUrl, tProperties=() ):\n StarDesktop = getDesktop()\n oDocument = StarDesktop.loadComponentFromURL( cUrl, \"_blank\", 0, tProperties )\n return oDocument", "def __init__(self, url='https://gitlab.com'):\n self._url = url\n self._gitlab = None", "def clone_from(\n cls,\n url: PathLike,\n to_path: PathLike,\n progress: CallableProgress = None,\n env: Optional[Mapping[str, str]] = None,\n multi_options: 
Optional[List[str]] = None,\n allow_unsafe_protocols: bool = False,\n allow_unsafe_options: bool = False,\n **kwargs: Any,\n ) -> \"Repo\":\n git = cls.GitCommandWrapperType(os.getcwd())\n if env is not None:\n git.update_environment(**env)\n return cls._clone(\n git,\n url,\n to_path,\n GitCmdObjectDB,\n progress,\n multi_options,\n allow_unsafe_protocols=allow_unsafe_protocols,\n allow_unsafe_options=allow_unsafe_options,\n **kwargs,\n )", "def _get_gitlab_fetcher(\n parsed_url: ParsedUrl, output_dir: str, spec: Optional[str] = None\n) -> WorkflowFetcherBase:\n # There are four different GitLab URLs we are interested in:\n # 1. URL to a repository: /<user>/<repo>\n # 2. Git URL: /<user>/<repo>.git\n # 3. URL to a branch/commit/tag: /<user>/<repo>/-/tree/<git_ref>\n # 4. URL to a zip snapshot: /<user>/<repo>/-/archive/.../<repo>-<git_ref>.zip\n # Note that GitLab supports recursive subgroups, so <user> can contain slashes\n components = _match_url(\n parsed_url,\n [\n \"/<path:username>/<repository>/\",\n \"/<path:username>/<repository>.git/\",\n \"/<path:username>/<repository>/-/tree/<path:git_ref>\",\n \"/<path:username>/<repository>/-/archive/<path:zip_path>\",\n ],\n )\n\n username = components[\"username\"]\n repository = components[\"repository\"]\n git_ref = components.get(\"git_ref\")\n zip_path = components.get(\"zip_path\")\n\n if zip_path:\n # The name of the zip file is composed of the repository name and\n # the git commit/branch/tag\n workflow_name = parsed_url.basename_without_extension\n return WorkflowFetcherZip(parsed_url, output_dir, spec, workflow_name)\n else:\n repository_url = ParsedUrl(\n f\"https://{parsed_url.hostname}/{username}/{repository}.git\"\n )\n return WorkflowFetcherGit(repository_url, output_dir, git_ref, spec)", "def create_instance(c_instance):\n return OpenLabs(c_instance)", "def create_api(self):\n return DJinnAPI(djenkins=self.dj, pipeline_results=self.db)", "def __get__(self, instance, owner):\n if instance._location is None:\n raise AttributeError('Cannot find URL of %s relative to URL-less %s' % (self.cls.__name__, owner.__name__))\n newurl = join(instance._location, self.api_name)\n obj = self.cls.get(newurl, auth=instance.auth)\n obj.auth = instance.auth\n return obj", "def init_from_url(cls, url):\n init_kwargs = cls._validate_init_kwargs(url)\n return cls(**init_kwargs)", "def get_instance(cls, *args, **kwargs):\n if cls._instance is not None:\n return cls._instance\n return cls(*args, **kwargs)", "def getSite(self, url):\n hostname = urlparse(urlparser).hostname\n site = sites.getSite(hostname)\n return site", "def bot_factory(url):\n from dallinger_experiment.experiment import Bot\n\n return Bot(url)", "def get_fetcher(\n launcher_url: str, output_dir: str, spec: Optional[str] = None\n) -> WorkflowFetcherBase:\n parsed_url = ParsedUrl(launcher_url)\n\n if parsed_url.scheme not in FETCHER_ALLOWED_SCHEMES:\n raise ValueError(\"URL scheme not allowed\")\n\n if spec:\n _, spec_ext = os.path.splitext(spec)\n if spec_ext not in WORKFLOW_SPEC_EXTENSIONS:\n raise ValueError(\n \"The provided specification doesn't have a valid file extension\"\n )\n\n if parsed_url.netloc == \"github.com\":\n return _get_github_fetcher(parsed_url, output_dir, spec)\n elif parsed_url.netloc in FETCHER_ALLOWED_GITLAB_HOSTNAMES:\n return _get_gitlab_fetcher(parsed_url, output_dir, spec)\n elif parsed_url.extension == \".git\":\n return WorkflowFetcherGit(parsed_url, output_dir, spec=spec)\n elif parsed_url.extension == \".zip\":\n return 
WorkflowFetcherZip(parsed_url, output_dir, spec)\n elif parsed_url.extension in WORKFLOW_SPEC_EXTENSIONS:\n if spec:\n raise ValueError(\n \"Cannot use the 'specification' argument when the URL points directly \"\n \"to a specification file\"\n )\n return WorkflowFetcherYaml(parsed_url, output_dir)\n else:\n raise ValueError(\"Cannot handle given URL\")", "def createClientFromUrl(url, authStrategy=None):\n return MetaClient(MetaHttpClient(url, authStrategy))", "def __init__(self, url_address, type=\"Chrome\"):\n from webdriverwrapper import Chrome\n from selenium import webdriver\n options = webdriver.ChromeOptions()\n options.add_experimental_option(\"excludeSwitches\", [\"enable-automation\"])\n options.add_experimental_option('useAutomationExtension', False)\n self.driver = Chrome(options=options)\n # Open a website\n window_before = self.driver.window_handles[0]\n self.driver.get(url_address)", "def get_instance():\n if ChromeDriverSession.__instance is None:\n ChromeDriverSession()\n return ChromeDriverSession.__instance", "def get_instance(c: Config) -> NotionDAO:\n if c.notion_official_configured:\n result = APIv2(c)\n else:\n result = APIv1(c)\n return result", "def get_builder_project():\n if config.use_shaman is True:\n builder_class = ShamanProject\n else:\n builder_class = GitbuilderProject\n return builder_class", "def get_jira(self):\n if not self._jira:\n jira_config = MTConfigHandler().get_config_section('jira')\n self._jira = JIRA(\n jira_config['host'],\n basic_auth=(jira_config['id'], jira_config['password'])\n )\n return self._jira", "def get_instance(cls):\n global FW_MANAGER_API\n if not FW_MANAGER_API:\n FW_MANAGER_API = cls()\n return FW_MANAGER_API", "def get_homeworld(self, url):\n\n if not isinstance(self.homeworld, Planet):\n data = get_swapi_resource(url)\n homeworld = Planet(data['name'], data['url'])\n homeworld.assign_values(data)\n self.homeworld = homeworld", "def get_instance(self, container, cls, **params):\n if not cls in self.instances:\n self.instances[cls] = self.create_instance(container, cls, **params)\n \n return self.instances[cls]", "def getJob(uniq):\n return Job(Cuebot.getStub('job').GetJob(\n job_pb2.JobGetJobRequest(id=uniq), timeout=Cuebot.Timeout).job)", "def from_git_url(cls, git_url):\n with tempfile.TemporaryDirectory() as temp_dir:\n cmd = ['git', 'clone', '--single-branch', '--depth=1', git_url, temp_dir]\n subprocess.check_call(cmd)\n db_dir = os.path.join(temp_dir, 'database')\n return cls.from_dir(db_dir=db_dir)", "def get(self, *args):\n params = self.parse_query_string(args[0])\n\n module = self.get_module(params)\n impl = module.ProductsBuilds(config=self.context)\n\n return impl.get(**params)", "def get_instance(cls):\n if cls.__instance is None:\n cls.__guard = False\n cls.__instance = PvMonitors()\n cls.__guard = True\n return PvMonitors.__instance", "def GetInstance():\n pass", "def _get_instance(self, id):\n if id not in self._instances:\n self._instances[id] = self._load_constructor(id)\n\n return self._instances[id]", "def get_jenkins(self, **kwargs):\n kwargs['_return_http_data_only'] = True\n if kwargs.get('async'):\n return self.get_jenkins_with_http_info(**kwargs)\n else:\n (data) = self.get_jenkins_with_http_info(**kwargs)\n return data", "def get_repository(url):\n if not '://' in url:\n url = 'https://' + url\n\n parsed_url = urllib.parse.urlparse(url)\n if parsed_url.netloc.endswith('github.com'):\n g = get_github_auth_token()\n repo_url = parsed_url.path.strip('/')\n repo = 
GitHubRepository(g.get_repo(repo_url))\n return repo\n\n raise Exception('Unsupported url!')", "def fetch_executable_from_jenkins():\n\n base_job_url = os.environ.get('JENKINS_JOB_URL')\n if not base_job_url:\n error('Jenkins job URL for the builder is not specified.')\n\n build_json = json.loads(requests.get('%s/api/json'\n % base_job_url).text)\n last_build = build_json['lastCompletedBuild']['number']\n print 'Last build ID: %d' % last_build\n\n job_url = '%s/%d' % (base_job_url, last_build)\n last_build_json = json.loads(requests.get('%s/api/json'\n % job_url).text)\n if not last_build_json['artifacts']:\n error('No artifacts found!')\n\n artifacts_deb = [artifact for artifact in\n last_build_json['artifacts'] if '.dmg'\n in artifact['fileName']]\n artifact_url = '%s/artifact/%s' % (job_url,\n artifacts_deb[0]['relativePath'])\n file_name = artifacts_deb[0]['fileName']\n print 'Tribler installer url: %s' % artifact_url\n\n # Download the file\n file_path = os.path.join(os.environ.get('WORKSPACE'), file_name)\n download_response = requests.get(artifact_url, stream=True)\n download_response.raise_for_status()\n\n with open(file_path, 'wb') as handle:\n for block in download_response.iter_content(1024):\n handle.write(block)\n\n return file_path", "def get_by_url(self, url, pool_name=None):\n\t\tif not pool_name:\n\t\t\treturn self.pool[url]\n\t\treturn getattr(self, pool_name)[url]", "def one(url_or_bundle, download=None):\n\n return swarm([url_or_bundle], download=download)[0]", "def by_url(cls, url):\n video_id = cls._id_from_url(url)\n return Plus7Program(video_id)", "def from_url(self, url: str) -> Optional[str]:\n parsed = urlparse.urlparse(url)\n if parsed.scheme not in {'http', 'https', ''}:\n return None\n\n path = parsed.path\n if parsed.query:\n path += '?' 
+ parsed.query\n\n # Discard $1 and everything after it\n path, *_ = path.partition('$1')\n\n for domain in self.domains:\n if domain in parsed.netloc:\n break\n else:\n return None\n\n matched_sites = set()\n for code in chain(self.codes,\n getattr(self, 'test_codes', ()),\n getattr(self, 'closed_wikis', ()),\n ):\n if self._hostname(code)[1] == parsed.netloc:\n # Use the code and family instead of the url\n # This is only creating a Site instance if domain matches\n site = pywikibot.Site(code, self.name)\n pywikibot.log(f'Found candidate {site}')\n\n for iw_url in site._interwiki_urls():\n iw_url, *_ = iw_url.partition('{}')\n if path.startswith(iw_url):\n matched_sites.add(site)\n break\n\n if len(matched_sites) == 1:\n return matched_sites.pop().code\n\n if not matched_sites:\n return None\n\n raise RuntimeError(\n 'Found multiple matches for URL \"{}\": {}'\n .format(url, ', '.join(str(s) for s in matched_sites)))", "def get_from_host(cls, host, silent=False):\n if cls.search([], count=True) == 1:\n return cls.search([])[0]\n try:\n website, = cls.search([('name', '=', host)])\n except ValueError:\n if not silent:\n raise WebsiteNotFound()\n else:\n return website", "def _getRobotURL(self, masterUrl):\r\n print('Connect to Master Process on: {0}'.format(masterUrl))\r\n\r\n args = urlencode((('userID', self._userID),\r\n ('version', CURRENT_VERSION)))\r\n\r\n try:\r\n f = urlopen('{0}?{1}'.format(masterUrl, args))\r\n except HTTPError as e:\r\n msg = e.read()\r\n\r\n if msg:\r\n msg = ' - {0}'.format(msg)\r\n\r\n raise ConnectionError('HTTP Error {0}: '\r\n '{1}{2}'.format(e.getcode(), e.msg, msg))\r\n\r\n return json.loads(f.read())", "def create_remote_instance(self, payload):\n instance = RemoteInstance()\n instance.init_from_payload(payload)\n return instance", "def get_instance(self, project, parameters):\n\t\t\n\t\tparameters = project.process_node_parameters(\n\t\t\tparameters,\n\t\t\t[\"destination\", \"from\", \"to\"],\n\t\t\t{\"replace\": False, \"retry\": 1},\n\t\t\t{\"destination\": \"variable_name\", \"from\": \"non_empty_string\", \"to\": \"non_empty_string\", \"replace\": \"boolean\", \"retry\": \"integer\"}\n\t\t\t)\n\n\t\treturn DownloadCommand(project, parameters[\"destination\"], parameters[\"from\"], parameters[\"to\"], parameters[\"replace\"], parameters[\"retry\"])", "def get_instance(cls, device):\n\n if cls._nuis.get(device) is None:\n cls._nuis[device] = AndroidUiautomationPoco(device)\n return cls._nuis[device]", "def get_job(self, _id):\n data = {\n 'class': 'Job',\n 'id': _id,\n 'attrs': {},\n }\n job = self.db_client.send_request('list', json.dumps(data))\n\n return Job(\n _id=job['id'],\n _type=job['type'],\n task=job['task'],\n command=job['command'],\n input_parameters=job['inputParameters'],\n status=job['status'],\n runner_id=job['runner'],\n )", "def request(cls, url, debug=False):\n try:\n response = connect.request(url, debug=debug)\n except urllib2.HTTPError:\n raise ValueError(\"No description found with URL '%s'\" % url)\n reply = json.loads(response.read())\n return cls.from_json(reply)", "def plugin_instance(self):\n return self.__plugin_instance", "def get_object(self):\n if not self.user.is_authenticated():\n raise Http404('Access denied')\n self.url_name = self.request.resolver_match.url_name\n if self.url_name == 'sticker-detail':\n return Sticker.objects.get(\n board__desk__owner__user=self.user,\n board__prefix=self.kwargs['prefix'],\n sequence=self.kwargs['sequence']\n )\n elif self.url_name == 'board-comments':\n return 
Board.objects.get(\n desk__owner__user=self.user,\n sequence=self.kwargs['board_sequence']\n )\n elif self.url_name == 'sprint-comments':\n return Sprint.objects.get(\n number=self.kwargs['sprint_number'],\n board__desk__owner__user=self.user,\n board__sequence=self.kwargs['board_sequence']\n )", "def from_url(self) -> PngImagePlugin.PngImageFile:\n response = requests.get(self.url)\n img = Image.open(BytesIO(response.content))\n\n return img", "def get(host, port=None, version=None):\n port = 8081 if port is None else port\n version = \"v1\" if version is None else version\n\n if version not in VERSIONS.keys():\n raise RestException(f\"Unknown REST API version: {version}\")\n api_client_cls = VERSIONS[version]\n return api_client_cls(host=host, port=port)", "def getinstance():\n if cls not in instances:\n instances[cls] = cls()\n return instances[cls]", "def getPlatform(self, name):\r\n if self.platforms.has_key(name):\r\n return self.platforms[name]\r\n else:\r\n self.platforms[name] = Platform(name)\r\n return self.platforms[name]", "def get_browser(self, settings=None):\n browser = Browser(self.get_wsgi_application())\n if settings is not None:\n settings(browser)\n self._browsers.append(browser)\n return browser", "def get_website(self, url: str, check_interval: int):\n try:\n\n website = Website(url=url, check_interval=check_interval)\n\n except Exception:\n print(\n \"I wasn't able to connect with that URL.\\n\"\n + \"Please revise it, including 'http://'\"\n + \" or 'https://' as appropriate).\"\n )\n return None\n\n return website", "def get_json_job_details(buildurl):\n return requests.get(buildurl + \"/api/json\").json()", "def get_selenium2libraryExtend_instance():\n\n # selenium = BuiltIn().get_library_instance('Selenium2Library')\n return selenium", "def _CreateCurl(self):\n # Create pycURL object if no factory is provided\n if self._curl_factory:\n curl = self._curl_factory()\n else:\n curl = pycurl.Curl()\n\n # Default cURL settings\n curl.setopt(pycurl.VERBOSE, False)\n curl.setopt(pycurl.FOLLOWLOCATION, False)\n curl.setopt(pycurl.MAXREDIRS, 5)\n curl.setopt(pycurl.NOSIGNAL, True)\n curl.setopt(pycurl.USERAGENT, self.USER_AGENT)\n curl.setopt(pycurl.SSL_VERIFYHOST, 0)\n curl.setopt(pycurl.SSL_VERIFYPEER, False)\n curl.setopt(pycurl.HTTPHEADER, [\n \"Accept: %s\" % HTTP_APP_JSON,\n \"Content-type: %s\" % HTTP_APP_JSON,\n ])\n\n assert ((self._username is None and self._password is None) ^\n (self._username is not None and self._password is not None))\n\n if self._username:\n # Setup authentication\n curl.setopt(pycurl.HTTPAUTH, pycurl.HTTPAUTH_BASIC)\n curl.setopt(pycurl.USERPWD,\n str(\"%s:%s\" % (self._username, self._password)))\n\n # Call external configuration function\n if self._curl_config_fn:\n self._curl_config_fn(curl, self._logger)\n\n return curl", "def get_gitlab_remote(self):\n return self.get_remote('gitlab')", "def getRepo(session, name=None, url=None):\r\n\r\n try:\r\n # Look up repository by name\r\n if name is not None:\r\n return session.get_repo(name)\r\n\r\n # Look up repository by clone URL\r\n if url is not None:\r\n # Parse URL\r\n url = urlparse(url)\r\n\r\n # Check that this is a github URL\r\n if not url.hostname.endswith(\"github.com\"):\r\n return None\r\n\r\n # Get repository name from clone URL\r\n name = url.path\r\n if name.startswith(\"/\"):\r\n name = name[1:]\r\n if name.endswith(\".git\"):\r\n name = name[:-4]\r\n\r\n # Look up repository by name\r\n return getRepo(session, name=name)\r\n\r\n except:\r\n pass\r\n\r\n return None", "def 
_build_client(self):\n if self.url:\n return koji.ClientSession(self.url)\n else:\n _configuration = koji.read_config(self.profile,\n user_config=self.user_config)\n \"\"\"\n NOTE: This check is here because if the user does not have and koji\n config files, read_config will 'helpfully' return you a useless\n default config. The required baseurl ('server' in _configuration)\n has a default, so we cannot check that. However, topurl defaults\n to None, so we currently use this to devine if the returned config\n is the useless default.\n \"\"\"\n if not _configuration.get('topurl'):\n raise koji.ConfigurationError(\"no configuration for profile \\\n name: {0}\".format(self.profile))\n return koji.ClientSession(_configuration.get('server'),\n opts=_configuration)", "def get_workflow_object(\n workflow_name, settings, logger, client, token, decision, maximum_page_size\n):\n module_name = \"workflow.\" + workflow_name\n module_object = importlib.import_module(module_name)\n workflow_class = getattr(module_object, workflow_name)\n # Create the object\n workflow_object = workflow_class(\n settings, logger, client, token, decision, maximum_page_size\n )\n return workflow_object", "def get_job(self) -> CustomJob:\n return self._client.get_custom_job(name=self._job_name)", "def pull(verbose_level=1, hostnames=[], servicenames=[]):\n check_arg(hostnames, u._('Host names'), list,\n empty_ok=True, none_ok=True)\n check_arg(verbose_level, u._('Verbose level'), int)\n check_arg(servicenames, u._('Service names'), list,\n empty_ok=True, none_ok=True)\n\n check_kolla_args(hostnames=hostnames,\n servicenames=servicenames)\n\n hostnames = safe_decode(hostnames)\n servicenames = safe_decode(servicenames)\n action = KollaAction(verbose_level=verbose_level,\n playbook_name='site.yml')\n ansible_job = action.pull(hostnames, servicenames)\n return Job(ansible_job)", "def open_url(name):\n url = localReadConfig.get_webServer(name)\n browser = open_browser()\n browser.get(url)\n return browser", "def _get_instance(self):", "def _get_instance(self):", "def get_soup_obj(url):\n try:\n html = session.get(url, headers=headers).text\n return BeautifulSoup(html, \"html.parser\")\n except HTTPError:\n print(\"{} not reachable\".format(url))\n return None", "def get_instance(cls, project, parameters):\n\n\t\tif False == parameters.has_key(\"name\") or \"\" == parameters[\"name\"]:\n\t\t\traise DepFileParsingError()\n\n\t\tschema_version = Project.LAST_SCHEMA_VERSION\n\t\tif parameters.has_key(\"schema\") and \"\" != parameters[\"schema\"]:\n\t\t\tschema_version = parameters[\"schema\"]\n\n\t\treturn Project(parameters[\"name\"], schema_version)", "def load(cls, host):\n\n return cls(host)", "def get_instance():\r\n try:\r\n module_instance = importlib.import_module(\r\n f\"{__name__}.{SETTINGS.db_type_ccgp_crawler.lower()}\")\r\n except ImportError as error:\r\n LOG.error(error)\r\n return module_instance.CCGPBidInfoStorage", "def get_object(self):\n lookup_url_kwarg = self.lookup_url_kwarg or self.lookup_field\n\n assert lookup_url_kwarg in self.kwargs, (\n 'Expected view %s to be called with a URL keyword argument '\n 'named \"%s\". Fix your URL conf, or set the `.lookup_field` '\n 'attribute on the view correctly.' 
%\n (self.__class__.__name__, lookup_url_kwarg)\n )\n\n filter_kwargs = {\n \"parent_id\": self.kwargs[\"boards_pk\"],\n self.lookup_field: self.kwargs[lookup_url_kwarg]\n }\n obj = get_object_or_404(self.get_queryset(), **filter_kwargs)\n\n # May raise a permission denied\n self.check_object_permissions(self.request, obj)\n\n return obj", "def get_job_status(job_url, build_number, username, password):\n try:\n url = \"{}{}/api/json\".format(job_url, str(build_number))\n res = requests.get(url, auth=(username, password))\n build_status_json = json.loads(res.text)\n return build_status_json[\"result\"]\n\n except requests.exceptions.RequestException as e:\n print (e)\n sys.exit(2)", "def get_job(\n self, job_id: Union[str, int], *, params: Optional[dict] = None\n ) -> \"resource_types.Job\":\n\n return communicator.Job(self.__requester).from_id(\n job_id=job_id, parameters=params\n )", "def import_build_for_job(job_pk, build_number):\n job = Job.objects.get(pk=job_pk)\n logging.info(\"Located job %s\\n\" % job)\n\n client = job.server.get_client()\n logging.info(\"Using server at %s\\n\" % job.server.url)\n\n jenkins_job = client.get_job(job.name)\n build_result = jenkins_job.get_build(build_number)\n\n # TODO: Shouldn't access _data here.\n build_details = {\n \"status\": build_result.get_status(),\n # TODO: What should we do with this ID we get from Jenkins?\n # Discard? or only set it if we don't have one?\n # \"build_id\": build_result._data[\"id\"],\n \"duration\": build_result._data[\"duration\"],\n \"url\": build_result.get_result_url(),\n \"console_log\": build_result.get_console(),\n }\n logging.info(\"Processing build details for %s #%d\" % (job, build_number))\n Build.objects.filter(job=job, number=build_number).update(**build_details)\n build = Build.objects.get(job=job, number=build_number)\n for artifact in build_result.get_artifacts():\n artifact_details = {\n \"filename\": artifact.filename,\n \"url\": artifact.url,\n \"build\": build\n }\n logging.info(\"%s\" % artifact_details)\n Artifact.objects.create(**artifact_details)", "def return_instance(cls):\n return cls()", "def get_instance(cls):\n global DNS_MANAGER_API\n if not DNS_MANAGER_API:\n DNS_MANAGER_API = cls()\n return DNS_MANAGER_API" ]
[ "0.5845707", "0.55609673", "0.52667314", "0.51818883", "0.5164338", "0.5137516", "0.5066826", "0.5053414", "0.5036196", "0.49901026", "0.49768355", "0.49687746", "0.4964062", "0.49446085", "0.49446085", "0.48970905", "0.48545104", "0.48198324", "0.48110753", "0.48049742", "0.47993132", "0.47993132", "0.47993132", "0.47983548", "0.47976637", "0.47976637", "0.47592542", "0.47507975", "0.4747257", "0.4747257", "0.47432923", "0.4724452", "0.4722996", "0.47216675", "0.47047874", "0.4699257", "0.46955305", "0.46902704", "0.4689305", "0.46839488", "0.46779558", "0.4668109", "0.4655386", "0.4616942", "0.46113694", "0.45866233", "0.45837915", "0.45798165", "0.457573", "0.45666075", "0.45537645", "0.4548213", "0.4545914", "0.45433986", "0.45329452", "0.4530216", "0.45262828", "0.4521024", "0.45173314", "0.4516762", "0.45136112", "0.45100248", "0.45013595", "0.44975466", "0.4495582", "0.44939417", "0.44884786", "0.44861984", "0.4480762", "0.447887", "0.4462397", "0.44579774", "0.44561964", "0.44515264", "0.4451005", "0.44405538", "0.44381848", "0.4428476", "0.44271296", "0.44267726", "0.44228986", "0.44225848", "0.4416194", "0.44156155", "0.4413205", "0.4408434", "0.4403232", "0.4385771", "0.43818364", "0.43818364", "0.43816692", "0.43808514", "0.43761048", "0.43715537", "0.43642282", "0.43544757", "0.43497705", "0.4343235", "0.43420258", "0.43415886" ]
0.7511691
0
Returns true if a given view exists.
def view_exists(self, view): with open("/dev/null", "w") as devnull: call = subprocess.Popen(self.cli + [PlatformJenkinsJavaCLI.GET_VIEW, view], stdout=devnull, stderr=devnull) call.wait() return call.returncode == 0
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def exists_for_view(self, view_id):\n raise NotImplementedError(\"calling abstract method\")", "def has_debug_view(name=None):\r\n for view in sublime.active_window().views():\r\n if is_debug_view(view):\r\n if name is not None:\r\n if view.name() == name:\r\n return True\r\n else:\r\n return True\r\n return False", "def has_view_permission(self, request, obj=None):\n return True\n opts = self.opts\n codename = get_permission_codename('view', opts)\n return any([\n request.user.has_perm(\"%s.%s\" % (opts.app_label, codename)),\n request.user.has_perm(\"%s.%s\" % (opts.app_label, codename), obj)])", "def is_view(self):\n return self._base is not None", "def is_db_view(db_table):\n if db_table in postgresql_views:\n return True\n return False", "def exists(self):\n try:\n select_template(self.get_paths())\n return True\n except TemplateDoesNotExist:\n return False", "def exists(self):\n return True", "def exists(self):\n return True", "def test_vote_exists(self):\n name_exists = 'vote' in self.views_module_listing\n is_callable = callable(self.views_module.vote)\n \n self.assertTrue(name_exists, f\"{FAILURE_HEADER}vote() view does not exist{FAILURE_FOOTER}\")\n self.assertTrue(is_callable, f\"{FAILURE_HEADER}vote() function does not exist or will not execute{FAILURE_FOOTER}\")", "def has_permission(self, request, view):\n return True", "def has_permission(self, request, view):\n return False", "def is_viewed(self):\n return self.has_label(VIEWED_LABEL)", "def test_VIEW_pass(self):\n for V in self.mod.views.itervalues():\n self.assertTrue(V.isset)", "def exists(path):\n return get_instance(path).exists(path)", "def exists(self):\r\n return os.path.exists(self.full_path)", "def exists(self):\n return self.path.exists()", "def exists(self, url):\n return (self.base_path / url).exists()", "def test_detail_exists(self):\n name_exists = 'detail' in self.views_module_listing\n is_callable = callable(self.views_module.detail)\n \n self.assertTrue(name_exists, f\"{FAILURE_HEADER}detail() view does not exist{FAILURE_FOOTER}\")\n self.assertTrue(is_callable, f\"{FAILURE_HEADER}detail() function does not exist or will not execute{FAILURE_FOOTER}\")", "def exists(self):\n return self.obj is not None", "def exists(self):\n return _os.path.exists(self.__str__())", "def is_element_in_view(self, element: Element) -> bool:\n return self.find_element_view(element=element) is not None", "def exists_task(self, task):\n assert task, \"Must input a valid task name.\"\n return any(self.get_by_task(task))", "def test_index_exists(self):\n name_exists = 'index' in self.views_module_listing\n is_callable = callable(self.views_module.index)\n \n self.assertTrue(name_exists, f\"{FAILURE_HEADER}index() view does not exist{FAILURE_FOOTER}\")\n self.assertTrue(is_callable, f\"{FAILURE_HEADER}index() function does not exist or will not execute{FAILURE_FOOTER}\")", "def exists(self) -> bool:\n p = pathlib.Path(self.summary_path)\n return p.exists()", "def exists(self, path: str) -> bool:\n pass", "def _is_drf_view(pattern):\n return hasattr(pattern.callback, 'cls') and issubclass(pattern.callback.cls,\n APIView)", "def exists(self) -> bool:\n doc_ref = self.doc_ref\n if isinstance(doc_ref, DocumentReference):\n return doc_ref.get().exists\n return False", "def exists(self):\n return self.islink() or exists(self._path)", "def Exists(self, path: str) -> bool:\n ...", "def exists(self) -> bool:\n try:\n result = self.get()\n except KeyError:\n return False\n return True", "def is_view_loaded(view):\n\n if not G.AGENT:\n 
return\n if not G.AGENT.joined_workspace:\n return\n if view.is_loading():\n return\n\n buf = get_buf(view)\n if not buf or buf.get('buf') is None:\n return\n\n return buf", "def exists(self):\n return bool(self.get())", "def node_exists(self):\n return self.oid is not None", "def exists(request, pagename, filename):\n fpath = getFilename(request, pagename, filename)\n return os.path.exists(fpath)", "def exists(obj: Dict, path: str) -> bool:\n\n return get(obj, path) is not None", "def exists(self):\n\n return os.path.exists(self.path)", "def _has(self, name):\n return hasattr(self._, name)", "def does_exist(self, index):\n if index in self.map:\n return True\n return False", "def view(self, name):\n try:\n return self._views[name]\n except KeyError:\n raise NoSuchView(name)", "def exists(self):\n return os.path.isfile(self.location)", "def has_view_permissions(self, obj):\n queryset = self.model.objects.filter(pk=obj.pk)\n if hasattr(queryset, 'has_view_permissions'):\n return queryset.has_view_permissions( PyFormsMiddleware.user() )\n else:\n return True", "def _exists (self):\n cursor = self._exec (self.select)\n return bool (cursor.fetchall ())", "def has_permission(self, request, view):\n board_id = view.kwargs.get(\"pk\")\n owner_id = Board.objects.get(pk=board_id).owner.pk\n return request.user.id == owner_id", "def f_exists(self, varname):\r\n return (varname in self.locals_ptr)", "def _url_exists(self, url):\n return url_exists(url)", "def validate_view_naming(view_file):\n parsed = sqlparse.parse(view_file.read_text())[0]\n tokens = [\n t\n for t in parsed.tokens\n if not (t.is_whitespace or isinstance(t, sqlparse.sql.Comment))\n ]\n is_view_statement = (\n \" \".join(tokens[0].normalized.split()) == \"CREATE OR REPLACE\"\n and tokens[1].normalized == \"VIEW\"\n )\n if is_view_statement:\n target_view = str(tokens[2]).strip().split()[0]\n try:\n [project_id, dataset_id, view_id] = target_view.replace(\"`\", \"\").split(\".\")\n if not (\n view_file.parent.name == view_id\n and view_file.parent.parent.name == dataset_id\n and view_file.parent.parent.parent.name == project_id\n ):\n print(\n f\"{view_file} ERROR\\n\"\n f\"View name {target_view} not matching directory structure.\"\n )\n return False\n except Exception:\n print(f\"{view_file} ERROR\\n{target_view} missing project ID qualifier.\")\n return False\n else:\n print(\n f\"ERROR: {view_file} does not appear to be \"\n \"a CREATE OR REPLACE VIEW statement! 
Quitting...\"\n )\n return False\n return True", "def exists(self, key_name: str) -> bool:\n pass", "def exists(self, name):\n try:\n self.container.get_object(name)\n return True\n except NoSuchObject:\n return False", "def test_view_url_exists(self):\n response = self.client.get('/details/' + str(self.s.id))\n response2 = self.client.get(reverse('details', args=(self.s.id,)))\n self.assertEqual(response.status_code, 200)\n self.assertEqual(response2.status_code, 200)\n self.assertTemplateUsed(response2, 'notifications/details.html')", "def is_viewvc(self):\n if self.path == '/' + options.script_alias:\n return 1\n if self.path[:len(options.script_alias)+2] == \\\n '/' + options.script_alias + '/':\n return 1\n if self.path[:len(options.script_alias)+2] == \\\n '/' + options.script_alias + '?':\n return 1\n return 0", "def has_permission(self, request, view):\n user = request.user\n try:\n user.user_client\n return True\n except Exception:\n return False", "def exists(self) -> bool:\n return self._file_exists()", "def exists(self, name):\n return self.backend.exists(name)", "def test_view_url_exists(self):\n response = self.client.get('')\n response2 = self.client.get(reverse('index'))\n self.assertEqual(response.status_code, 200)\n self.assertEqual(response2.status_code, 200)\n self.assertTemplateUsed(response2, 'notifications/index.html')", "def exists(_env):\n return True", "def exists(_env):\n return True", "def exists(self, path):", "def exists(self):\n # TODO: What about broken sym-links?\n return os.path.exists(self.path)", "def exists (self, uuid):\n return self.read (uuid) is not None", "def IsVisibleInView(object_id, view=None):\n rhobj = rhutil.coercerhinoobject(object_id, True, True)\n viewport = __viewhelper(view).MainViewport\n bbox = rhobj.Geometry.GetBoundingBox(True)\n return rhobj.Visible and viewport.IsVisible(bbox)", "def exists(path: str) -> bool:\n pass", "def _has(self, key):\n path = self._get_key_path(key)\n return exists(path)", "def exists(self):\r\n try:\r\n self.refresh()\r\n except:\r\n return False\r\n return True", "def test_view_url_exists_at_desired_location(self):\n response = self.client.get('')\n self.assertEqual(response.status_code, 200)", "def named_view(self, name):\n\t\tfor view in self.views:\n\t\t\tif view.name == name:\n\t\t\t\treturn view\n\t\treturn None", "def has(self, tag, index):\n return self.get(tag, index) is not None", "def has_permission(self, request, view):\n return has_permission(request.user, Permissions.CAN_ADVANCE_SEARCH)", "def has_permission(self, request, view):\n if request.user.is_authenticated():\n return True\n return False", "def __contains__(self, item):\n\n if self.is_view:\n return item in self._view\n return item in self._storage", "def node_exists(self, node_name: str) -> bool:\r\n return self.get_authentic_node_name(node_name) is not None", "def _has_template(self, target):\n\t\tif target.has_key('use'):\n\t\t\treturn True\n\t\telse:\n\t\t\treturn None", "def has_permission(self, request, view):\n usuario = request.user\n return str(usuario) == \"AnonymousUser\"", "def objExists(*args, **kwargs)->bool:\n pass", "def exists(self, path):\n return self.dir_exists(path) or self.file_exists(path)", "def is_exists(self):\n\n return os.path.isfile(os.path.join(self.scripts_dir, self.python_name))", "def has(self, key):\n return self.collection.find_one({'_id': key}) is not None", "def exists(self, name):\n return self.endpoint.exists(name)", "def path_exists(path):\r\n return os.path.exists(path)", "def 
definition_exists(name: str) -> bool:\n try:\n return bool(lookup_definition(name))\n except:\n return False", "def has_permission(self, request, view):\n usuario = request.user\n return str(usuario.grupo) == \"Vendedor\"", "def exists (self, db):\n return hasattr(self, db) and isinstance(getattr(self, db), Database)", "def path_exists(path):\n return os.path.exists(path)", "async def exists(self, tag_name):\n try:\n if await self.get_id(tag_name):\n return True\n except RtbDoesntExists:\n return False", "def exists(self, destination: Route) -> bool:\n i = hash(destination.addr)\n return i in self.keys()", "def exists(self):\n\n if self:\n pass", "def ResourceExists(resource_name, search_user_paths=True):\n try:\n ResourcePath(resource_name, search_user_paths)\n return True\n except ResourceNotFound:\n return False", "def exists(self, obj):\n return False", "def exist(self, table: str, libref: str =\"\") -> bool:\n code = 'data _null_; e = exist(\"'\n if len(libref):\n code += libref+\".\"\n code += \"'\"+table.strip()+\"'n\"+'\"'+\");\\n\"\n code += 'v = exist(\"'\n if len(libref):\n code += libref+\".\"\n code += \"'\"+table.strip()+\"'n\"+'\"'+\", 'VIEW');\\n if e or v then e = 1;\\n\"\n code += \"te='TABLE_EXISTS='; put te e;run;\\n\"\n\n ll = self.submit(code, \"text\")\n\n l2 = ll['LOG'].rpartition(\"TABLE_EXISTS= \")\n l2 = l2[2].partition(\"\\n\")\n exists = int(l2[0])\n\n return bool(exists)", "def has_permission(self, request, view):\n if not request.resource:\n return False\n\n return Classroom.objects.filter(\n pk=view.get_related_classroom_id(), playlist_id=request.resource.id\n ).exists()", "def exists(self, answer):\n return self.find(answer) is not None", "def db_exists(self, db):\n # HDF5 is file based\n return os.path.isfile(db)", "def test_results_exists(self):\n name_exists = 'results' in self.views_module_listing\n is_callable = callable(self.views_module.results)\n \n self.assertTrue(name_exists, f\"{FAILURE_HEADER}results() view does not exist{FAILURE_FOOTER}\")\n self.assertTrue(is_callable, f\"{FAILURE_HEADER}results() function does not exist or will not execute{FAILURE_FOOTER}\")", "def exists(self):\n if self._exists is None:\n self._exists = \\\n bool(\n self.oxdb.get_executed_cursor(\n EXISTS, self.variable_name).fetchone())\n\n return self._exists", "def exists(self):\n return Path(self.model_dir).exists()", "def exists(self) -> bool:\n self.connection.describe_activity_type(self.domain.name, self.name, self.version)\n return True", "def has_key(self, key):\n return self.__dict__.has_key(key)", "def exists(self):\n return self._repository is not None", "def exists(path):\n return os.path.exists(path)", "def can_access(self, permission_name: str, view_name: str) -> bool:\n\n user = g.user\n if user.is_anonymous:\n return self.is_item_public(permission_name, view_name)\n return self._has_view_access(user, permission_name, view_name)", "def exists(self):\n return bool(get_zone_by_name(self.get_name(refresh=False)))" ]
[ "0.8074527", "0.70116293", "0.68193936", "0.66889185", "0.660105", "0.6589236", "0.6287463", "0.6287463", "0.6283923", "0.6276442", "0.62659264", "0.6244167", "0.623865", "0.6238351", "0.6232471", "0.6214478", "0.61842877", "0.6141818", "0.61366314", "0.6128516", "0.612039", "0.6117486", "0.6086874", "0.6066519", "0.6056968", "0.6037086", "0.60223305", "0.59939903", "0.59724456", "0.5956503", "0.5950527", "0.5944642", "0.5931964", "0.5929768", "0.5918808", "0.59092784", "0.59036475", "0.58734226", "0.58720636", "0.5870851", "0.5867123", "0.5860575", "0.5859553", "0.58575696", "0.58515656", "0.584262", "0.5841612", "0.5835419", "0.58172125", "0.5814228", "0.58092487", "0.5803882", "0.5796553", "0.5793565", "0.5792601", "0.5792601", "0.57888263", "0.57875246", "0.57860655", "0.57830155", "0.57693505", "0.5768343", "0.5759227", "0.57548845", "0.57466084", "0.5745962", "0.57403314", "0.5731955", "0.5731314", "0.57203245", "0.5719698", "0.5713433", "0.57132274", "0.570654", "0.5705339", "0.5701377", "0.57007235", "0.56912833", "0.5690833", "0.5686461", "0.56778425", "0.5677353", "0.5676394", "0.56697065", "0.56660026", "0.5663643", "0.56589246", "0.56528866", "0.5651702", "0.56472033", "0.5645321", "0.56442827", "0.5639531", "0.56207556", "0.5619996", "0.5611758", "0.56067336", "0.5592604", "0.55865353", "0.5585722" ]
0.76935965
1
Creates a View, defined by XML in view_xml_filename. If the view already exists, it will be updated using the provided definition.
def set_view(self, view, view_xml_filename): if self.view_exists(view): command = PlatformJenkinsJavaCLI.UPDATE_VIEW else: command = PlatformJenkinsJavaCLI.CREATE_VIEW with open(view_xml_filename) as view_xml_file: view_xml = view_xml_file.read() call = subprocess.Popen(self.cli + [command, view], stdin=subprocess.PIPE) call.communicate(view_xml) call.wait()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def create_view(name, fields=''):\n if '/' in name:\n blueprint_name, model_name = name.split('/')\n output_file = 'blueprints/%s/views.py' % blueprint_name\n else:\n model_name = name\n output_file = 'views.py'\n file_exists = os.path.exists(output_file)\n form_data = []\n for f in fields.split():\n form_data.append('form.%s.data' % f.split(':')[0])\n views = create_view.views_scaffold % dict(name=model_name.lower(),\n model_name=model_name.capitalize(),\n form_data=', '.join(form_data))\n with open(output_file, 'a') as out_file:\n if not file_exists:\n views = '''%(imports)s\\n%(rest)s''' % dict(imports=create_view.imports,\n rest=views)\n out_file.write(views)\n create_templates(name, fields)", "def create_view(self, view_name='', description='', fields=None, order=None, filters=''):\n res, _ = self.clients.resource_registry.find_resources(name=view_name, id_only=True)\n if len(res) > 0:\n raise BadRequest('The view resource with name: %s, already exists.' % view_name)\n\n #======================\n # Arg Validations\n #======================\n validate_is_instance(fields,list, 'Specified fields must be a list.')\n validate_true(len(fields)>0, 'Specfied fields must be a list.')\n if order is not None:\n validate_is_instance(order,list, 'Specified order must be a list of fields')\n for field in order:\n if not field in fields:\n raise BadRequest('The specified ordering field was not part of the search fields.')\n\n fields = set(fields) # Convert fields to a set for aggregation across the catalogs\n #======================================================================================================\n # Priorty Queue Index Matching\n #======================================================================================================\n\n pq = [] # Priority queue for matching\n catalog_id = None\n catalogs, _ = self.clients.resource_registry.find_resources(restype=RT.Catalog, id_only=False)\n for catalog in catalogs:\n if set(catalog.catalog_fields).issubset(fields):\n index_num = len(self.clients.catalog_management.list_indexes(catalog._id))\n heapq.heappush(pq, (index_num,catalog))\n if pq:\n weight, catalog = heapq.heappop(pq)\n if weight < self.heuristic_cutoff:\n catalog_id = catalog._id\n\n \n if catalog_id is None:\n catalog_id = self.clients.catalog_management.create_catalog('%s_catalog'% view_name, keywords=list(fields))\n\n view_res = View(name=view_name, description=description)\n view_res.order = order\n view_res.filters = filters\n view_id, _ = self.clients.resource_registry.create(view_res)\n self.clients.resource_registry.create_association(subject=view_id, predicate=PRED.hasCatalog,object=catalog_id)\n return view_id", "def createViews(views):\n ...", "def newEditorView(self, fn, caller, filetype=\"\", indexes=None):\n editor, assembly = self.cloneEditor(caller, filetype, fn)\n \n self._addView(assembly, fn, caller.getNoName(), indexes=indexes)\n self._modificationStatusChanged(editor.isModified(), editor)\n self._checkActions(editor)\n \n return editor", "def create_view(self, repo, view, sql):\n return self.user_con.create_view(\n repo=repo, view=view, sql=sql)", "def __newDocumentView(self):\n aw = self.activeWindow()\n if aw:\n self.newEditorView(aw.getFileName(), aw, aw.getFileType())", "def view(view_name, db=None, empty_ok=True, **kwargs):\n db_name = DEFAULT_DATABASE if db is None else db\n db = dbs[db_name]\n name = '%s%s/%s' % (db_name, DESIGN_DOC_SUFFIX, view_name)\n if empty_ok:\n return _view_empty_ok(db, name, kwargs)\n return db.view(name, **kwargs)", 
"def view(self, view_id):\r\n return resources.View(self, view_id)", "def _trait_view ( cls, name, view_element, default_name, view_elements,\n editable_traits ):\n # If a view element was passed instead of a name or None, return it:\n if isinstance( name, ViewElement ):\n return name\n\n # Get the ViewElements object associated with the class:\n view_elements = view_elements()\n\n # The following test should only succeed for objects created before\n # traits has been fully initialized (such as the default Handler):\n if view_elements is None:\n return None\n\n if name:\n if view_element is None:\n # If only a name was specified, return the ViewElement it\n # matches, if any:\n return view_elements.find( name )\n\n # Otherwise, save the specified ViewElement under the name\n # specified:\n view_elements.content[ name ] = view_element\n return\n\n # Get the default view/view name:\n name = default_name()\n\n # If the default is a View, return it:\n if isinstance( name, ViewElement ):\n return name\n\n # Otherwise, get all View objects associated with the object's class:\n names = view_elements.filter_by()\n\n # If the specified default name is in the list, return its View:\n if name in names:\n return view_elements.find( name )\n\n # If there is only one View, return it:\n if len( names ) == 1:\n return view_elements.find( names[0] )\n\n # Otherwise, create and return a View based on the set of editable\n # traits defined for the object:\n from enthought.traits.ui.api import View\n return View( editable_traits() )", "def creates_view(self):\n return self.statements[0].creates_view()", "def add_view(self, schema, create=True):\n if not constants.NAME_RX.match(schema[\"name\"]):\n raise ValueError(\"invalid view name\")\n if utils.name_in_nocase(schema[\"name\"], self.db[\"tables\"]):\n raise ValueError(\"name is already in use for a table\")\n if utils.name_in_nocase(schema[\"name\"], self.db[\"views\"]):\n raise ValueError(\"name is already in use for a view\")\n if create:\n sql = 'CREATE VIEW \"%s\" AS %s' % (\n schema[\"name\"],\n dbshare.query.get_sql_statement(schema[\"query\"]),\n )\n self.dbcnx.execute(sql)\n cursor = self.dbcnx.cursor()\n try:\n sql = 'PRAGMA table_info(\"%s\")' % schema[\"name\"]\n cursor.execute(sql)\n except sqlite3.Error: # Invalid view\n sql = 'DROP VIEW \"%s\"' % schema[\"name\"]\n cursor.execute(sql)\n raise ValueError(\"invalid view; maybe non-existent column?\")\n # Source names considering quotes and disregarding AS part, if any.\n schema[\"sources\"] = dbshare.query.get_from_sources(schema[\"query\"][\"from\"])\n schema[\"columns\"] = [{\"name\": row[1], \"type\": row[2]} for row in cursor]\n sql = \"INSERT INTO %s (name, schema) VALUES (?,?)\" % constants.VIEWS\n with self.dbcnx:\n self.dbcnx.execute(sql, (schema[\"name\"], json.dumps(schema)))\n self.db[\"views\"][schema[\"name\"]] = schema", "def register_view(self, viewfunc, url_rule=None) :\n\n\t\tviewid = View.parse_id(viewfunc, self.settings.VIEW_ROOT)\n\t\t\n\t\tif viewid not in self.views :\n\t\t\t# Add view if not exists\n\t\t\tv = View(\n\t\t\t\tid = viewid,\n\t\t\t\tviewfunc = viewfunc,\n\t\t\t\turl_rule = url_rule,\n\t\t\t)\n\t\t\tself.views[viewid] = v\n\n\t\telse :\n\t\t\t# Update view if exists\n\t\t\tv = self.views[viewid]\n\t\t\tv.viewfunc = viewfunc\n\n\t\t\tif url_rule is not None :\n\t\t\t\tv.url_rule = url_rule\n\n\t\treturn v", "def _create_or_alter_view(self, survey_data):\n self.log.info(\"Creating or altering view vw_AllSurveyData \")\n edit_view = self._get_query('edit_view') + 
\"( \" + survey_data + \" )\"\n self.db.execute_query(edit_view)\n self.log.info(\"View was edited successfully\")", "def addViewToDb(self,name):\n\t\tsql = \"INSERT INTO hudson_views(viewname) VALUES (%s)\"\n\t\tcsr = self.db.cursor()\n\t\tcsr.execute(sql,[name])", "def make_view(app, view_class=View, view_name='View', **kwargs):\n kwargs.update({'__app__': app})\n return type(view_name, (view_class, ), kwargs)", "def test_migrate_view_fields(self):\n self.test_view = RecordView.create(\n self.testcoll, test_view_id, test_view_create_values\n )\n migrate_coll_data(self.testcoll)\n # Read field definition and check for inline field list\n view_data = self.check_entity_values(\n \"_view\", test_view_id, check_values=test_view_migrated_values\n )\n return", "def _create_from_template(self):\n template_file = self._helper._get_template_file_path()\n self._engine.open_file_by_path(template_file)\n self._save_current_as_new()", "def saveViews(lib, filename='views', path=os.path.expanduser('~')):\n ext = '.camera'\n os.chdir(path)\n f = open(filename + ext, 'wb')\n pickle.dump(lib, f, pickle.HIGHEST_PROTOCOL)\n f.close()", "def create_or_replace_view(self, relation) -> None:\n database = self.quoted(self._correct_case(relation.database))\n schema = self.quoted(self._correct_case(relation.schema))\n ddl_statement = f\"\"\"CREATE OR REPLACE VIEW\n{self.quoted_dot_notation(relation)}\nAS\n{relation.view_ddl}\n\"\"\"\n engine = self.get_connection(database_override=database,\n schema_override=schema)\n try:\n engine.execute(ddl_statement)\n except Exception as exc:\n logger.info(\"Failed to create %s %s:%s\", relation.materialization.name,\n self.quoted_dot_notation(relation),\n exc)\n raise exc\n logger.info('Created relation %s', self.quoted_dot_notation(relation))", "def render(self, filename: str, view: bool = False) -> None:\n self._dot.render(filename, view=view)", "def validate_view_naming(view_file):\n parsed = sqlparse.parse(view_file.read_text())[0]\n tokens = [\n t\n for t in parsed.tokens\n if not (t.is_whitespace or isinstance(t, sqlparse.sql.Comment))\n ]\n is_view_statement = (\n \" \".join(tokens[0].normalized.split()) == \"CREATE OR REPLACE\"\n and tokens[1].normalized == \"VIEW\"\n )\n if is_view_statement:\n target_view = str(tokens[2]).strip().split()[0]\n try:\n [project_id, dataset_id, view_id] = target_view.replace(\"`\", \"\").split(\".\")\n if not (\n view_file.parent.name == view_id\n and view_file.parent.parent.name == dataset_id\n and view_file.parent.parent.parent.name == project_id\n ):\n print(\n f\"{view_file} ERROR\\n\"\n f\"View name {target_view} not matching directory structure.\"\n )\n return False\n except Exception:\n print(f\"{view_file} ERROR\\n{target_view} missing project ID qualifier.\")\n return False\n else:\n print(\n f\"ERROR: {view_file} does not appear to be \"\n \"a CREATE OR REPLACE VIEW statement! 
Quitting...\"\n )\n return False\n return True", "def mkview(self,\n context=[],\n viewobj=None):\n if viewobj == None:\n raise ValueError, \"mkview: viewobj is None\"\n return jsoncall.do_call(\"mkview\", {'modelname':self.modelname,\\\n 'user':self.user,\\\n 'password':self.password,\\\n 'context':context,\\\n 'viewobj':viewobj.__dict__},\n self.connection)", "def _create_view(self, view, schema=None, config=None):\n viewname, vschema = view[\"__tablename__\"].split(' ')[0], view[\"__schema__\"].split(' ')[0]\n try:\n dve = SQL('NULL from {}.{}').format(Identifier(vschema),\n Identifier(viewname))\n veq = self.__session.query(self._sql_to_string(dve)).limit(1)\n self.__session.execute(veq)\n self._commit()\n except ProgrammingError:\n self._rollback()\n like = text(\"information_schema.routines.routine_name like 'crosstab%'\")\n count = self.__session.query('* FROM information_schema.routines')\n count = count.filter(like).count()\n if int(count) == 0:\n self._create_extension(config)\n self.exschema = 'public'\n else:\n like = text(\"information_schema.routines.routine_name like 'crosstab%'\")\n count = self.__session.query('routine_schema FROM'\n ' information_schema.routines')\n count = count.filter(like).limit(1)\n count = self.__session.execute(count).fetchone()[0]\n self._commit()\n self.exschema = count\n like = text(\"SELECT has_schema_privilege(:exschema, 'USAGE')\")\n like = self.__session.execute(like,\n {\"exschema\": self.exschema}).fetchone()[0]\n self._commit()\n if not like:\n self._grant_access(config)\n viewst, raw = self._sql_to_string(view[\"__statement__\"]), '{}.crosstab'\n defsch = self._sql_to_string(SQL(raw).format(Identifier(schema)))\n exsch = SQL(raw).format(Identifier(self.exschema))\n self.__session.execute(viewst.replace(defsch, self._sql_to_string(exsch)))\n self._commit()\n except Exception:\n self._rollback()\n self._reset_session()\n raise", "def view(name, selectable, *, clear: bool = False):\n log.debug('view(%r, clear=%r)', name, clear)\n\n if clear:\n DDL[name] = None, None\n return None\n\n DDL[name] = (CreateView(name, selectable),\n DropView(name))\n\n return make_table(selectable, name=name)", "def DEADcreate_v_fix_view():\n sql_view = \"\"\"create or replace view v_fix as\n SELECT \n fix.fix_ident, \n fix.fix_center,\n ST_Y(ST_Transform(fix.fix_center, 4326)) as fix_lat84,\n ST_X(ST_Transform(fix.fix_center, 4326)) as fix_lon84\n \n FROM \n fix\"\"\"\n conf.Cur.execute(sql_view)\n conf.Con.commit()", "def view(self, viewname, **data):\n view = self.mylookup.get_template(viewname + '.mako').render(**data)\n \n self.res.status = 202\n self.res.content_type = 'text/html'\n self.res.content_length = len(view)\n \n self.start_response(self.res.status, self.res.headerlist)\n return view", "def create_revision(self, model_definition_uid):\n WMLResource._chk_and_block_create_update_for_python36(self)\n\n if self._client.ICP_30 is None and not self._client.CLOUD_PLATFORM_SPACES and not self._client.ICP_PLATFORM_SPACES:\n raise WMLClientError(\n u'Revisions APIs are not supported in this release.')\n\n self._client._check_if_either_is_set()\n\n model_defn_id = str_type_conv(model_definition_uid)\n ModelDefinition._validate_type(model_defn_id, u'model_defn_id', STR_TYPE, True)\n\n print(\"Creating model_definition revision...\")\n\n # return self._get_required_element_from_response(\n # self._create_revision_artifact_for_assets(model_defn_id, 'Model definition'))\n\n response = self._get_required_element_from_response(\n 
self._create_revision_artifact_for_assets(model_defn_id, 'Model definition'))\n\n if not self._client.CLOUD_PLATFORM_SPACES and not self._client.ICP_PLATFORM_SPACES:\n return response\n else:\n entity = response[u'entity']\n\n try:\n del entity[u'wml_model_definition'][u'ml_version']\n except KeyError:\n pass\n\n final_response = {\n \"metadata\": response[u'metadata'],\n \"entity\": entity\n }\n\n return final_response", "def create_view(self):\n title_label = Label(self, text='Upload, Preview, Describe and Visualize',\n fg='blue', font=('Arial', 16))\n title_label.pack(fill=BOTH, expand=True)\n select_file_button = Button(self, background='White', text='Select Data File [.csv, .xlsx, .xls, .json, .txt]',\n command=self.start_upload)\n select_file_button.pack(padx=5, pady=10)", "def loadViews(filename='views.camera', path=os.path.expanduser('~')):\n os.chdir(path)\n with open(filename, 'rb') as f:\n return pickle.load(f)", "def create(cls, xml):\n raise Exception('Not Implemented Yet')", "def save_xml(self, filename):\n if \".xml\" not in filename:\n filename = filename + \".xml\"\n\n shutil.copyfile(self.env.model_file, filename)", "def view(self, view_name, schema=None, wrapper=None, **params):\n\n if view_name.startswith('/'):\n view_name = view_name[1:]\n if view_name == '_all_docs':\n view_path = view_name\n elif view_name == '_all_docs_by_seq':\n view_path = view_name\n else:\n view_name = view_name.split('/')\n dname = view_name.pop(0)\n vname = '/'.join(view_name)\n view_path = '_design/%s/_view/%s' % (dname, vname)\n\n return ViewResults(self.raw_view, view_path, wrapper, schema, params)", "def _add_view(self, window, view):\r\n\r\n # If no 'relative_to' is specified then the view is positioned\r\n # relative to the editor area.\r\n if len(view.relative_to) > 0:\r\n relative_to = window.get_view_by_id(view.relative_to)\r\n \r\n else:\r\n relative_to = None\r\n\r\n # Add the view to the window.\r\n window.add_view(\r\n view, view.position, relative_to, (view.width, view.height)\r\n )\r\n\r\n return", "def SaveView(view_name):\n\n # Start the RoboDK API\n RDK = robolink.Robolink()\n\n # Get the 3D view pose\n vp = RDK.ViewPose()\n\n # Convert to a string as XYZABC\n vp_str = str(robomath.Pose_2_KUKA(vp))\n\n # Save it as a station parameter (saved with the RDK file)\n RDK.setParam(view_name, vp_str)\n\n RDK.ShowMessage(\"Current view point saved: \" + vp_str, False)", "def updateViewsFromFile(self, id, viewsFile):\n\n kparams = KalturaParams()\n kparams.addIntIfDefined(\"id\", id);\n kfiles = {\"viewsFile\": viewsFile}\n self.client.queueServiceActionCall(\"metadata_metadataprofile\", \"updateViewsFromFile\", \"KalturaMetadataProfile\", kparams, kfiles)\n if self.client.isMultiRequest():\n return self.client.getMultiRequestResult()\n resultNode = self.client.doQueue()\n return KalturaObjectFactory.create(resultNode, 'KalturaMetadataProfile')", "def from_file(self, xml_filepath: str) -> None:\n\t\t# Set filename and get root element of the tree\n\t\txml_filelist = xml_filepath.split(\"/\")\n\t\tself.xml_dir = xml_filelist[0]\n\t\tself.xml_name = xml_filelist[1]\n\t\troot: ET.Element = get_xml_file(xml_filepath)\n\t\t# Set name\n\t\tself.name = root.tag\n\n\t\t# Iterate over and add child nodes\n\t\tchild: ET.Element\n\t\tfor child in root:\n\t\t\t# Determine if child is a SubNode or a Node\n\t\t\t# If child has children or attributes it is a Node\n\t\t\tif len(child) or len(child.attrib):\n\t\t\t\t# Add 
Node\n\t\t\t\tself.add_node(Node(child))\n\t\t\telse:\n\t\t\t\tself.add_subnode(SubNode(child))", "def view(self) -> 'outputs.ViewDefinitionResponse':\n return pulumi.get(self, \"view\")", "def view(filepath, quiet: bool = False) -> None:\n try:\n view_func = getattr(view, PLATFORM)\n except AttributeError:\n raise RuntimeError(f'platform {PLATFORM!r} not supported')\n view_func(filepath, quiet=quiet)", "def document_view(self, doc_type, view, path, is_zip=False, **kwargs):\n return self.get('fileops/documentView', api='CONV', params={\n 'root': self.root,\n 'path': path,\n 'type': doc_type,\n 'view': view,\n 'zip': 1 if is_zip else 0,\n }, **kwargs)", "def convert_xml_to_rst(src_file, dst_file, template_file, author, maintainer,\n version=None, committer=None):\n template = Template(_read_file(template_file))\n try:\n with open(src_file, 'r') as f:\n e = objectify.parse(f).getroot()\n template_string = template.render(e=e, author=author, maintainer=maintainer,\n version=version, committer=committer)\n _write_template_to_file(dst_file, template_string)\n except (TemplateError, LxmlError, OSError, IOError) as e:\n print(e)", "def open( self, filename ):\r\n #http://www.oooforum.org/forum/viewtopic.phtml?t=35344\r\n properties = []\r\n properties.append( OpenOfficeDocument._makeProperty( 'Hidden', True ) ) \r\n properties = tuple( properties )\r\n self.oodocument = self.openoffice.loadComponentFromURL( uno.systemPathToFileUrl( os.path.abspath( filename ) ), \"_blank\", 0, properties )", "def register_view( self, target, view ):\n skins = getToolByName( target, 'portal_skins', None )\n write = self.stream.write\n\n if skins._getOb( view, None ) is not None:\n write( \"Failed to register view '%s' (already exists)\\n\" % view )\n return view\n\n found = 0\n dw_path = os.path.join( minimalpath(package_home( globals() )), *view.split('/') )\n dw_path = re.sub(r'\\\\', r'/', dw_path)\n\n for dir_path in DirectoryView.manage_listAvailableDirectories():\n if dir_path.endswith( dw_path ):\n found = 1\n break\n\n if not found:\n write( \"Failed to register view '%s' (directory not found)\\n\" % view )\n return view\n\n # TODO: handle paths better\n dw_path = dw_path.replace( '\\\\', '/' )\n DirectoryView.manage_addDirectoryView( skins, dw_path )\n write( \"Registered view '%s' = '%s'\\n\" % ( view, dw_path ) )\n\n return view", "def draw_design(self, dxfversion=None):\n\n if self.file == None:\n raise Exception(\"No file name given. 
Use design.file to set name.\")\n \n if dxfversion is None:\n self.drawing = ezdxf.new()\n else:\n self.drawing = ezdxf.new(dxfversion=dxfversion)\n self.msp = self.drawing.modelspace()\n \n for x in self.layers:\n self.drawing.layers.add(self.layers[x]['name'], color=self.layers[x]['color'])\n\n for x in self.features:\n self.add_polyline(self.layers[self.features[x].layer],self.features[x].coord,\n self.features[x].open)\n \n self.drawing.saveas(self.file)", "def create_view(self, start: int = 0, stop: int = 0):\n stmt = f\"\"\"create or replace view {self._view_name} as {self.qry}\"\"\"\n if start != 0 or stop != 0:\n sql = stmt + f\" limit {stop} offset {start}\"\n else:\n sql = stmt\n self.execquery(sql)", "def open_document(filepath, show=True):\n\t\n\tk = krita.Krita.instance()\n\tprint('Debug: opening %s' % filepath)\n\tdoc = k.openDocument(filepath)\n\tif show:\n\t\tApplication.activeWindow().addView(doc)\n\treturn doc", "def _addView(self, win, fn=None, noName=\"\", addNext=False, indexes=None):\n raise RuntimeError('Not implemented')", "def relaxng(view_type):\n if view_type not in _relaxng_cache:\n with tools.file_open(os.path.join('base', 'rng', '%s_view.rng' % view_type)) as frng:\n try:\n relaxng_doc = etree.parse(frng)\n _relaxng_cache[view_type] = etree.RelaxNG(relaxng_doc)\n except Exception:\n _logger.exception('Failed to load RelaxNG XML schema for views validation')\n _relaxng_cache[view_type] = None\n return _relaxng_cache[view_type]", "def viewfactory(self):\n raise NotImplementedError()", "def create_version(self, task, take_name):\n # just renew the scene\n v = Version(task=task, take_name=take_name)\n v.update_paths()\n\n DBSession.add(v)\n DBSession.commit()\n\n # create a file\n try:\n os.makedirs(os.path.dirname(v.absolute_full_path))\n except OSError: # dir exists\n pass\n\n with open(v.absolute_full_path, 'w+'):\n pass\n\n return v", "def from_xml(cls, xml_data, system, id_generator):\r\n\r\n xml_object = etree.fromstring(xml_data)\r\n # VS[compat] -- just have the url_name lookup, once translation is done\r\n url_name = xml_object.get('url_name', xml_object.get('slug'))\r\n def_id = id_generator.create_definition(xml_object.tag, url_name)\r\n usage_id = id_generator.create_usage(def_id)\r\n\r\n # VS[compat] -- detect new-style each-in-a-file mode\r\n if is_pointer_tag(xml_object):\r\n # new style:\r\n # read the actual definition file--named using url_name.replace(':','/')\r\n filepath = cls._format_filepath(xml_object.tag, name_to_pathname(url_name))\r\n definition_xml = cls.load_file(filepath, system.resources_fs, def_id)\r\n else:\r\n definition_xml = xml_object\r\n filepath = None\r\n\r\n definition, children = cls.load_definition(definition_xml, system, def_id) # note this removes metadata\r\n\r\n # VS[compat] -- make Ike's github preview links work in both old and\r\n # new file layouts\r\n if is_pointer_tag(xml_object):\r\n # new style -- contents actually at filepath\r\n definition['filename'] = [filepath, filepath]\r\n\r\n metadata = cls.load_metadata(definition_xml)\r\n\r\n # move definition metadata into dict\r\n dmdata = definition.get('definition_metadata', '')\r\n if dmdata:\r\n metadata['definition_metadata_raw'] = dmdata\r\n try:\r\n metadata.update(json.loads(dmdata))\r\n except Exception as err:\r\n log.debug('Error in loading metadata %r', dmdata, exc_info=True)\r\n metadata['definition_metadata_err'] = str(err)\r\n\r\n # Set/override any metadata specified by policy\r\n cls.apply_policy(metadata, system.get_policy(usage_id))\r\n\r\n 
field_data = {}\r\n field_data.update(metadata)\r\n field_data.update(definition)\r\n field_data['children'] = children\r\n\r\n field_data['xml_attributes']['filename'] = definition.get('filename', ['', None]) # for git link\r\n kvs = InheritanceKeyValueStore(initial_values=field_data)\r\n field_data = KvsFieldData(kvs)\r\n\r\n return system.construct_xblock_from_class(\r\n cls,\r\n # We're loading a descriptor, so student_id is meaningless\r\n ScopeIds(None, xml_object.tag, def_id, usage_id),\r\n field_data,\r\n )", "def test_adding_dynamic_view(empty_model):\n viewset = ViewSet(model=empty_model)\n view = viewset.create_dynamic_view(key=\"dyn1\", description=\"test\")\n assert view.model is empty_model\n assert view.get_viewset() is viewset\n assert view.description == \"test\"\n assert view in viewset.dynamic_views", "def view_doc(request):\n cfg = request.cfg\n document = request.where\n filename = cfg.path(os.path.join(cfg.options.template_dir, \"docroot\", document))\n\n # Stat the file to get content length and last-modified date.\n try:\n info = os.stat(filename)\n except OSError as v:\n raise ViewVCException(\n 'Static file \"%s\" not available (%s)' % (document, str(v)), \"404 Not Found\"\n )\n content_length = str(info[stat.ST_SIZE])\n last_modified = info[stat.ST_MTIME]\n\n # content_length + mtime makes a pretty good etag.\n if check_freshness(request, last_modified, \"%s-%s\" % (content_length, last_modified)):\n return\n\n try:\n fp = open(filename, \"rb\")\n except IOError as v:\n raise ViewVCException(\n 'Static file \"%s\" not available (%s)' % (document, str(v)), \"404 Not Found\"\n )\n\n if document[-3:] == \"png\":\n mime_type = \"image/png\"\n elif document[-3:] == \"jpg\":\n mime_type = \"image/jpeg\"\n elif document[-3:] == \"gif\":\n mime_type = \"image/gif\"\n elif document[-3:] == \"css\":\n mime_type = \"text/css\"\n else: # assume HTML\n mime_type = None\n copy_stream(fp, get_writeready_server_file(request, mime_type, content_length=content_length))\n fp.close()", "def _showView(self, win, fn=None):\n raise RuntimeError('Not implemented')", "def export_to_file(self, filename):\n if len(filename.split(\".\")) == 1:\n filename += \".xml\"\n xmlstring = self._dommodel.toprettyxml(\" \", \"\\n\")\n with open(filename, \"w\") as f:\n f.write(xmlstring)", "def configure_traits ( self, filename = None, view = None,\n kind = None, edit = True,\n context = None, handler = None,\n id = '', scrollable = None, **args ):\n ### JMS: Is it correct to assume that nonmodel options for 'kind'\n ### behave modally when called from this method?\n if filename is not None:\n fd = None\n try:\n import cPickle\n fd = open( filename, 'rb' )\n self.copy_traits( cPickle.Unpickler( fd ).load() )\n except:\n if fd is not None:\n fd.close()\n\n if edit:\n from enthought.traits.ui.api import toolkit\n if context is None:\n context = self\n rc = toolkit().view_application( context, self.trait_view( view ),\n kind, handler, id, scrollable, args )\n if rc and (filename is not None):\n fd = None\n try:\n import cPickle\n fd = open( filename, 'wb' )\n cPickle.Pickler( fd, True ).dump( self )\n except:\n if fd is not None:\n fd.close()\n return rc\n\n return True", "def generateView(data):\n\n view = data[\"view\"]\n global h_include\n addInclude(h_include, view[\"type\"])\n global h_view\n h_view.append(\"class %s : public %s\" % (view[\"name\"], view[\"type\"]))\n h_view.append(\"{\")\n h_view.append(\"\tQ_OBJECT\")\n h_view.append(\"public:\")\n h_view.append(\"\t%s(QWidget *parent=0);\" % 
view[\"name\"])\n\n global c_include\n addInclude(c_include, \"QHeaderView\")\n\n global c_view\n c_view.append(\"%s::%s(QWidget *parent)\" % (view[\"name\"],view[\"name\"]))\n c_view.append(\"\\t: %s(parent)\" % view[\"type\"])\n c_view.append(\"{\")\n # TODO: should this be configurable?\n c_view.append(\"\tsetAlternatingRowColors(true);\")\n c_view.append(\"\tverticalHeader()->hide();\")\n c_view.append(\"\thorizontalHeader()->setResizeMode(QHeaderView::Stretch);\")\n c_view.append(\"\tsetTabKeyNavigation(false);\")\n c_view.append(\"\\tsetHorizontalScrollMode(QAbstractItemView::ScrollPerPixel);\")\n c_view.append(\"\\tsetVerticalScrollMode(QAbstractItemView::ScrollPerPixel);\")\n c_view.append(\"\")\n if get(view, \"sort\", True):\n c_view.append(\"\\tconnect(horizontalHeader(), SIGNAL(sortIndicatorChanged(int, Qt::SortOrder)),\")\n c_view.append(\"\\t SLOT(sortByColumn(int)) );\")\n c_view.append(\"\\tsetSortingEnabled(true);\")\n c_view.append(\"\\tsortByColumn(0, Qt::AscendingOrder);\")\n c_view.append(\"\\tsetEditTriggers(QAbstractItemView::AnyKeyPressed | QAbstractItemView::EditKeyPressed);\")\n if data.has_key(\"dialog\"):\n c_view.append(\"\\tconnect(this, SIGNAL(doubleClicked(const QModelIndex &)), SLOT(slotEdit(const QModelIndex &)) );\")\n c_view.append(\"}\\n\")\n\n if get(view, \"delete\") or get(view, \"insert\"):\n if data.has_key(\"container\"):\n generateViewInsertDelete(data)\n else:\n print \"Warning: cannot generate view inser/delete function without container\"\n\n if data.has_key(\"dialog\"):\n if data.has_key(\"container\"):\n generateViewSlotEdit(data)\n else:\n print \"Warning: cannot generate edit slot in view without container\"\n\n h_view.append(\"};\")", "def create(\n cls,\n draft_instrument_version,\n definition=None,\n implementation_context=None):\n\n raise NotImplementedError()", "def add_spec_view(\n config: Configurator,\n filepath: str,\n route: str = \"/openapi.yaml\",\n route_name: str = \"pyramid_openapi3.spec\",\n permission: str = NO_PERMISSION_REQUIRED,\n apiname: str = \"pyramid_openapi3\",\n) -> None:\n\n def register() -> None:\n settings = config.registry.settings.get(apiname)\n if settings and settings.get(\"spec\") is not None:\n raise ConfigurationError(\n \"Spec has already been configured. 
You may only call \"\n \"pyramid_openapi3_spec or pyramid_openapi3_spec_directory once\"\n )\n\n if hupper.is_active(): # pragma: no cover\n hupper.get_reloader().watch_files([filepath])\n spec_dict, _ = read_from_filename(filepath)\n\n validate_spec(spec_dict)\n spec = Spec.create(spec_dict)\n\n def spec_view(request: Request) -> FileResponse:\n return FileResponse(filepath, request=request, content_type=\"text/yaml\")\n\n config.add_route(route_name, route)\n config.add_view(route_name=route_name, permission=permission, view=spec_view)\n\n config.registry.settings[apiname] = _create_api_settings(\n config, filepath, route_name, spec\n )\n config.registry.settings.setdefault(\"pyramid_openapi3_apinames\", []).append(\n apiname\n )\n\n config.action((f\"{apiname}_spec\",), register, order=PHASE0_CONFIG)", "def from_file(filename, decomposer=None, rx=None, ax=None):\n from six.moves.cPickle import load\n with open(filename, 'rb') as f:\n data = load(f)\n return SOAPVector.from_dict(data, decomposer, rx, ax)", "def create_node(self, xmlnode):\n node = self.nf.get_node(xmlnode)\n # Check that the uri is valid\n check_uri(node.uri, self.sm, shouldExist = False)\n # Check for reserved URI\n if node.uri.endswith(AUTO): \n node.set_uri(generate_uri(node.uri))\n # Make sure capabilities is clear\n node.clear_capabilities()\n if isinstance(node, DataNode):\n # Add service views\n node.clear_accepts()\n for uris in SERVICE_VIEWS:\n node.add_accepts(uris)\n # Make sure provides is clear\n node.clear_provides()\n views = [set(PROVIDES_VIEWS[x]) for x in SERVICE_VIEWS]\n views = set.union(*views)\n for view in list(views):\n node.add_provides(view)\n # Make sure nodes is clear\n if isinstance(node, ContainerNode):\n node.clear_nodes()\n # Check properties\n for property in node.properties:\n if property in READ_ONLY_PROPERTIES: raise VOSpaceError(401, 'User does not have permissions to set a readonly property.', summary = PERMISSION_DENIED)\n # If container, create container\n location = get_location(node.uri)\n if isinstance(node, ContainerNode) and not os.path.exists(location): os.makedirs(location)\n # Store node\n self.sm.create_node(node.tostring(), node.uri, NODETYPES[node.TYPE], location = location) \n # Register properties\n self.sm.register_properties(node.uri, node.properties)\n return xmlnode", "def run_view():\n parser = ArgumentParser()\n parser.add_argument('name',nargs='?',default=None,help=\"Results file or directory with result files\")\n parser.add_argument('-x','--xunit',action='store_true',help=\"Save test results in the standard XUnit XML format\")\n parser.add_argument('-c','--cause',action='store_true',help=\"Print cause of fails and errors.\")\n parser.add_argument('-d','--details',action='store_true',help=\"Print details for fails and errors.\")\n #parser.add_argument('-o','--output',matavar='FILENAME',help=\"Save output to file.\")\n\n script_runner.run_view(parser.parse_args())", "def create(self, window):\r\n\r\n # Set the size of the editor area.\r\n if self.editor_area_size != (-1, -1):\r\n window.editor_area_size = self.editor_area_size\r\n\r\n # If the perspective has specific contents then add just those.\r\n if len(self.contents) > 0:\r\n self._add_contents(window, self.contents)\r\n\r\n # Otherwise, add all of the views defined in the window at their\r\n # default positions realtive to the editor area.\r\n else:\r\n self._add_all(window)\r\n\r\n # Activate the first view in every region.\r\n window.reset_views()\r\n \r\n return", "def write(self, filename):\n \n 
return self.model.write(filename,xml_declaration=True, encoding='utf-8')", "def fromxlsx(filename, sheet=None, range=None, **kwargs):\n \n return XLSXView(filename, sheet=sheet, range=range, **kwargs)", "def create_definition_from_data(self, new_def_data, category, user_id):\r\n new_def_data = self._serialize_fields(category, new_def_data)\r\n new_id = ObjectId()\r\n document = {\r\n '_id': new_id,\r\n \"category\" : category,\r\n \"fields\": new_def_data,\r\n \"edit_info\": {\r\n \"edited_by\": user_id,\r\n \"edited_on\": datetime.datetime.now(UTC),\r\n \"previous_version\": None,\r\n \"original_version\": new_id,\r\n },\r\n 'schema_version': self.SCHEMA_VERSION,\r\n }\r\n self.db_connection.insert_definition(document)\r\n definition_locator = DefinitionLocator(category, new_id)\r\n return definition_locator", "def add_explorer_view(\n config: Configurator,\n route: str = \"/docs/\",\n route_name: str = \"pyramid_openapi3.explorer\",\n template: str = \"static/index.html\",\n ui_version: str = \"4.18.3\",\n permission: str = NO_PERMISSION_REQUIRED,\n apiname: str = \"pyramid_openapi3\",\n proto_port: t.Optional[t.Tuple[str, int]] = None,\n) -> None:\n\n def register() -> None:\n resolved_template = AssetResolver().resolve(template)\n\n def explorer_view(request: Request) -> Response:\n settings = config.registry.settings\n if settings.get(apiname) is None:\n raise ConfigurationError(\n \"You need to call config.pyramid_openapi3_spec for the explorer \"\n \"to work.\"\n )\n with open(resolved_template.abspath()) as f:\n if proto_port:\n spec_url = request.route_url(\n settings[apiname][\"spec_route_name\"],\n _scheme=proto_port[0],\n _port=proto_port[1],\n )\n else:\n spec_url = request.route_url(settings[apiname][\"spec_route_name\"])\n\n template = Template(f.read())\n html = template.safe_substitute(\n ui_version=ui_version,\n spec_url=spec_url,\n )\n return Response(html)\n\n config.add_route(route_name, route)\n config.add_view(\n route_name=route_name, permission=permission, view=explorer_view\n )\n\n config.action((f\"{apiname}_add_explorer\",), register, order=PHASE0_CONFIG)", "def export_sqlite_views(self):\r\n # Gather the names of report views in the db\r\n SQL_TRAN.execute(\"SELECT name FROM sqlite_master WHERE type='view'\")\r\n view_names = SQL_TRAN.fetchall()\r\n\r\n # Export report views to tsv files\r\n for i in view_names:\r\n\r\n query = \"SELECT * FROM %s\" % (i[0])\r\n SQL_TRAN.execute(query)\r\n row = ' '\r\n # Get outfile to write to\r\n outfile = getattr(self, \"l_\" + i[0])\r\n row = SQL_TRAN.fetchone()\r\n if row is None:\r\n print(\" No records found in view {}. 
Nothing to export\".format(i[0]))\r\n outfile.close()\r\n os.remove(outfile.name)\r\n else:\r\n print(\" Exporting view {} from database\".format(i[0]))\r\n # For each row join using tab and output to file\r\n while row is not None:\r\n values = []\r\n try:\r\n for cell in row:\r\n if type(cell) is str or type(cell) is unicode:\r\n values.append(cell)\r\n else:\r\n values.append(unicode(cell))\r\n except:\r\n values.append(\"ERROR_IN_VALUE\")\r\n print(\"ERROR: \", row)\r\n m_row = u'\\t'.join(values)\r\n m_row = m_row + u'\\n'\r\n outfile.write(m_row.encode(\"utf-8\"))\r\n row = SQL_TRAN.fetchone()", "def run_view(self, expanded, unexpanded) :\n\t\treturn self.manage_view_properties(expanded, unexpanded, \"\", perms = \"View\")", "def newDoc(version):\n ret = libxml2mod.xmlNewDoc(version)\n if ret is None:raise treeError('xmlNewDoc() failed')\n return xmlDoc(_obj=ret)", "def save_view(self, subject, name, is_overwrite=False):\n db.save_view(self, subject, name, is_overwrite)", "def __init__( viewname, view ):", "def create(self):\n self.create_file()", "def from_file(self, xml_file):\n try:\n root = ET.parse(xml_file, self.parser).getroot()\n if hasattr(xml_file, \"close\"):\n xml_file.close()\n except ET.XMLSyntaxError as exc:\n raise ParserException(exc.msg)\n\n self._handle_version(root)\n doc = self.parse_element(root)\n\n # Provide original file name via the in memory document\n if isinstance(xml_file, str):\n doc.origin_file_name = basename(xml_file)\n\n return doc", "def setup_document(document_name=\"fSCAD-Preview\"):\n preview_doc = None\n saved_camera = None\n saved_units = None\n for document in app().documents:\n if document.name == document_name:\n preview_doc = document\n break\n if preview_doc is not None:\n preview_doc.activate()\n saved_camera = app().activeViewport.camera\n saved_units = design().fusionUnitsManager.distanceDisplayUnits\n preview_doc.close(False)\n\n preview_doc = app().documents.add(adsk.core.DocumentTypes.FusionDesignDocumentType)\n preview_doc.name = document_name\n preview_doc.activate()\n if saved_camera is not None:\n is_smooth_transition_bak = saved_camera.isSmoothTransition\n saved_camera.isSmoothTransition = False\n app().activeViewport.camera = saved_camera\n saved_camera.isSmoothTransition = is_smooth_transition_bak\n app().activeViewport.camera = saved_camera\n if saved_units is not None:\n design().fusionUnitsManager.distanceDisplayUnits = saved_units\n design().designType = adsk.fusion.DesignTypes.DirectDesignType", "def CreateXMLFromDB(tableName):\r\n fObj = open('htdocs/gl/UI.xml', 'w')\r\n fStr = \"\"\"\r\n <tables>\r\n <table>\r\n <col>data</col>\r\n </table>\r\n </tables>\r\n \"\"\" \r\n fObj.write(fStr)\r\n fObj.close()", "def update_views():\n # replace Supervisor main entry\n here = path.abspath(path.dirname(__file__))\n # set main page\n VIEWS['index.html'] = {'template': path.join(here, 'ui/index.html'), 'view': SupvisorsView}\n # set address /processpage\n VIEWS['procaddress.html'] = {'template': path.join(here, 'ui/procaddress.html'), 'view': ProcAddressView}\n # set address/host page\n VIEWS['hostaddress.html'] = {'template': path.join(here, 'ui/hostaddress.html'), 'view': HostAddressView}\n # set application page\n VIEWS['application.html'] = {'template': path.join(here, 'ui/application.html'), 'view': ApplicationView}\n # set fake page to export images\n VIEWS['process_cpu.png'] = {'template': path.join(here, 'ui/empty.html'), 'view': ProcessCpuImageView}\n VIEWS['process_mem.png'] = {'template': path.join(here, 
'ui/empty.html'), 'view': ProcessMemoryImageView}\n VIEWS['address_cpu.png'] = {'template': path.join(here, 'ui/empty.html'), 'view': AddressCpuImageView}\n VIEWS['address_mem.png'] = {'template': path.join(here, 'ui/empty.html'), 'view': AddressMemoryImageView}\n VIEWS['address_io.png'] = {'template': path.join(here, 'ui/empty.html'), 'view': AddressNetworkImageView}", "def add_file(self, xml_filepath: str) -> None:\n\t\t# Set filename and get root element of the tree\n\t\troot: ET.Element = get_xml_file(xml_filepath)\n\t\t# Check name\n\t\tif self.name != root.tag:\n\t\t\traise Exception(\"Tag '{}' is not equal to current name '{}'\".format(root.tag, self.name))\n\n\t\t# Iterate over and add child nodes\n\t\tchild: ET.Element\n\t\tfor child in root:\n\t\t\t# Determine if child is a SubNode or a Node\n\t\t\t# If child has children or attributes it is a Node\n\t\t\tif len(child) or len(child.attrib):\n\t\t\t\t# Add Node\n\t\t\t\tself.add_node(Node(child))\n\t\t\telse:\n\t\t\t\tself.add_subnode(SubNode(child))", "def definition_from_xml(cls, xml_object, system):\r\n raise NotImplementedError(\"%s does not implement definition_from_xml\" % cls.__name__)", "def create_document(self, data):\n command = CreateDocumentFromOneOffixxTemplateCommand(self.context, data['title'], data['template'])\n return command.execute()", "def _make_view(tabbed=False, split=False, scene_width=-1):\n view_options = VGroup(Item('headview', style='custom'), 'view_options',\n show_border=True, show_labels=False, label='View')\n\n scene = VGroup(Item('scene', show_label=False,\n editor=SceneEditor(scene_class=MayaviScene),\n dock='vertical', width=500),\n view_options)\n\n data_panel = VGroup(VGroup(Item('subject_panel', style='custom'),\n label=\"MRI Subject\", show_border=True,\n show_labels=False),\n VGroup(Item('lock_fiducials', style='custom',\n editor=EnumEditor(cols=2,\n values={False: '2:Edit',\n True: '1:Lock'}),\n enabled_when='fid_ok'),\n HGroup('hsp_always_visible',\n Label(\"Always Show Head Shape Points\"),\n show_labels=False),\n Item('fid_panel', style='custom'),\n label=\"MRI Fiducials\", show_border=True,\n show_labels=False),\n VGroup(Item('raw_src', style=\"custom\"),\n HGroup(Item('distance', show_label=True),\n 'omit_points', 'reset_omit_points',\n show_labels=False),\n Item('omitted_info', style='readonly',\n show_label=False),\n label='Head Shape Source (Raw)',\n show_border=True, show_labels=False),\n show_labels=False, label=\"Data Source\")\n\n coreg_panel = VGroup(Item('coreg_panel', style='custom'),\n label=\"Coregistration\", show_border=True,\n show_labels=False,\n enabled_when=\"fid_panel.locked\")\n\n if split:\n main_layout = 'split'\n else:\n main_layout = 'normal'\n\n if tabbed:\n main = HGroup(scene,\n Group(data_panel, coreg_panel, show_labels=False,\n layout='tabbed'),\n layout=main_layout)\n else:\n main = HGroup(data_panel, scene, coreg_panel, show_labels=False,\n layout=main_layout)\n\n view = View(main, resizable=True, handler=CoregFrameHandler(),\n buttons=NoButtons)\n return view", "def double_clicked_to_view(self):\n\n # TODO need this method? 
better in init to go to view_file\n self.view_file()", "def set_remote_template(self, filename):\n if self.template_exists(filename):\n self.client.service.SetRemoteTemplate(filename=filename)\n else:\n raise LiveDocxError('Remote template \"%s\" not exists' % filename)", "def test_create_view_returns_empty(dummy_request):\n from learning_journal.views.default import new_entry\n assert new_entry(dummy_request) == {}", "def replaceView(self, modeId, newView):\n oldView = None\n for view in self.__views:\n if view.modeId() == modeId:\n oldView = view\n break\n elif isinstance(view, _CompositeDataView):\n # recurse\n hooks = self.getHooks()\n if hooks is not None:\n newView.setHooks(hooks)\n if view.replaceView(modeId, newView):\n return True\n if oldView is None:\n return False\n\n # replace oldView with new view in dict\n self.__views = dict(\n (newView, None) if view is oldView else (view, idx) for\n view, idx in self.__views.items())\n return True", "def SaveXMLToDB(xmlFileName):", "def manage_addXMLTemplate(self, id, file, \n REQUEST=None, RESPONSE=None, submit=None):\n if not id and file:\n id = file.filename\n obj = XMLTemplate(id, file)\n self._setObject(id, obj)\n \n if RESPONSE and submit:\n if submit.strip().lower() == 'add':\n RESPONSE.redirect('%s/manage_main' % self.DestinationURL())\n else:\n RESPONSE.redirect('%s/manage_main' % id)", "def export_view(self, repo, view, file_format='CSV',\n delimiter=',', header=True):\n # clean up names:\n repo = clean_str(repo, '')\n view = clean_str(view, '')\n\n # check for permissions\n DataHubManager.has_repo_db_privilege(\n self.username, self.repo_base, repo, 'CREATE')\n\n # make the repo_base and repo's folder, if they don't already exist\n DataHubManager.create_user_data_folder(self.repo_base, repo)\n\n # define the file path for the new view\n file_name = clean_file_name(view)\n file_path = user_data_path(\n self.repo_base, repo, file_name, file_format)\n\n # format the full view name\n view_name = '%s.%s' % (repo, view)\n\n self.user_con.export_view(\n view_name=view_name,\n file_path=file_path,\n file_format=file_format,\n delimiter=delimiter,\n header=header)", "def create(self):\n if os.path.exists(self.__path):\n raise IOError(\"Can't create database at '%s'. 
File exists.\" %\n (self.__path,))\n else:\n with sqlite3.connect(self.__path) as c:\n cur = c.cursor()\n cur.execute(\"CREATE VIRTUAL TABLE AddressBook USING fts4(Name, Address)\")\n cur.execute(\"CREATE VIEW AddressBookView AS SELECT * FROM addressbook\")\n cur.executescript(\n \"CREATE TRIGGER insert_into_ab \" +\n \"INSTEAD OF INSERT ON AddressBookView \" +\n \"BEGIN\" +\n \" SELECT RAISE(ABORT, 'column name is not unique')\" +\n \" FROM addressbook\" +\n \" WHERE address = new.address;\" +\n \" INSERT INTO addressbook VALUES(new.name, new.address);\" +\n \"END;\")", "def update_views():\n # replace Supervisor main entry\n here = os.path.abspath(os.path.dirname(__file__))\n # set main page\n VIEWS['index.html'] = {'template': os.path.join(here, 'ui/index.html'),\n 'view': SupvisorsView}\n # set address /processpage\n VIEWS['procaddress.html'] = {'template': os.path.join(\n here, 'ui/procaddress.html'),\n 'view': ProcAddressView}\n # set address/host page\n VIEWS['hostaddress.html'] = {'template': os.path.join(\n here, 'ui/hostaddress.html'),\n 'view': HostAddressView}\n # set application page\n VIEWS['application.html'] = {'template': os.path.join(\n here, 'ui/application.html'),\n 'view': ApplicationView}\n # set fake page to export images\n VIEWS['process_cpu.png'] = {'template': os.path.join(\n here, 'ui/empty.html'),\n 'view': ProcessCpuImageView}\n VIEWS['process_mem.png'] = {'template': os.path.join(\n here, 'ui/empty.html'),\n 'view': ProcessMemoryImageView}\n VIEWS['address_cpu.png'] = {'template': os.path.join(\n here, 'ui/empty.html'),\n 'view': AddressCpuImageView}\n VIEWS['address_mem.png'] = {'template': os.path.join(\n here, 'ui/empty.html'),\n 'view': AddressMemoryImageView}\n VIEWS['address_io.png'] = {'template': os.path.join(\n here, 'ui/empty.html'),\n 'view': AddressNetworkImageView}", "def replaceView(self, modeId, newView):\n oldView = None\n for iview, view in enumerate(self.__views):\n if view.modeId() == modeId:\n oldView = view\n break\n elif isinstance(view, CompositeDataView):\n # recurse\n hooks = self.getHooks()\n if hooks is not None:\n newView.setHooks(hooks)\n if view.replaceView(modeId, newView):\n return True\n\n if oldView is None:\n return False\n\n # replace oldView with new view in dict\n self.__views[iview] = newView\n return True", "def add_view(self, view):\n # Add to views\n self._views.append(view)\n\n # If app was provided in constructor, register view with Flask app\n if self.app is not None:\n self.app.register_blueprint(view.create_blueprint(self))\n if view.is_menu:\n self._add_view_to_menu(view)", "def __init__(self, view_name, cursor=None, schema=None):\n self.name = view_name\n self.type = 'view' # Saves using type() or isinstance\n self.columns = {}\n self.sql = ''\n self.triggers = {}\n if schema:\n self.schema = schema\n else:\n schema = None\n if cursor:\n self._get_view(cursor)", "def write_viewer_file(self, file_name):\n viewer_writer = ViewerWriter(self.dna_structure, self.dna_parameters)\n viewer_writer.write(file_name)", "def download(cls):\n cls._check_folder()\n os.chdir(cls.VIEWS_PATH)\n # iterate documents\n for doc in cls._documents:\n design_doc = doc().view()\n if design_doc is None:\n continue\n bucket_name = design_doc.bucket.name\n # iterate viewtypes (i.e. 
spatial and views)\n for view_type, views in design_doc.ddoc.iteritems():\n save_dir = '%s/%s/%s' % (bucket_name, design_doc.name, view_type)\n try:\n # remove and recreate the dir\n shutil.rmtree(save_dir, ignore_errors=True)\n os.makedirs(save_dir)\n except OSError:\n pass\n for name, view in views.iteritems():\n if isinstance(view, unicode) and view_type=='spatial':\n spatial_file = '%s/%s.spatial.js' % (save_dir, name)\n with open(spatial_file, 'w') as f:\n f.write(view)\n print 'Downloaded: %s' % spatial_file\n if isinstance(view, dict) and 'map' in view:\n map_file = '%s/%s.map.js' % (save_dir, name)\n with open(map_file, 'w') as f:\n f.write(view['map'])\n print 'Downloaded: %s' % map_file\n if isinstance(view, dict) and 'reduce' in view:\n reduce_file = '%s/%s.reduce.js' % (save_dir, name)\n with open(reduce_file, 'w') as f:\n f.write(view['reduce'])\n print 'Downloaded: %s' % reduce_file\n pass", "def test_xmloutput_view(self):\n print 'Running %s ...' % getName()\n \n self.sequenceListingFixture.create_sequence_instance(self.sequenceListing)\n \n response = self.client.get(reverse('sequencelistings:xmloutput', args=[self.sequenceListing.pk, ]))\n self.assertEqual(response.status_code, 200)\n# test that the page returns expected html contents\n# self.assertContains(response, '%s.xml' % self.sequenceListing.fileName)\n self.assertContains(response, self.sequenceListing.fileName)", "def construct(self):\n top = Toplevel()\n top.withdraw()\n top.protocol(\"WM_DELETE_WINDOW\", self.view_xml_pane)\n top.columnconfigure(0, weight=1)\n top.rowconfigure(0, weight=1)\n top.title(\"XML Preview\")\n self._pane = top\n\n xml_area = Text(top, borderwidth=2, relief=\"sunken\")\n xml_area.config(font=(\"consolas\", 12), undo=True, wrap='word', state=DISABLED)\n xml_area.grid(row=0, column=0, sticky=\"nsew\", padx=2, pady=2)\n\n scrollbar = Scrollbar(top, command=xml_area.yview)\n scrollbar.grid(row=0, column=1, sticky='nsew')\n xml_area['yscrollcommand'] = scrollbar.set\n\n self._text_area = xml_area", "def create(\n self, *, db_structure, definitions, grouping_mapping, is_published, created_by\n ):\n contents_hash = md5(\n db_structure.read() + definitions.read() + grouping_mapping.read()\n ).digest()\n\n # reset file streams after reading them to generate hash\n db_structure.seek(0)\n definitions.seek(0)\n grouping_mapping.seek(0)\n\n try:\n processed_versions = Version.objects.exclude(last_process_at=None)\n existing_version = processed_versions.get(files_hash=contents_hash)\n raise VersionAlreadyExists(existing_pk=existing_version.pk)\n except Version.DoesNotExist:\n pass\n\n version = Version.objects.create(\n created_by=created_by, is_published=is_published, files_hash=contents_hash\n )\n\n version.db_structure = db_structure\n version.definitions = definitions\n version.grouping_mapping = grouping_mapping\n version.save()\n\n return version", "def create_training_file(self):\n self.master.switch_frame(TrainingFileView)", "def create(self, xact, path, msg):\n self._log.debug(\"Creating VNFR xact = %s, %s:%s\",\n xact, path, msg)\n self.regh.create_element(path, msg)\n self._log.debug(\"Created VNFR xact = %s, %s:%s\",\n xact, path, msg)", "def goToDefinition(file, line, offset):\n args = {\"file\": file, \"line\": line, \"offset\": offset}\n response = send_request(\"definition\", args)\n return get_response_body(response)" ]
[ "0.59218645", "0.5722048", "0.5537454", "0.5507403", "0.54620814", "0.54460394", "0.5323818", "0.53216887", "0.5261694", "0.52564645", "0.5204779", "0.5170237", "0.5099098", "0.5052455", "0.50478405", "0.50409013", "0.50351095", "0.50225526", "0.50119054", "0.49706888", "0.49474493", "0.49418244", "0.49392807", "0.49227813", "0.49002323", "0.48775333", "0.48600185", "0.484708", "0.48421258", "0.48366874", "0.4823523", "0.48194367", "0.47903526", "0.47825935", "0.47743583", "0.4760238", "0.47493604", "0.47424695", "0.47383222", "0.47085753", "0.4701727", "0.46929082", "0.4654709", "0.4654389", "0.46479854", "0.46475118", "0.46381533", "0.46273062", "0.4626488", "0.46126089", "0.45998144", "0.45965293", "0.45826337", "0.45785928", "0.4565958", "0.45171478", "0.45004097", "0.44976604", "0.44962594", "0.44834217", "0.44819787", "0.448178", "0.44781503", "0.44745937", "0.4464535", "0.44574377", "0.4450661", "0.4440626", "0.4434419", "0.44333926", "0.4432767", "0.43944457", "0.43877456", "0.43867153", "0.43782282", "0.4375021", "0.4358703", "0.43393978", "0.43178216", "0.4316652", "0.4316263", "0.4314373", "0.43126044", "0.4304852", "0.43012962", "0.4300938", "0.42994973", "0.4299431", "0.4297341", "0.42940402", "0.42863497", "0.42841145", "0.4280983", "0.4280338", "0.42686656", "0.42666033", "0.42647022", "0.42618942", "0.42614713", "0.42545384" ]
0.74085957
0
Returns True if the given job exists.
def job_exists(self, job): with open(os.devnull, 'w') as devnull: result = subprocess.call(self.cli + [PlatformJenkinsJavaCLI.GET_JOB, job.name], stdout=devnull) return result == 0
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def job_exists(self, job_id):\n\n return True if self.get_status(job_id) else False", "def exists(cls, job_id: str, connection: Optional['Redis'] = None) -> bool:\n if not connection:\n connection = resolve_connection()\n job_key = cls.key_for(job_id)\n job_exists = connection.exists(job_key)\n return bool(job_exists)", "def isJobRunning ( self ):\n #cmd = \"qstat \" + str(self.jobid)\n \n #magicString='Unknown Job Id' ### magicString _might_ need to be changed if Torque version changes\n #(output, error) = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE).communicate()\n\n if self.ofile_exists(): #output.find(magicString) >=0 or redhawkStatsRe.search(output):\n self.status = \"finished\"\n return False\n \n\n return True", "def test_job_exists():\n with tempfile.TemporaryDirectory() as STATUS_DIR:\n Status.add_job(STATUS_DIR, 'generation', 'test1',\n job_attrs={'job_status': 'submitted'})\n exists = Status.job_exists(STATUS_DIR, 'test1')\n assert exists", "def exists(self):\r\n return bool(self.bucket.lookup(self.name))", "def check_job_exists( job_list, analysis_group_id, reprocess_config_id):\n for job in job_list:\n struct = JSONMessage.unserialize(job.input_message)\n\n if( int( struct.analysis_group_id ) == int( analysis_group_id ) and \\\n int( struct.reprocess_config_id ) == int( reprocess_config_id ) ):\n return 1\n return 0", "def exists(self, prefix, args=()):\n file_path = self.path(prefix, args)\n return os.path.isfile(file_path)", "def in_queue(self):\n if self.get_db('jobid') is None:\n log.debug('jobid not found for calculation.')\n return False\n else:\n # get the jobid\n jobid = self.get_db('jobid')\n # see if jobid is in queue\n _, jobids_in_queue, _ = getstatusoutput('qselect',\n stdout=subprocess.PIPE,\n stderr=subprocess.PIPE)\n\n if str(jobid) in jobids_in_queue.split('\\n'):\n # get details on specific jobid in case it is complete\n status, output, err = getstatusoutput(['qstat', jobid],\n stdout=subprocess.PIPE,\n stderr=subprocess.PIPE)\n if status == 0:\n lines = output.split('\\n')\n fields = lines[2].split()\n job_status = fields[4]\n if job_status == 'C':\n return False\n else:\n return True\n else:\n return False", "def exists(self):\n return self.path.exists()", "def _check_queryinfo_existence(self, hostname: str, job: str) -> bool:\n with self.lock:\n hosts = self.host_query_info.all()\n for host in hosts:\n if host['hostname'] == hostname and host['job'] == job:\n return True\n return False", "def exists(self):\n return _os.path.exists(self.__str__())", "def batch_job_running(self, name):\n if name not in self.batch_jobs:\n raise ValueError(\"job {} doesn't exists\".format(name))\n return name in self.jobs", "def pid_exists(self, job_id):\n ## class QueuePage()\n pid = mysql.job_get_pid(job_id)\n if pid == None:\n ## job PID somehow did not get stored in the database, so return\n ## False => state='syserror'; job may still be running!\n return False\n else:\n pid = int(pid)\n try:\n #os.kill(pid, 0) ## This does not work, 2009-05-27\n ## NOTE: Three possible results:\n ## (1): os.kill(pid, 0) -> None: process exists, and you are process\n ## owner or root\n ## (2): os.kill(pid, 0) -> OSError, Operation not permitted:\n ## process exists, you are not owner or root\n ## (3): os.kill(pid, 0) -> OSError, No such process:\n ## process does not exist\n if os.path.exists(\"/proc/%s\" % pid):\n return True ## process is still running\n return False\n except:\n return False", "def isThisJobFinished(self, identifier):\n identifier 
= identifier.strip()\n with self.__queueLock:\n # Look through the finished jobs and attempt to find a matching\n # identifier. If the job exists here, it is finished\n for run in self.__finished:\n if run.identifier == identifier:\n return True\n\n # Look through the pending jobs and attempt to find a matching identifier\n # If the job exists here, it is not finished\n for queue in [self.__queue, self.__clientQueue]:\n for run in queue:\n if run.identifier == identifier:\n return False\n\n # Look through the running jobs and attempt to find a matching identifier\n # If the job exists here, it is not finished\n for run in self.__running+self.__clientRunning:\n if run is not None and run.identifier == identifier:\n return False\n\n # If you made it here and we still have not found anything, we have got\n # problems.\n self.raiseAnError(RuntimeError,\"Job \"+identifier+\" is unknown!\")", "def checkjob(sid, jid):\n with slycat.web.server.remote.get_session(sid) as session:\n return session.checkjob(jid)", "def exists(self) -> bool:\n try:\n result = self.get()\n except KeyError:\n return False\n return True", "def exists(self):\r\n return os.path.exists(self.full_path)", "def remove_job_if_exists(name: str, context: ContextTypes.DEFAULT_TYPE) -> bool:\n current_jobs = context.job_queue.get_jobs_by_name(name)\n if not current_jobs:\n return False\n for job in current_jobs:\n job.schedule_removal()\n return True", "def is_job_running(self, condor_id):\n\n classads = self.get_classads(\"OSGRSVUniqueName==\\\"%s\\\"\" % condor_id)\n\n if classads is None:\n self.rsv.log(\"ERROR\", \"Could not determine if job is running\")\n return False\n\n for classad in classads:\n # We put the attribute into the classad in quotes, so search for it accordingly\n if classad[\"OSGRSVUniqueName\"] == '\"' + condor_id + '\"':\n return True\n\n return False", "def exists(self):\n\n return os.path.exists(self.path)", "def exists(self):\n return True", "def exists(self):\n return True", "def verify_job(cls, auth_key, job_id):\n key = ObjectId(job_id)\n user_id = ObjectId(auth_key)\n db = cls.mongo_cli.get_database(collection=Job.collection_name)\n if db.count({\"_id\": key, \"user_id\": user_id}) > 0:\n return True\n return False", "def remove_job_if_exists(self, name: str, context: CallbackContext) -> bool:\n current_jobs = context.job_queue.get_jobs_by_name(name)\n if not current_jobs:\n return False\n for job in current_jobs:\n job.schedule_removal()\n return True", "def remove_job_if_exists(self, name: str, context: CallbackContext) -> bool:\n current_jobs = context.job_queue.get_jobs_by_name(name)\n if not current_jobs:\n return False\n for job in current_jobs:\n job.schedule_removal()\n return True", "def exists(self):\n return bool(self.get())", "def remove_job_if_exists(name, context):\n current_jobs = context.job_queue.get_jobs_by_name(name)\n if not current_jobs:\n return False\n for job in current_jobs:\n job.schedule_removal()\n return True", "def queue_exists(name: str) -> bool:\n try:\n batch = aws.client_with_default_region(\"batch\")\n\n return bool(\n batch.describe_job_queues(jobQueues = [name]) \\\n .get(\"jobQueues\"))\n except:\n return False", "def exists(self) -> bool:\n return self._file_exists()", "def exists(self):\n return os.path.isfile(self.location)", "def object_exists(self, fname):\n return self.object_exists", "def exists(bucket: str, key: str) -> bool:\n try:\n client().head_object(Bucket=bucket, Key=key)\n except botocore.client.ClientError:\n return False\n else:\n return True", 
"def _check_row_exists(self, pk):\n session = self.session_factory()\n exists = session.query(PipelineRun).filter_by(id=pk).first()\n session.close()\n if exists:\n return True\n return False", "def check_status(job):\n client = get_dropbox_client()\n\n try:\n return client.metadata(\n '/Video Automation Platform/jobs/{job}/{job}.png'.format(job=job))\n\n except ErrorResponse:\n return False", "def exists(self, path, flag='-e'):\r\n try:\r\n return self._call(\"-test\", flag, path) == 0\r\n except subprocess.CalledProcessError:\r\n return False", "def exists(self, task_identifier: str, timeout: int) -> bool:\n session = self.result_session()\n with self.session_cleanup(session):\n lock = session.query(Lock)\\\n .filter(Lock.task_identifier == task_identifier).first() # pylint: disable=no-member\n if not lock:\n return False\n difference = datetime.utcnow() - lock.created\n if difference < timedelta(seconds=timeout):\n return True\n\n return False", "def is_job_complete(self, job_id):\n\n job_status = self.get_job_progress(job_id)\n complete = job_status['completed']\n total = job_status['total']\n if (complete == total):\n return True\n else:\n return False", "def exists(path):\n return get_instance(path).exists(path)", "def exists(profile, name):\n result = fetch_by_name(profile, name)\n return len(result) > 0", "def exists(profile, name):\n result = fetch_by_name(profile, name)\n return len(result) > 0", "def exist(self):\n return self.file_path.exists()", "def exists(self):\n\n return os.path.exists(self[\"~filename\"])", "def exists(self, name):\n return self.backend.exists(name)", "def exist(name: str) -> bool:\n return bool(os.path.exists(name))", "def exists(self, task_identifier: str, timeout: int) -> bool:\n raise NotImplementedError", "def exists(self) -> bool:\n doc_ref = self.doc_ref\n if isinstance(doc_ref, DocumentReference):\n return doc_ref.get().exists\n return False", "async def _exists(self, key):\n with await self._connect() as redis:\n exists = await redis.exists(key)\n return True if exists > 0 else False", "def exists(cls, jti):\n\n with qdb.sql_connection.TRN:\n sql = \"\"\"SELECT COUNT(jti) FROM qiita.{0}\n WHERE jti=%s\"\"\".format(cls._table)\n qdb.sql_connection.TRN.add(sql, [jti])\n return qdb.sql_connection.TRN.execute_fetchlast() == 1", "def wait_on_job(self, delay=10):\n while self.isJobRunning() == True:\n time.sleep(delay)\n return self.ofile_exists()", "def exists(self, args):\n file_path = self.path(args)\n return os.path.isfile(file_path)", "def process_exists(pid=None, name=None):\n\n return count_processes(pid, name) > 0", "def wait_for_job(job) -> bool:\n job.refresh_from_db()\n is_done = False\n\n while not is_done:\n if job.end_time is None and job.success is None:\n print(f\"Polling {type(job).__name__}s. 
Currently waiting for job id: {job.id}\")\n sleep(20)\n job.refresh_from_db()\n elif job.retried and job.retried_job:\n job = job.retried_job\n elif job.success:\n return True\n else:\n print(f\"{type(job).__name__} {job.id} failed!\")\n return False\n\n return False", "def job_status(bot, update, args, job_queue, chat_data):\n if len(args) == 0:\n update.message.reply_text('No parameter provided')\n return\n\n job_name = args[0]\n if job_name not in settings.JOBS:\n update.message.reply_text(\n 'Sorry {0} is not a valid job'.format(job_name))\n return\n\n job = find_job(job_name, job_queue)\n\n if not job:\n update.message.reply_text('{0} job is not running'.format(job_name))\n return\n\n update.message.reply_text('{0} job is running'.format(job_name))", "def exists(self):\n if self._result_cache is None:\n return self.query.has_results(using=self.db)\n return bool(self._result_cache)", "def exists(self):\n return self._repository is not None", "def object_exists(self, fname):\n return True", "def Exists(self, path: str) -> bool:\n ...", "def job_has_params(job_url):\n name = job_url.rstrip(\"/\").rsplit(\"/\")[-1]\n if name in (\n \"pr-docs\",\n \"pr-lint\",\n \"pr-pre-commit\",\n ):\n return False\n else:\n return True", "def exists(self):\n return self.obj is not None", "def is_job_complete(job_name):\n complete = False\n log.info(\"Checking if %s is complete\", job_name)\n try:\n response = batchV1Api.read_namespaced_job_status(job_name, namespace)\n if response.status.succeeded == 1:\n job_status_type = response.status.conditions[0].type\n if job_status_type == \"Complete\":\n complete = True\n log.info(\"%s is complete\", job_name)\n else:\n log.info(\"%s is NOT complete\", job_name)\n else:\n log.info(\"%s has not succeeded yet\", job_name)\n except ApiException as exc:\n log.error(\"Exception when calling read_namespaced_job_status: %s\\n\",\n exc)\n return complete", "def exists(self, key_name: str) -> bool:\n pass", "def has_active_jobs(self, **kwargs):\n if Job.objects.add_balance().filter(house=self.house, balance1__gt=0, approved=True, **kwargs).exists():\n return True\n\n return False", "def isNodeExists(self, longName):\n return self.getComponentByLongName(longName) != None", "def exists(self):\n # TODO: What about broken sym-links?\n return os.path.exists(self.path)", "def object_exists(self, fname):\n return False", "def exists(self, prefix, args=()):\n dir_path = self.path(prefix, args)\n return os.path.isdir(dir_path)", "def exists(self, path: str) -> bool:\n pass", "def chronos_job_is_ready(context, job_name):\n chronos_tools.wait_for_job(context.chronos_client, context.jobs[job_name]['name'])", "def check_job_status(job):\n assert isinstance(job, PreprocessJob),\\\n 'job must be a PreprocessJob'\n\n if job.is_finished():\n return True\n\n return True\n \"\"\"\n ye_task = AsyncResult(job.task_id,\n app=preprocess_csv_file)\n\n if ye_task.state == 'SUCCESS':\n\n if ye_task.result['success']:\n\n preprocess_data = ContentFile(json.dumps(ye_task.result['data']))\n\n new_name = 'preprocess_%s.json' % get_alphanumeric_lowercase(8)\n job.metadata_file.save(new_name,\n preprocess_data)\n job.set_state_success()\n\n job.user_message = 'Task completed! 
Preprocess is available'\n job.save()\n\n else:\n # Didn't work so well\n job.set_state_failure(ye_task.result['message'])\n job.save()\n\n ye_task.forget()\n return True\n\n elif ye_task.state == STATE_FAILURE:\n job.set_state_failure('ye_task failed....')\n job.save()\n ye_task.forget()\n return True\n\n return False\n \"\"\"", "async def exists(self, tag_name):\n try:\n if await self.get_id(tag_name):\n return True\n except RtbDoesntExists:\n return False", "def exists(path):\n return os.path.exists(path)", "def exists(self):\n return self.properties.get(\"Exists\", None)", "def is_running(self):\n # do we have a job ID to work with?\n if self.jobid == None:\n return False\n else:\n q_status = self.queue.get_status(self.jobid)\n\n if q_status == self.queue.state[\"active\"]:\n self.meta[\"status\"] = 'PENDING'\n return True\n else:\n return False", "def exists(self):\n return os.path.exists(self.key_file)", "def flag_exists(self):\n return os.path.exists(self.flag_file)", "def exists(self):\n return self.islink() or exists(self._path)", "def check_job_status_by_id(job_id):\n print('=' * 40)\n print('check_status_by_job_id', job_id)\n print('=' * 40)\n\n it_worked = check_job_status(job)\n if it_worked:\n return ok_resp(job)\n\n user_msg = ('PreprocessJob still in process: %s') % (job_id)\n return err_resp(user_msg)", "def isJobPending(name):\n return Cuebot.getStub('job').IsJobPending(\n job_pb2.JobIsJobPendingRequest(name=name), timeout=Cuebot.Timeout).value", "def exists(self):\n return self.pod.file_exists(self.source_pod_path)", "def isFinished(self):\r\n try:\r\n output = Popen(\"qstat | grep \"+self.jobId, shell=True, stdout=PIPE, stderr=PIPE).communicate()[0]\r\n if self.jobId in output:\r\n if output.split()[4] == \"Eqw\":\r\n #If the job fails, print a warning, and wait a minute so the user can check why the job fails,\r\n #before resubmitting the job.\r\n logging.warning(\"job \" + output.split()[2] + \" failed to run, resubmitting in one minute\")\r\n time.sleep(60)\r\n output = Popen(\"qdel \"+self.jobId, shell=True, stdout=PIPE, stderr=PIPE).communicate()[0]\r\n self.submit()\r\n return False\r\n else:\r\n logging.info(\"job with ID: \" + self.jobId + \" is finished.\")\r\n return True\r\n \r\n except ValueError:\r\n logging.info(\"Error: waiting for not submitted job...\")", "def exists(self, arg):\n raise NotImplementedError", "def exist(self, key):\n record = self._storage.get(key, None)\n if record:\n return record.ttl >= time.time()\n return False", "def _does_rule_exist(cls, rule_suffix: str) -> str:\n check_rule = cls._build_rule_string(IpTableCommandOption.CHECK, rule_suffix)\n _, _, exit_code = utils.run_command(check_rule, shell=True, raise_errors=False)\n\n return exit_code == 0", "def exists(self, Search_ID):\n if self.get_id(Search_ID) is None:\n return False\n else:\n return True", "def exists(self) -> bool:\n p = pathlib.Path(self.summary_path)\n return p.exists()", "def test_matching_jobs_existing(self):\n self.assertEquals(\n self.query_api.get_matching_jobs(\n \"try\", \"146071751b1e\",\n 'Linux x86-64 try build'), json.loads(JOBS_SCHEDULE))", "def objExists(*args, **kwargs)->bool:\n pass", "def file_exists(self):\n return os.path.exists(self._fileName)", "def work_item_exists(self):\n\n try:\n work_items = self.connection.getWorkItems(self.issue_id)\n except YouTrackException as e:\n return False\n except TypeError as e:\n # no issue id\n return False\n else:\n for work_item in work_items:\n if (work_item.authorLogin == self.username and\n work_item.date 
== self.work_item.date and\n work_item.duration == self.work_item.duration):\n return True\n return False", "def check_job_status(self, jobid=None):\n\n if jobid is None:\n if hasattr(self, 'current_job'):\n jobid = self.current_job\n else:\n jobid = self.current_job\n\n response = self._request(\n 'GET', CosmoSim.QUERY_URL + '/{}'.format(jobid) + '/phase',\n auth=(self.username, self.password), data={'print': 'b'},\n cache=False)\n\n log.info(\"Job {}: {}\".format(jobid, response.content))\n return response.content", "def exists(self, path: str) -> bool:\n return self.fs.exists(self._full_path(path))", "async def exists(self, payload: TPayload) -> bool:", "def exists(path: str) -> bool:\n pass", "def exists_task(self, task):\n assert task, \"Must input a valid task name.\"\n return any(self.get_by_task(task))", "def exists(self, url):\n return (self.base_path / url).exists()", "def exists(name):\n\n return get_component(CachingPackage.COMPONENT_NAME).exists(name)", "def check_project_exists(self, project):\n session = self.session_factory()\n exists = session.query(PipelineRun).filter_by(project=project).first()\n session.close()\n if exists:\n return True\n return False", "def schedule(self, job: Job) -> bool:\n if self.num_avail_cores < job.num_cores:\n return False\n\n # Find the available cores\n num_cores_found = 0\n\n for i in range(self.num_cores):\n if self.core_status[i] == 0:\n # available\n\n self.core_status[i] = job.num_timesteps\n self.core_job_id[i] = job.id\n \n self.num_avail_cores -= 1\n num_cores_found += 1\n if num_cores_found >= job.num_cores:\n # found all the cores needed, we're done\n break\n \n return True", "def path_exists(path):\r\n return os.path.exists(path)", "def Exists(pathname: str) -> bool:\n exist = None\n if exist is None:\n exist = os.path.exists(pathname)\n return exist" ]
[ "0.8646111", "0.80569893", "0.69944775", "0.6987492", "0.6604774", "0.654603", "0.6538874", "0.65194386", "0.6514213", "0.6511255", "0.64722615", "0.64207816", "0.6419019", "0.6390656", "0.63874865", "0.63817424", "0.6360946", "0.63050056", "0.63043344", "0.62540364", "0.6230864", "0.6230864", "0.6206416", "0.6196641", "0.6196641", "0.6193515", "0.61923015", "0.6178714", "0.61785465", "0.6163299", "0.6146791", "0.61414623", "0.6131903", "0.6116357", "0.6113077", "0.6094289", "0.6090544", "0.60891485", "0.6085514", "0.6085514", "0.6073408", "0.6056852", "0.60395604", "0.60335475", "0.602863", "0.6024874", "0.6009285", "0.6006715", "0.5981558", "0.59619576", "0.59595287", "0.59595156", "0.5948198", "0.5947755", "0.59454614", "0.59419066", "0.59292173", "0.5915909", "0.5914455", "0.59141374", "0.5913468", "0.5913432", "0.5898614", "0.58983", "0.58936304", "0.58801746", "0.58737797", "0.58611673", "0.5859276", "0.5857418", "0.5848755", "0.5839099", "0.58058625", "0.5802357", "0.5801387", "0.5795464", "0.5789656", "0.5777591", "0.57689035", "0.57666326", "0.5765502", "0.5763115", "0.5763093", "0.57607025", "0.5758055", "0.5756357", "0.5753667", "0.57508415", "0.5749128", "0.5747999", "0.5745865", "0.57349706", "0.57309115", "0.571765", "0.5714788", "0.5713205", "0.57123226", "0.56971896", "0.56967145", "0.5693004" ]
0.85490435
1
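
A record in this dump ends with the negatives' similarity scores, the positive document's own score, and the positive's rank among the negatives, as in the row above. Below is a minimal Python sketch of how such a record could be consumed, assuming each record is stored as one JSON object per line with fields named query, document, negatives, negative_scores, document_score, and document_rank; the field names and the file name are assumptions, and the rank recomputation simply counts negatives whose score beats the positive.

import json

def load_rows(path):
    # One JSON record per line is assumed; adjust if rows are stored differently.
    with open(path) as fh:
        for line in fh:
            if line.strip():
                yield json.loads(line)

def recomputed_rank(row):
    # Count mined negatives whose score beats the positive document's score.
    positive = float(row["document_score"])
    return sum(float(score) > positive for score in row["negative_scores"])

for row in load_rows("retrieval_rows.jsonl"):  # hypothetical file name
    print(row["query"][:60], recomputed_rank(row), row["document_rank"])

On the rows visible in this section the recomputation matches the stored ranks: one negative (0.8646111) outscores the positive's 0.85490435 in the row above, giving rank 1, while no negative outscores 0.87232506 in the delete-job row that follows, giving rank 0.
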
Deletes a given job from Jenkins.
def delete_job(self, job):\n        subprocess.call(self.cli + [PlatformJenkinsJavaCLI.DELETE_JOB, job.name])
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def delete_job(self, job, context=None):\n return self._client.call_method(\n 'UserAndJobState.delete_job',\n [job], self._service_ver, context)", "def delete(job_id):\n job = JobModel.get_one_job(job_id)\n if not job:\n return custom_response({'Error':'Job Not Found'}, 404)\n\n JobModel.query.filter(JobModel.job_id == job_id).delete()\n\n return custom_response({'Message': 'Deleted'}, 204)", "def delete(\n address: Optional[str],\n job_id: str,\n headers: Optional[str],\n verify: Union[bool, str],\n):\n client = _get_sdk_client(address, headers=headers, verify=verify)\n client.delete_job(job_id)\n cli_logger.print(f\"Job '{job_id}' deleted successfully\")", "def DeleteJob(self, job_urn, token=None):\n aff4.FACTORY.Delete(job_urn, token=token)", "def delete_job(self, jobid=None, squash=None):\n\n self.check_all_jobs()\n\n if jobid is None:\n if hasattr(self, 'current_job'):\n jobid = self.current_job\n\n if jobid:\n if hasattr(self, 'current_job'):\n if jobid == self.current_job:\n del self.current_job\n\n if self.job_dict[jobid] in ['COMPLETED', 'ERROR',\n 'ABORTED', 'PENDING']:\n result = self.session.delete(\n CosmoSim.QUERY_URL + \"/{}\".format(jobid),\n auth=(self.username, self.password), data={'follow': ''})\n\n else:\n warnings.warn(\"Can only delete a job with phase: \"\n \"'COMPLETED', 'ERROR', 'ABORTED', or 'PENDING'.\")\n return\n\n if not result.ok:\n result.raise_for_status()\n if squash is None:\n warnings.warn('Deleted job: {}'.format(jobid))\n\n return result", "def _delete_job(self, job):", "def deleteJob(self, jobId):\n params = {'id': jobId}\n try:\n return self.gc.delete(JobUtils.JOB_ID_PATH, parameters=params)\n except HttpError as e:\n if e.status == 400:\n print('Error. invalid job id:', jobId)\n return {}\n raise", "def _delete_job(self, job):\n with self.db_lock:\n return self.rcon.zrem(job)", "def job_delete(job):\n\n if os.path.exists(job.output_abspath):\n os.remove(job.output_abspath)\n db.session.delete(job)\n db.session.commit()", "def delete(self, job_id):\n # Only admin can delete any job\n if not current_user.is_admin():\n return get_message_json('删除任务需要管理员权限'), HTTPStatus.FORBIDDEN\n\n try:\n result = jobs.delete_job_by_id(job_id)\n if result == 1:\n return get_message_json('已删除该任务'), HTTPStatus.OK\n else:\n if jobs.find_job_by_id(job_id) is None:\n return get_message_json('任务不存在'), HTTPStatus.NOT_FOUND\n return get_message_json('未知的任务删除失败'), HTTPStatus.BAD_REQUEST\n except Exception as err:\n return handle_internal_error(str(err))", "def delete_job(self, filename):\n job = Jobs.get(Jobs.filename == filename)\n job.delete_instance()", "def delete_job(api_instance, job_name):\n api_response = api_instance.delete_namespaced_job(\n name=job_name,\n namespace=\"default\",\n body=client.V1DeleteOptions(\n propagation_policy=\"Foreground\", grace_period_seconds=5\n ),\n )\n logger.info(\"Job deleted with status='%s'\" % str(api_response.status))", "def delete_job(request, job_id):\n job = get_object_or_404(Jobs, pk=job_id)\n\n if request.user.id != job.author.id:\n messages.error(request, 'You can only delete your own job profiles')\n return redirect(reverse('view_home'))\n\n job.delete()\n messages.success(request, 'You have successfully deleted the job profile!')\n return redirect(reverse('all_jobs'))", "def cmd_delete_job():\r\n id = request.form.get('id', \"\")\r\n confirm = request.form.get(\"confirm\", \"\")\r\n if confirm != \"DELETE\":\r\n flash(f\"Contact '{id}' NOT deleted. 
Please enter DELETE in the confirm field.\")\r\n return redirect(url_for('main.jobs'))\r\n \r\n index = get_job_by_id(id)\r\n Job.query.filter(Job.id == id).delete()\r\n db.session.commit()\r\n\r\n\r\n if index != None:\r\n flash(f\"Job '{id}' was succesfully deleted!\")\r\n return redirect(url_for('main.jobs'))\r\n else:\r\n flash(f\"Job '{id}' was not found\")\r\n return redirect(url_for('main.jobs'))", "def delete_dlp_job(project, job_name):\n\n # Import the client library.\n import google.cloud.dlp\n\n # Instantiate a client.\n dlp = google.cloud.dlp.DlpServiceClient()\n\n # Convert the project id and job name into a full resource id.\n name = dlp.dlp_job_path(project, job_name)\n\n # Call the API to delete job.\n dlp.delete_dlp_job(name)\n\n print('Successfully deleted %s' % job_name)", "def delete_job():\r\n id = request.args.get('id', \"\")\r\n return render_template(\"delete_job.html\", id=id)", "def delete(self):\n parser = reqparse.RequestParser()\n parser.add_argument(\"job_id\", type=str, location=\"form\")\n args = parser.parse_args()\n job_id = args[\"job_id\"]\n if job_id is None or job_id == \"\":\n return errors.all_errors(\n \"CLIENT_MISSING_PARAMETER\", \"job_id (str) parameter is required\"\n )\n\n get_job_info = get(\n config.Config.FLASK_ENDPOINT + \"/api/scheduler/job\",\n headers={\"X-SOCA-TOKEN\": config.Config.API_ROOT_KEY},\n params={\"job_id\": job_id},\n verify=False,\n ) # nosec\n\n if get_job_info.status_code != 200:\n return {\n \"success\": False,\n \"message\": \"Unable to retrieve this job. Job may have terminated\",\n }, 500\n else:\n job_info = get_job_info.json()[\"message\"]\n job_owner = job_info[\"Job_Owner\"].split(\"@\")[0]\n request_user = request.headers.get(\"X-SOCA-USER\")\n if request_user is None:\n return errors.all_errors(\"X-SOCA-USER_MISSING\")\n if request_user != job_owner:\n return errors.all_errors(\"CLIENT_NOT_OWNER\")\n try:\n qdel_command = config.Config.PBS_QDEL + \" \" + job_id\n try:\n delete_job = subprocess.check_output(shlex.split(qdel_command))\n return {\"success\": True, \"message\": \"Job deleted\"}\n except Exception as err:\n return {\n \"success\": False,\n \"message\": \"Unable to execute qdel command: \" + str(err),\n }, 500\n\n except Exception as err:\n return {\"success\": False, \"message\": \"Unknown error: \" + str(err)}, 500", "def removeJob(self):\n job, name = self.getJob() \n answer = tkMessageBox.askyesno(\"Warning\",'Remove this job?')\n if answer == False:\n return \n try: \n self.jobManager.deleteJob(job)\n except:\n print 'job not in database, removing from peat'\n del self.DB.meta.peatsa_jobs[name]\n self.DB.meta.__p__changed = 1\n self.updateJobs()\n return", "def remove_job(job_id):\n subprocess.check_call(['atrm', str(job_id)])\n return job_id", "def delete_job(self, id, jobstore=None):\n\n self._scheduler.remove_job(id, jobstore)", "def delete_job_by_id(self, job_id):\n try:\n self._session.query(JobEntity).\\\n filter(JobEntity.id == job_id).\\\n delete(synchronize_session=False)\n except SQLAlchemyError as err:\n Log.an().error('sql exception [%s]', str(err))\n return False\n\n return True", "def remove(self, job_or_id):\n if isinstance(job_or_id, Job):\n job = job_or_id\n else:\n job = Job(connection=self.connection, id=job_or_id)\n\n try:\n job.refresh()\n job._delete()\n except NoSuchJobError:\n pass\n\n self._remove(job.id)", "def cancel_job(self, job):\n try:\n self.jobs.remove(job)\n except ValueError:\n pass", "def delete(job, cmt=None, config_file=config_file):\n # Calling config file\n 
cf = config.ReadFile(config_file)\n user = cf[\"authentication\"][\"user\"]\n\n cron = CronTab(user=user)\n cron.remove_all(job)\n cron.remove_all(comment=cmt)", "def delete(self, jobs):\n assert isinstance(jobs, list), 'Jobs must be a list'\n assert len(jobs) > 0, 'One or more jobs required'\n\n req = list()\n if len(jobs) > 1:\n for r in self._batch_request(jobs):\n req.append(\n ''.join([self._scheduler_endpoint, '?', '&'.join(r)]))\n else:\n req = \"{}?job={}\".format(\n self._scheduler_endpoint, jobs[0])\n\n try:\n self._api_delete(req)\n except HTTPError as e:\n raise JobClientError(e.message)", "def delete_vmware_protection_job(job_name, delete_snapshots=True):\n try:\n cohesity_client = _get_client()\n jobs = cohesity_client.protection_jobs.get_protection_jobs(\n is_deleted=False, names=job_name)\n if not jobs:\n return \"Job with name {} not available.\".format(job_name)\n for job in jobs:\n if job.name == job_name:\n job_id = job.id\n break\n if not job_id:\n return \"Job with name {} not available.\".format(job_name)\n # Get recent job run id and status.\n body = DeleteProtectionJobParam()\n body.delete_snapshots = delete_snapshots\n cohesity_client.protection_jobs.delete_protection_job(job_id, body)\n return \"Successfully deleted job {}\".format(job_name)\n except APIException as err:\n return \"Error while attempting to delete the job {}, error : {}\".format(\n job_name, err)", "def delete_jobs(self, job_ids=['JID_CLEARALL']):\n return self._job_mgmt.delete_jobs(job_ids)", "def _remove(self, job_id):\n self.connection._lrem(self.key, 1, job_id)", "def delete_job_step_by_job_id(self, job_id):\n try:\n self._session.query(JobStepEntity).\\\n filter(JobStepEntity.job_id == job_id).\\\n delete(synchronize_session=False)\n except SQLAlchemyError as err:\n Log.an().error('sql exception [%s]', str(err))\n return False\n\n return True", "def kill_job(self, job):\n\n if job.status == Job.STATUS_QUEUED:\n # case 1: job is in QUEUED state\n # remove it from the queue and mark as killed\n\n job_queue = job_queue_name(job.model)\n logger.info(\n \"killing job {} by removing from queue {}\".\n format(job.uuid, job_queue))\n\n command_dict = {'command': 'PROCESS_JOB', 'job_uuid': job.uuid}\n remove_command(redis_connection(), job_queue, command_dict)\n job.status = Job.STATUS_KILLED\n # save it\n Job[job.uuid] = job\n elif job.status == Job.STATUS_RUNNING:\n # case 2: job is in RUNNING state\n # send message to worker to kill the job\n worker = worker_name(job.worker_url, job.model)\n worker_channel = node_channel_name(worker)\n logger.info(\"sending command to kill job on channel {}\".\n format(worker_channel))\n command_dict = {'command': \"KILL_JOB\", 'job_uuid': job.uuid}\n publish_command(redis_connection(), worker_channel, command_dict)\n else:\n logger.info(\"kill called on job {} in incompatible state {}\".\n format(job.uuid, job.status))", "def delete_job_final(self, job_id):\n job = self.backend.get_job(job_id)\n sure = self.yes_no_dialog(\"Are you sure you want to delete Job '{}'?\".format(job.title))\n\n if not sure:\n return\n\n self.backend.job_delete(job_id)\n self.refresh_jobs()", "def remove(self, job_or_id):\n job_id = job_or_id.id if isinstance(job_or_id, Job) else job_or_id\n self.connection.lrem(self.key, 0, job_id)\n return defer.succeed(job_or_id)", "def remove(self, job_id):\n self.background_scheduler.remove_job(job_id)", "def remove_job(data, job):\n for j in data.queue:\n if job.proc_id == j:\n del j\n return", "def force_delete_job(self, token, job, 
context=None):\n return self._client.call_method(\n 'UserAndJobState.force_delete_job',\n [token, job], self._service_ver, context)", "def delete_job():\n fsurfer.log.initialize_logging()\n logger = fsurfer.log.get_logger()\n\n parser = argparse.ArgumentParser(description=\"Process and remove old results\")\n # version info\n parser.add_argument('--version', action='version', version='%(prog)s ' + VERSION)\n # Arguments for action\n parser.add_argument('--dry-run', dest='dry_run',\n action='store_true', default=False,\n help='Mock actions instead of carrying them out')\n parser.add_argument('--debug', dest='debug',\n action='store_true', default=False,\n help='Output debug messages')\n\n args = parser.parse_args(sys.argv[1:])\n if args.debug:\n fsurfer.log.set_debugging()\n if args.dry_run:\n sys.stdout.write(\"Doing a dry run, no changes will be made\\n\")\n\n conn = fsurfer.helpers.get_db_client()\n cursor = conn.cursor()\n job_query = \"SELECT jobs.id, \" \\\n \" jobs.username, \" \\\n \" jobs.state, \" \\\n \" job_run.pegasus_ts, \" \\\n \" jobs.subject \" \\\n \"FROM freesurfer_interface.jobs AS jobs, \" \\\n \" freesurfer_interface.job_run AS job_run \" \\\n \"WHERE jobs.state = 'DELETE PENDING' AND \" \\\n \" jobs.id = job_run.job_id\"\n job_update = \"UPDATE freesurfer_interface.jobs \" \\\n \"SET state = 'DELETED' \" \\\n \"WHERE id = %s;\"\n try:\n cursor.execute(job_query)\n for row in cursor.fetchall():\n workflow_id = row[0]\n username = row[1]\n logger.info(\"Deleting workflow {0} for user {1}\".format(workflow_id,\n username))\n # pegasus_ts is stored as datetime in the database, convert it to what we have on the fs\n pegasus_ts = row[3]\n\n if pegasus_ts is None:\n # not submitted yet\n logger.info(\"Workflow {0} not \".format(workflow_id) +\n \"submitted, updating\")\n cursor.execute(job_update, [workflow_id])\n if args.dry_run:\n conn.rollback()\n else:\n conn.commit()\n continue\n\n workflow_dir = os.path.join(fsurfer.FREESURFER_SCRATCH,\n username,\n 'workflows',\n 'fsurf',\n 'pegasus',\n 'freesurfer',\n pegasus_ts)\n result_dir = os.path.join(fsurfer.FREESURFER_BASE,\n username,\n 'workflows',\n 'output',\n 'fsurf',\n 'pegasus',\n 'freesurfer',\n pegasus_ts)\n if args.dry_run:\n sys.stdout.write(\"Would run pegasus-remove \"\n \"{0}\\n\".format(result_dir))\n else:\n try:\n output = subprocess.check_output(['/usr/bin/pegasus-remove',\n workflow_dir],\n stderr=subprocess.STDOUT)\n exit_code = 0\n except subprocess.CalledProcessError as err:\n exit_code = err.returncode\n output = err.output\n # job removed (code = 0) just now or it's been removed earlier\n if exit_code == 0 or 'not found' in output:\n # look for condor job id and wait a bit for pegasus to remove it\n # so that we can delete the pegasus directories\n job_id = re.match(r'Job (\\d+.\\d+) marked for removal', output)\n if job_id is not None:\n logger.info(\"Waiting for running jobs to be removed...\\n\")\n count = 0\n while True:\n time.sleep(10)\n try:\n output = subprocess.check_output([\"/usr/bin/condor_q\",\n job_id.group(1)])\n except subprocess.CalledProcessError:\n logger.exception(\"An error occurred while \"\n \"checking for running \"\n \"jobs, exiting...\\n\")\n break\n if 'pegasus-dagman' not in output:\n break\n count += 1\n if count > 30:\n logger.error(\"Can't remove job, exiting...\\n\")\n break\n else:\n logger.error(\"Got error while removing workflow, \"\n \"exitcode: {0} error: {1}\".format(exit_code, output))\n logger.info(\"Jobs removed, removing workflow directory\\n\")\n try:\n if 
not args.dry_run and os.path.exists(workflow_dir):\n shutil.rmtree(workflow_dir)\n except shutil.Error:\n logger.exception(\"Can't remove directory at \"\n \"{0}, exiting...\\n\".format(workflow_dir))\n\n deletion_list = []\n # add input file\n input_files = get_input_files(workflow_id)\n if input_files is None:\n logger.error(\"Can't find input files for \" +\n \"workflow {0}\".format(workflow_id))\n else:\n deletion_list.extend(input_files)\n # remove files in result dir\n if os.path.isdir(result_dir):\n for entry in os.listdir(result_dir):\n deletion_list.append(os.path.join(result_dir, entry))\n if os.path.exists(result_dir):\n deletion_list.append(result_dir)\n # delete output and log copied over after workflow completion\n # if present\n deletion_list.append(os.path.join(fsurfer.FREESURFER_BASE,\n username,\n 'results',\n 'recon_all-{0}.log'.format(workflow_id)))\n deletion_list.append(os.path.join(fsurfer.FREESURFER_BASE,\n username,\n 'results',\n \"{0}_{1}_output.tar.bz2\".format(workflow_id,\n row[4])))\n for entry in deletion_list:\n if args.dry_run:\n sys.stdout.write(\"Would delete {0}\\n\".format(entry))\n else:\n logger.info(\"Removing {0}\".format(entry))\n if not purge_workflow_file(entry):\n logger.error(\"Can't remove {0} for job {1}\".format(entry,\n workflow_id))\n logger.info(\"Setting workflow {0} to DELETED\".format(workflow_id))\n cursor.execute(job_update, [workflow_id])\n if args.dry_run:\n conn.rollback()\n else:\n conn.commit()\n except psycopg2.Error as e:\n logger.exception(\"Error: {0}\".format(e))\n return 1\n finally:\n conn.commit()\n conn.close()\n\n retcode = delete_incomplete_jobs()\n return retcode", "def delete(cls, cluster, job, group=None):\n try:\n if group is not None:\n # get the job ids from the db\n\n arguments = {'cluster': cluster,\n 'group': group}\n db_jobs = cls.cm.find('batchjob',\n **arguments)\n\n list1 = []\n for i in db_jobs:\n list1.append(db_jobs[i]['job_id'])\n\n # read active jobs\n active_jobs = json.loads(cls.queue(cluster))\n list2 = []\n for i in active_jobs:\n list2.append(active_jobs[i]['jobid'])\n\n # find intersection\n res = set(list1).intersection(set(list2))\n\n if res is not None:\n for j in res:\n cmd = 'scancel {}'.format(str(j))\n Shell.ssh(cluster, cmd)\n print(\"Deleted {}\".format(j))\n\n return \"All jobs for group {} killed successfully\".format(group)\n\n else:\n args = 'scancel '\n if job.isdigit():\n args += job\n else:\n args += \"-n {}\".format(job)\n\n Shell.ssh(cluster, args)\n return \"Job {} killed successfully\".format(job)\n except Exception as ex:\n print(\"in exceptio\")\n print(ex)\n return ex", "def DeleteForTag(cls, tag):\n parent_key = cls._GetParentKeyFromTag(tag)\n frontend_job = cls.query(ancestor=parent_key).get(keys_only=True)\n if frontend_job:\n frontend_job.delete()", "def deleteJobs():\n deleteList = request.form.getlist(\"delete_job\")\n for jobId in deleteList:\n job = db.getJobs(jobId=jobId)[0]\n path = os.path.join(webapp.config['UPLOADED_JOBS_DEST'], job['appName'], jobId)\n shutil.rmtree(path, ignore_errors=True)\n db.deleteJob(jobId)\n return redirect(url_for('listJobs')), 302", "def test_delete_job(self):\n response = self.client.open(\n '/tx-queue/2/scheduler/job/{jobId}'.format(jobId=1),\n method='DELETE')\n self.assert200(response,\n 'Response body is : ' + response.data.decode('utf-8'))", "def destroy(self, request, pk=None):\n try:\n job = Job.objects.get(pk=pk)\n job.delete()\n\n return Response({}, status=status.HTTP_204_NO_CONTENT)\n\n except Job.DoesNotExist as ex:\n 
return Response({'message': ex.args[0]}, status=status.HTTP_404_NOT_FOUND)\n\n except Exception as ex:\n return Response({'message': ex.args[0]}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)", "def cli(ctx, job_id):\n return ctx.gi.jobs.cancel_job(job_id)", "def destroy(self):\n if Path(self.path).exists():\n logger.info(f\"Trying to destroy old job {self.name}\")\n try:\n shutil.rmtree(self.path)\n except PermissionError:\n logger.error(f\"Cannot destroy job {self.name}!\")\n logger.error(\"Permission error.\")\n except:\n logger.error(f\"Cannot destroy job {self.name}!\")\n logger.error(\"Unknown error.\")\n else:\n logger.info(f\"Destroyed job {self.name}\")", "def qdel(job_id):\n ssh = connect_server()\n if isinstance(job_id, JobStatus):\n i,o,e = ssh.exec_command(qdel_c + ' ' + job_id.id)\n else:\n i,o,e = ssh.exec_command(qdel_c + ' ' + job_id)\n\n qdel_output = o.readlines() + e.readlines()\n ssh.close()", "def remove_job(self, job_specifier, _unprotect=False):\n self._project.remove_job(job_specifier=job_specifier, _unprotect=_unprotect)", "def delete(self, customerguid, jobguid=\"\", executionparams=None):", "def remove_job(redis_server, json_data):\n key = redis_server.get_keys_from_progress(json_data[\"job_id\"])\n redis_server.remove_job_from_progress(key)", "def test_job_delete_completed_job(self):\n test_app = self._create_app()\n class_path = \"spark.jobserver.VeryShortDoubleJob\"\n job = self.client.jobs.create(test_app, class_path,\n ctx=self._get_functional_context())\n time.sleep(3)\n self._wait_till_job_is_done(job)\n self.assertRaises(exceptions.NotFoundException,\n self.client.jobs.delete, job.jobId)", "def __delete_job_status(self, job: Job):\n\n keys = self._get_keys(f'jobstatus:{job.id}:*')\n for key in keys:\n self.redis_client.delete(key)", "async def job_remove(self, uid):\n self._require_running()\n job = self._get_job(uid)\n await job.close()\n del self._jobs[uid]\n del self._jobs_by_connection[job.sender.connection][uid]\n if len(self._jobs_by_connection[job.sender.connection]) == 0:\n del self._jobs_by_connection[job.sender.connection]\n self._log.debug('Removed job %s', job)", "def remove_job(self, name):\n if name not in self.jobs:\n raise ValueError(\"job {} doesn't exists\".format(name))\n self._jobs_list.remove(name)\n del self.jobs[name]\n del self.timers[name]", "def delete_jobs(self):\n jobs = self.get_jobs(self.age)\n print('Jobs queued for delete: ', jobs)\n for job in jobs:\n try: \n body = k_client.V1DeleteOptions(propagation_policy='Background')\n self.kube_v1_batch_client.delete_namespaced_job(job, body=body, namespace=self.project)\n self.kube_client.delete_namespaced_persistent_volume_claim(job+\"-storage-claim\", self.project, {})\n print('Deleted job: ', job)\n except ApiException as e:\n print(\"Exception when calling BatchV1Api -> delete_namespaced_job: %s\\n\" % e)\n exit(1)", "def delete_build(self, build_id):\n pass", "def delete(project, zone, instance):\n print >>sys.stderr, 'WARNING: duplicated jobs may fail/corrupt results'\n print >>sys.stderr, ('TODO(fejta): See http://stackoverflow.com/'\n 'questions/19645430/changing-jenkins-build-number')\n answer = raw_input('Delete %s [yes/NO]: ')\n if not answer or answer != 'yes':\n print >>sys.stderr, 'aborting'\n sys.exit(1)\n gcloud(\n project,\n 'compute',\n 'instances',\n 'delete',\n '--zone=%s' % zone,\n instance,\n )\n gcloud(\n project,\n 'compute',\n 'disks',\n 'delete',\n '--zone=%s' % zone,\n *get_disks(instance))", "def qdel(jid):\n command = '%s -j %d' % (QDEL_PATH, jid)\n 
subprocess.check_output([command], env=ENV, shell=True)", "def unregister_job(self, job_id):\n ujs = self.__ujs_client()\n ujs.unshare_job(job_id, [self.nar_user])", "def kill_job(self , index):\n job = self.jobs.__getitem__( index )\n if job:\n job.kill()", "def deleteDeleteSystemJob(self, jobId: str = None) -> dict:\n if jobId is None:\n raise ValueError(\"Require a system Job ID\")\n if self.loggingEnabled:\n self.logger.debug(f\"Starting deleteDeleteSystemJob\")\n path = f\"/system/jobs/{jobId}\"\n res = self.connector.deleteData(self.endpoint + path, headers=self.header)\n return res", "def stop_job(self):\n # DELETE /jobs/{job_id}/results\n pass", "def admin_delete_jobs(id):\n if is_admin(current_user.id):\n try:\n job = Status.query.filter_by(jobid = int(id)).first()\n if job:\n db.session.delete(job)\n db.session.commit()\n flash(\"Job deleted successfully.\",\"success\")\n else:\n flash(\"No such jobs present, so cannot be deleted\", \"danger\")\n return redirect(url_for(\"admin.admin_view_jobs\"))\n except:\n flash(\"Sorry! Something went wrong.If this keeps on comming, kindly contact developer\",\"danger\")\n return redirect(url_for(\"admin.admin_view_jobs\"))\n else:\n logout_user()\n flash(\"Login is required!\")\n return redirect(url_for(\"admin.admin_login\"))", "def delete_job_by_workflow_id(self, workflow_id):\n try:\n self._session.query(JobEntity).\\\n filter(JobEntity.workflow_id == workflow_id).\\\n delete(synchronize_session=False)\n except SQLAlchemyError as err:\n Log.an().error('sql exception [%s]', str(err))\n return False\n\n return True", "def test_job_delete_non_existing(self):\n self.assertRaises(exceptions.NotFoundException,\n self.client.jobs.delete, 'does-not-exist')", "async def delete(self, job):\n # nothing to delete if it doesn't exist\n info = await self.middleware.call('gluster.volume.exists_and_started', CTDB_VOL_NAME)\n if not info['exists']:\n return\n\n # stop the gluster volume\n if info['started']:\n options = {'args': (CTDB_VOL_NAME,), 'kwargs': {'force': True}}\n job.set_progress(33, f'Stopping gluster volume {CTDB_VOL_NAME!r}')\n await self.middleware.call('gluster.method.run', volume.stop, options)\n\n # finally, we delete it\n job.set_progress(66, f'Deleting gluster volume {CTDB_VOL_NAME!r}')\n await self.middleware.call('gluster.method.run', volume.delete, {'args': (CTDB_VOL_NAME,)})\n job.set_progress(100, f'Successfully deleted {CTDB_VOL_NAME!r}')", "def delete(self, userguid, jobguid=\"\", executionparams=dict()):", "def kill(self, job_id):\n if webtlsmdd.kill_job(job_id):\n x = ''\n x += '<center>'\n x += '<h3>Job %s has died ' % (job_id)\n x += 'or its associated pid has been manually killed.</h3>'\n x += '</center>'\n else:\n x = ''\n x += '<center>'\n x += '<h3>Error: Can not remove job %s.</h3>' % (job_id)\n x += '</center>'\n return x", "def do_project_delete(cs, args):\n key = args.project\n if cs.projects.is_id(key):\n id = key\n else:\n id = cs.projects.get_id_by_name(key)\n cs.projects.delete(id)\n print(\"Delete Project '%s' successfully.\" % key)", "def delete_job_schedule(self):\n job_schedule_delete = netapp_utils.zapi\\\n .NaElement.create_node_with_children(\n 'job-schedule-cron-destroy',\n **{'job-schedule-name': self.name})\n try:\n self.server.invoke_successfully(job_schedule_delete,\n enable_tunneling=True)\n except netapp_utils.zapi.NaApiError as error:\n self.module.fail_json(msg='Error deleting job schedule %s: %s'\n % (self.name, to_native(error)),\n exception=traceback.format_exc())", "def delete_job_state(self, 
job_origin_id):", "def delete_job_step_by_workflow_id(self, workflow_id):\n try:\n # use of a sub-query instead of join for delete is required\n # for sqlite\n sub_query = self._session.query(JobEntity.id).\\\n filter(JobEntity.workflow_id == workflow_id)\n self._session.query(JobStepEntity).\\\n filter(JobStepEntity.job_id.in_(sub_query)).\\\n delete(synchronize_session=False)\n except SQLAlchemyError as err:\n Log.an().error('sql exception [%s]', str(err))\n return False\n\n return True", "def queue_delete(queue):\n\n for job in queue.jobs:\n job_delete(job)\n if os.path.exists(queue.data_abspath):\n os.rmdir(queue.data_abspath)\n db.session.delete(queue)\n db.session.commit()", "def test_job_delete_remove_document(self, request_lookup_users, elastic_search_update, elastic_search_delete):\n from bilbyui.models import BilbyJob\n\n job = BilbyJob.objects.create(\n user_id=self.user.id,\n name=\"Test1\",\n description=\"first job\",\n job_controller_id=2,\n private=False,\n ini_string=create_test_ini_string({\"detectors\": \"['H1']\"}),\n )\n\n job_id = job.id\n\n job.delete()\n\n self.assertDictEqual(\n elastic_search_delete.mock_calls[0].kwargs, {\"index\": settings.ELASTIC_SEARCH_INDEX, \"id\": job_id}\n )", "def killJob(appName, jobId):\n jobs = db.getJobs(jobId=jobId)\n job = None if len(jobs) == 0 else jobs[0]\n\n if job == None:\n return returnError (\"Job ID, %s, does not exist\" % jobId, 404)\n\n logging.info (\"[FLASKWEB] Asked to KILL job #%s. Current Job status is %s\" % (jobId, job['status']))\n # Separate check to kill orphaned jobs in Db\n # TODO: Merge Job with experiments to post updates to correct table\n if job['status'] == 'RUNNING' or job['status'] == 'SUBMITTED':\n db.updateJob(jobId, status='KILLED')\n\n if int(jobId) in dispatcher.getActiveJobs():\n status = 'KILLED'\n logging.debug('[FLASKWEB] Job %s is active. Signaling to kill in mesos.' % jobId)\n dispatcher.cancelJob(int(jobId), driverDispatch)\n else:\n status = 'ORPHANED and CLEANED'\n logging.debug('[FLASKWEB] Job # %s is ORPHANED and does not exist in current state. Cleaning up.' 
% jobId)\n\n ts = db.getTS_est() #datetime.datetime.fromtimestamp(ts).strftime('%Y-%m-%d %H:%M:%S')\n thisjob = dict(jobId=jobId, time=ts, url=dispatcher.getSandboxURL(jobId), status=status)\n if 'application/json' in request.headers['Accept']:\n return jsonify(thisjob)\n else:\n return render_template(\"last.html\", appName=appName, lastjob=thisjob)", "def delete(ctx: click.Context, repository_path):\n root_commands.cmd_delete(ctx.obj, repository_path)", "def post_job_delete(self, name, **kwargs):\n kwargs['_return_http_data_only'] = True\n if kwargs.get('async'):\n return self.post_job_delete_with_http_info(name, **kwargs)\n else:\n (data) = self.post_job_delete_with_http_info(name, **kwargs)\n return data", "def cancel_job(job_id: str, connection: Optional['Redis'] = None, serializer=None, enqueue_dependents: bool = False):\n Job.fetch(job_id, connection=connection, serializer=serializer).cancel(enqueue_dependents=enqueue_dependents)", "def unset_wrapper(bot, update, args, job_queue, chat_data):\n if len(args) == 0:\n update.message.reply_text('No parameter provided')\n return\n\n job_name = args[0]\n if len(args) == 0 or job_name not in settings.JOBS:\n update.message.reply_text(\n 'Sorry {0} is not a valid job'.format(job_name))\n return\n\n job = find_job(job_name, job_queue)\n\n if not job:\n update.message.reply_text('You have no active job')\n return\n\n job.schedule_removal()\n\n update.message.reply_text('{0} job successfully unset!'.format(job_name))", "def delete_job(\n self,\n *,\n id: str,\n error_trace: t.Optional[bool] = None,\n filter_path: t.Optional[\n t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]\n ] = None,\n human: t.Optional[bool] = None,\n pretty: t.Optional[bool] = None,\n ) -> ObjectApiResponse[t.Any]:\n if id in SKIP_IN_PATH:\n raise ValueError(\"Empty value passed for parameter 'id'\")\n __path = f\"/_rollup/job/{_quote(id)}\"\n __query: t.Dict[str, t.Any] = {}\n if error_trace is not None:\n __query[\"error_trace\"] = error_trace\n if filter_path is not None:\n __query[\"filter_path\"] = filter_path\n if human is not None:\n __query[\"human\"] = human\n if pretty is not None:\n __query[\"pretty\"] = pretty\n __headers = {\"accept\": \"application/json\"}\n return self.perform_request( # type: ignore[return-value]\n \"DELETE\", __path, params=__query, headers=__headers\n )", "def endace_delete_search_task_command(app, args):\r\n\r\n jobid = args.get(\"jobid\")\r\n if len(re.findall(r'([0-9a-fA-F]+)', jobid)) == 5:\r\n\r\n # calling search status function of app instance\r\n result = app.delete_search_task(jobid)\r\n\r\n # create entry context to return to Demisto\r\n output = {'Endace.Search.Delete(val.JobID == obj.JobID)': result}\r\n table_header = [\"Task\", \"JobID\", \"Status\", \"Error\"]\r\n readable_output = tableToMarkdown('EndaceResult', result, headers=table_header, removeNull=False)\r\n raw_response = result\r\n return readable_output, output, raw_response\r\n else:\r\n raise ValueError(\"Incorrect JOB ID provided\")", "def remove_jobfile(self):\n if os.path.isfile(self.options.jobfile):\n os.remove(self.options.jobfile)", "def teardown_job(self, job, filesystem_only=True):\n oqp = job.oq_params\n self.teardown_upload(oqp.upload, filesystem_only=filesystem_only)\n if filesystem_only:\n return\n job.delete()\n oqp.delete()", "def schedule_delete_video(video: Video):\n job = scheduler.scheduler.add_job(delete_video, args=[video])\n log.info('Scheduled delete video job video=(%s), job=%s', video, job.id)", "def 
endace_delete_archive_task_command(app, args):\r\n jobid = args.get(\"jobid\")\r\n if not re.fullmatch(r'[0-9a-zA-Z\\-]+', jobid) is None:\r\n\r\n # calling delete archive task function of app instance\r\n result = app.delete_archive_task(jobid)\r\n\r\n # create entry context to return to Demisto\r\n output = {'Endace.Archive.Delete(val.JobID == obj.JobID)': result}\r\n table_header = [\"Task\", \"JobID\", \"Status\", \"Error\"]\r\n readable_output = tableToMarkdown('EndaceResult', result, headers=table_header, removeNull=False)\r\n raw_response = result\r\n return readable_output, output, raw_response\r\n else:\r\n raise ValueError(\"Incorrect JOB ID provided\")", "def delete_problem(identifier):\n # Admin check\n if not current_user.admin == 1:\n return serve_error('You must be an admin to delete a problem',\n response_code=401)\n\n pid, problem = None, database.session.query(Problem)\n if is_pid(identifier):\n pid = identifier\n problem = problem.filter(Problem.pid == pid).first()\n else:\n problem = problem.filter(Problem.shortname == identifier).first()\n pid = problem.pid\n\n # Delete from problem_data table first to satisfy foreign key constraint\n problem_data = database.session.query(ProblemData).\\\n filter(ProblemData.pid == pid)\n if not problem_data.first():\n return serve_error('Could not find problem data with pid ' +\n pid, response_code=401)\n database.session.delete(problem_data.first())\n\n # Delete any and all sample cases associated w/ problem\n for case in database.session.query(SampleCase).\\\n filter(SampleCase.pid == pid).all():\n database.session.delete(case)\n\n # Delete from problem table\n database.session.delete(problem)\n\n # Commit changes\n database.session.flush()\n database.session.commit()\n\n # Delete judge data\n directory = os.path.join(app.config['DATA_FOLDER'], 'problems', pid)\n rmtree(directory)\n\n return serve_response({\n 'deleted_pid': pid\n })", "def test_007_delete(self):\n HEADING()\n db = self.db\n\n db.connect()\n print (\"AAA\")\n before_count = len(db)\n print (\"CCC\", len(db))\n job = db.insert(\"deleteme\")\n print (\"DDD\", len(db))\n\n job = db.delete_jobs(\"job_name\", \"deleteme\")\n print (\"EEE\")\n after_count = len(db)\n print (\"FFF\", len(db))\n assert(before_count - after_count == 0)", "def deleteImage(job):\n job = Job().updateJob(\n job,\n log='Started to Delete Docker images\\n',\n status=JobStatus.RUNNING,\n )\n docker_client = None\n try:\n deleteList = job['kwargs']['deleteList']\n error = False\n\n try:\n docker_client = docker.from_env(version='auto')\n\n except docker.errors.DockerException as err:\n logger.exception('Could not create the docker client')\n job = Job().updateJob(\n job,\n log='Failed to create the Docker Client\\n' + str(err) + '\\n',\n status=JobStatus.ERROR,\n )\n raise DockerImageError('Could not create the docker client')\n\n for name in deleteList:\n try:\n docker_client.images.remove(name, force=True)\n\n except Exception as err:\n logger.exception('Failed to remove image')\n job = Job().updateJob(\n job,\n log='Failed to remove image \\n' + str(err) + '\\n',\n )\n error = True\n if error is True:\n job = Job().updateJob(\n job,\n log='Failed to remove some images',\n status=JobStatus.ERROR,\n notify=True,\n progressMessage='Errors deleting some images'\n )\n else:\n job = Job().updateJob(\n job,\n log='Removed all images',\n status=JobStatus.SUCCESS,\n notify=True,\n progressMessage='Removed all images'\n )\n except Exception as err:\n logger.exception('Error with job')\n job = 
Job().updateJob(\n job,\n log='Error with job \\n ' + str(err) + '\\n',\n status=JobStatus.ERROR,\n\n )\n finally:\n if docker_client:\n docker_client.close()", "def delete_stored_project():\n client = RequestManager()\n client.set_method(\"DELETE\")\n client.set_endpoint(\"/projects/{0}\".format(STORED_ID['project_id']))\n client.execute_request()", "def removeJob(self, job_number):\n job = self.retrieveJob(job_number)\n job_type = job.getType() - 1\n del (self.assigned_jobs[job_number])\n self.span -= job.getLength()\n self.types[job_type] = self.types[job_type] - 1\n self.types_sums[job_type] = self.types_sums[job_type] - job.length\n job.in_machine = -1", "def post_job_delete_with_http_info(self, name, **kwargs):\n\n all_params = ['name', 'jenkins_crumb']\n all_params.append('async')\n all_params.append('_return_http_data_only')\n all_params.append('_preload_content')\n all_params.append('_request_timeout')\n\n params = locals()\n for key, val in iteritems(params['kwargs']):\n if key not in all_params:\n raise TypeError(\n \"Got an unexpected keyword argument '%s'\"\n \" to method post_job_delete\" % key\n )\n params[key] = val\n del params['kwargs']\n # verify the required parameter 'name' is set\n if ('name' not in params) or (params['name'] is None):\n raise ValueError(\"Missing the required parameter `name` when calling `post_job_delete`\")\n\n\n collection_formats = {}\n\n path_params = {}\n if 'name' in params:\n path_params['name'] = params['name']\n\n query_params = []\n\n header_params = {}\n if 'jenkins_crumb' in params:\n header_params['Jenkins-Crumb'] = params['jenkins_crumb']\n\n form_params = []\n local_var_files = {}\n\n body_params = None\n # Authentication setting\n auth_settings = ['jenkins_auth']\n\n return self.api_client.call_api('/job/{name}/doDelete', 'POST',\n path_params,\n query_params,\n header_params,\n body=body_params,\n post_params=form_params,\n files=local_var_files,\n response_type=None,\n auth_settings=auth_settings,\n async=params.get('async'),\n _return_http_data_only=params.get('_return_http_data_only'),\n _preload_content=params.get('_preload_content', True),\n _request_timeout=params.get('_request_timeout'),\n collection_formats=collection_formats)", "def job_stop(self, job_id):\n resp = self.backend.job_stop(job_id)\n\n self.refresh_jobs()", "def get_boardjob(cls, board_id, job_id):\n\n try:\n return cls.query.filter_by(board_id=board_id, job_id=job_id).one()\n except orm.exc.NoResultFound:\n return None\n except orm.exc.MultipleResultsFound:\n db.session.delete(cls.query.filter_by(board_id=board_id, job_id=job_id).first())\n db.session.commit()\n return cls.query.filter_by(board_id=board_id, job_id=job_id).one()", "def delete_agent(self, agent):\r\n return self.delete(self.agent_path % (agent))", "def remove(config, schedd, jobid):\n rm_cmd = osp.join(get_condor_bin_dir(config),\n CONDOR_COMMAND['remove'])\n return _simple_command_run([rm_cmd, jobid, '-name', schedd])", "def delete_workspace(client, workspace):\n data = {\"workspace\": workspace}\n return client._creoson_post(\"windchill\", \"delete_workspace\", data)", "def _remove_job(self, job_address):\n success = self.worker_status.remove_job(job_address)\n if success:\n while True:\n initialized_job = self.job_buffer.get()\n initialized_job.worker_address = self.master_heartbeat_address\n if initialized_job.is_alive:\n self.worker_status.add_job(initialized_job)\n if not initialized_job.is_alive: # make sure that the job is still alive.\n 
self.worker_status.remove_job(initialized_job.job_address)\n continue\n else:\n logger.warning(\"[Worker] a dead job found. The job buffer will not accept this one.\")\n if initialized_job.is_alive:\n break\n\n self.lock.acquire()\n self.request_master_socket.send_multipart(\n [remote_constants.NEW_JOB_TAG,\n cloudpickle.dumps(initialized_job),\n to_byte(job_address)])\n _ = self.request_master_socket.recv_multipart()\n self.lock.release()", "def delete_jobs(self, job_list, as_json=False):\n deletion_status = dict()\n for job_id in job_list:\n app_id = None\n if job_id.startswith('njs:'):\n # delete from njs\n is_deleted = True\n app_id = job_id[4:]\n elif job_id.startswith('method:'):\n # delete from njs_wrapper\n is_deleted = True\n app_id = job_id[7:]\n else:\n # delete from ujs (njs_wrapper?)\n is_deleted = False\n if app_id is not None:\n token = os.environ['KB_AUTH_TOKEN']\n njsClient = NarrativeJobService(URLS.job_service, token = token)\n try:\n status = njsClient.delete_app(app_id)\n if (not status == 'success') and ('was marked for deletion' not in status):\n is_deleted = False\n except Exception as e:\n # just return false until we get some better info from the NJS folks.\n is_deleted = False\n deletion_status[job_id] = is_deleted\n if as_json:\n import json\n deletion_status = json.dumps(deletion_status)\n return deletion_status", "def __clear_jobs(self):\n namespace = self._config.cluster_config.namespace\n self.__logger.info(f'Clearing old jobs in current namespace: {namespace}')\n\n for job in self.__client.get(namespace=self._config.cluster_config.namespace)['items']:\n job_name = job['metadata']['name']\n self.__logger.info(f'Deleting: {job_name}')\n try:\n self.__client.custom_api.delete_namespaced_custom_object(\n PYTORCHJOB_GROUP,\n PYTORCHJOB_VERSION,\n namespace,\n PYTORCHJOB_PLURAL,\n job_name)\n except Exception as e:\n self.__logger.warning(f'Could not delete: {job_name}')\n print(e)", "def delete(self, request, pk, format=None):\n settings.LOGGER.info(\n \"JobTitleDetailView >> delete >> request pk: {}\".format(pk))\n\n try:\n obj = self.get_object(pk)\n obj.delete()\n settings.LOGGER.info(\n \"JobTitleDetailView >> delete >> pk {}, success\".format(pk))\n return Response(status=status.HTTP_204_NO_CONTENT)\n except Exception as e:\n settings.LOGGER.info(\n \"JobTitleDetailView >> delete >> pk {}, error: {}\".format(pk,\n e))\n return Response({\"error\": \"{}\".format(e)},\n status=status.HTTP_400_BAD_REQUEST)", "def do_remove(self, arg):\n jail_destroy('remove', arg)", "def cancel_job(self, job_id):\n self.send(JobCommands.CANCEL_JOB, CancelJobPayload(job_id))", "def disable_job(self, job):\n if subprocess.call(self.cli + [PlatformJenkinsJavaCLI.DISABLE_JOB, job.name]) != 0:\n raise PlatformJenkinsException(\"Disabling job failed: \" + job.name)" ]
[ "0.7612383", "0.75631183", "0.75325656", "0.7520281", "0.74776965", "0.7339835", "0.73373073", "0.73164237", "0.7306595", "0.71888834", "0.70548284", "0.6966155", "0.69645363", "0.69290483", "0.6865273", "0.6846396", "0.682398", "0.6734061", "0.6658749", "0.6615366", "0.65832365", "0.65469056", "0.654216", "0.6523422", "0.64868456", "0.64190954", "0.6418003", "0.64175516", "0.6415986", "0.64063144", "0.6389352", "0.63794744", "0.6375018", "0.6369626", "0.63661", "0.6356967", "0.63348186", "0.633166", "0.6323704", "0.62916154", "0.627923", "0.6246358", "0.62334925", "0.6178857", "0.61741894", "0.6157946", "0.6156539", "0.6152936", "0.6133589", "0.60954535", "0.60856426", "0.60518557", "0.6011229", "0.5975692", "0.5946101", "0.59407985", "0.5929777", "0.59248835", "0.58550936", "0.58270776", "0.58073634", "0.5790703", "0.5784914", "0.5765401", "0.57564855", "0.5708055", "0.56913865", "0.5647084", "0.56449306", "0.55736446", "0.5573146", "0.5567617", "0.55080706", "0.54731274", "0.54643524", "0.54217553", "0.54145133", "0.5396841", "0.53948873", "0.53730494", "0.5370735", "0.5368602", "0.53595597", "0.5338244", "0.53340733", "0.5317638", "0.5313506", "0.53125626", "0.5306399", "0.5304075", "0.52887076", "0.5258648", "0.52560717", "0.5250032", "0.5247456", "0.5226795", "0.5201559", "0.520154", "0.519844", "0.51849675" ]
0.87232506
0
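
The delete_job document in the record above and the trigger_job document in the record that follows both wrap a shared Jenkins CLI invocation (self.cli) plus a command constant. Below is a standalone sketch of the same two calls, assuming the standard jenkins-cli.jar commands delete-job and build (with -p KEY=VALUE for parameters), which the DELETE_JOB and BUILD_JOB constants presumably name; the jar path and server URL are placeholders.

import subprocess

# Assumed base invocation; the jar path and server URL are placeholders.
CLI = ["java", "-jar", "jenkins-cli.jar", "-s", "http://jenkins.example.com/"]

def delete_job(name):
    # 'delete-job' is the standard Jenkins CLI command for removing a job.
    subprocess.check_call(CLI + ["delete-job", name])

def trigger_job(name, parameters=None):
    # 'build' starts a job; each parameter is passed as '-p KEY=VALUE',
    # mirroring the parameter_list built in the trigger_job document below.
    args = []
    for key, value in (parameters or {}).items():
        args += ["-p", "%s=%s" % (key, value)]
    if subprocess.call(CLI + ["build", name] + args) != 0:
        raise RuntimeError("Triggering job failed: " + name)
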
Triggers a given job, providing a set of parameters to it.
def trigger_job(self, job, parameters=None):\n        parameters = parameters or {}\n        parameter_list = []\n        for key in parameters:\n            parameter_list.append("-p")\n            parameter_list.append("%s=%s" % (key, parameters[key]))\n        if subprocess.call(self.cli + [PlatformJenkinsJavaCLI.BUILD_JOB, job.name] + parameter_list) != 0:\n            raise PlatformJenkinsException("Triggering job failed: " + job.name)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def trigger(builder, revision, files=[], dry_run=False, extra_properties=None):\n repo_name = query_repo_name_from_buildername(builder)\n return buildapi.trigger_arbitrary_job(repo_name, builder, revision, files, dry_run,\n extra_properties)", "def trigger_labeling_job(input_batch_id, batch_id, job_params):\n\n job_input = input_config_to_job_input(\n input_batch_id, job_params[\"jobName\"], job_params[\"jobLevel\"], job_params[\"inputConfig\"]\n )\n\n if job_params[\"jobType\"] == SmgtJobType.BATCH:\n trigger_batch_job(batch_id, job_input, job_params)", "def trigger_job(self, job_id, auth_token=RUNDECK_AUTH_TOKEN, argString=None):\n self.headers['X-Rundeck-Auth-Token'] = auth_token\n if argString:\n payload = {\"argString\": argString}\n self.job_response = requests.post(json=payload,\n url= \"{}/job/{}/run\".format(self.api,job_id),\n headers = self.headers)\n\n else:\n self.job_response = requests.post(url=\"{}/job/{}/run\".format(self.api, job_id),\n headers=self.headers)", "def do_job(self, job_id, job_param):\n raise NotImplementedError(\"should be implemented in inherited class\")", "def executor(self, job):\n job.connect(self)\n job.trigger(wait=True, **job.trigger_args) # Wait until job is ready\n job.start() # Notify star of job\n # Run specific action for this job\n job.action(**job.action_args)", "def execute(self, job):\n raise NotImplementedError", "def trigger(hass, event, value1=None, value2=None, value3=None):\n data = {\n ATTR_EVENT: event,\n ATTR_VALUE1: value1,\n ATTR_VALUE2: value2,\n ATTR_VALUE3: value3,\n }\n hass.services.call(DOMAIN, SERVICE_TRIGGER, data)", "def trigger(self, journey_id, step_id, data):\n self.journey_id = journey_id\n self.step_id = step_id\n if 'email_address' not in data:\n raise KeyError('The automation email queue must have an email_address')\n\n check_email(data['email_address'])\n response = self._mc_client._post(\n url=self._build_path(\"journeys\", journey_id, 'steps', step_id, 'actions', \"trigger\"),\n data=data\n )\n\n return response", "def Trigger(self):\n\t\tpayload = { \"Arg1\": self.href }\n\t\treturn self._execute('trigger', payload=payload, response_object=None)", "def send(self, trigger, **kwargs):\n temp_data = {}\n for key, value in kwargs.iteritems():\n temp_data[key] = value\n self.evt.sendMessage(trigger, data=temp_data)", "def modify_job(self, job, parameter):\n job.set_encut(parameter[0])\n job.set_kpoints(parameter[1])\n return job", "def __trigger(self, toBeExecuted, args = []):\n\n self.__connect()\n [ f(args) for f in toBeExecuted ]\n self.__disconnect()", "def modify_job(self, id, jobstore=None, **changes):\n\n fix_job_def(changes)\n\n if 'trigger' in changes:\n trigger, trigger_args = pop_trigger(changes)\n self._scheduler.reschedule_job(id, jobstore, trigger, **trigger_args)\n\n return self._scheduler.modify_job(id, jobstore, **changes)", "def build_trigger(ctx, build_type_id, branch, comment, parameter, agent_id,\n open_build_log, wait_for_run):\n parameters = dict([p.split('=', 1) for p in parameter])\n data = ctx.obj.trigger_build(\n build_type_id=build_type_id,\n branch=branch,\n comment=comment,\n parameters=parameters,\n agent_id=agent_id)\n build_id = data['id']\n ctx.invoke(build_queue_show, args=[build_id])\n if open_build_log:\n url = data['webUrl'] + '&tab=buildLog'\n webbrowser.open(url)\n if not wait_for_run:\n return\n while data['state'] == 'queued':\n data = ctx.obj.get_queued_build_by_build_id(build_id)\n click.echo('state: %s' % data['state'])\n time.sleep(1)\n ctx.invoke(build_queue_show, 
args=[build_id])", "def _run_a_job(self,\n job: 'JobAdapter',\n label: str,\n rerun: bool = False,\n ):\n self.run_job(job_type=job.job_type,\n conformer=job.conformer,\n cpu_cores=job.cpu_cores,\n dihedrals=job.dihedrals,\n directed_scan_type=job.directed_scan_type,\n ess_trsh_methods=job.ess_trsh_methods,\n fine=job.fine,\n irc_direction=job.irc_direction,\n job_adapter=job.job_adapter,\n label=label,\n level_of_theory=job.level,\n memory=job.job_memory_gb,\n max_job_time=job.max_job_time,\n rotor_index=job.rotor_index,\n reactions=job.reactions,\n trsh=list(job.args['trsh'].values()) if 'trsh' in job.args else '',\n torsions=job.torsions,\n times_rerun=job.times_rerun + int(rerun),\n tsg=job.tsg,\n xyz=job.xyz,\n )", "def fix_trigger(self, kwargs):\n trigger_kwargs = self.get_trigger_kwargs(**kwargs)\n if kwargs[\"trigger\"] == \"interval\":\n kwargs[\"trigger\"] = apscheduler.triggers.interval.IntervalTrigger(**trigger_kwargs)\n elif kwargs[\"trigger\"] == \"date\":\n kwargs[\"trigger\"] = apscheduler.triggers.date.DateTrigger(**trigger_kwargs)\n elif kwargs[\"trigger\"] == \"cron\":\n kwargs[\"trigger\"] = apscheduler.triggers.cron.CronTrigger(**trigger_kwargs)\n return kwargs", "def task_trigger(self, args):\n h, tmp = tempfile.mkstemp(\n dir=self._tmpdir, prefix='trigger_raw', suffix='.json')\n os.close(h)\n cmd = [\n '-user',\n 'joe@localhost',\n '-d',\n 'pool=default',\n '-dump-json',\n tmp,\n ]\n cmd.extend(args)\n assert not self._run_swarming('trigger',\n cmd), 'Failed to trigger a task. cmd=%s' % cmd\n with open(tmp, 'rb') as f:\n data = json.load(f)\n task_id = data['tasks'][0]['task_id']\n logging.debug('task_id = %s', task_id)\n return task_id", "def fire_trigger(self, trigger):\n if not self.exists():\n return\n if trigger in self.events:\n for action in self.events[trigger]:\n action(requestor=self)", "def run(\n self,\n cause: str = None,\n account_id: int = None,\n job_id: int = None,\n token: str = None,\n additional_args: dict = None,\n account_id_env_var_name: str = \"ACCOUNT_ID\",\n job_id_env_var_name: str = \"JOB_ID\",\n token_env_var_name: str = \"DBT_CLOUD_TOKEN\",\n wait_for_job_run_completion: bool = False,\n max_wait_time: int = None,\n domain: str = None,\n ) -> dict:\n if cause is None:\n raise ValueError(\n \"\"\"\n Cause cannot be None.\n Please provide a cause to trigger the dbt Cloud job.\n \"\"\"\n )\n\n if account_id is None and account_id_env_var_name in os.environ:\n account_id = int(os.environ[account_id_env_var_name])\n\n if account_id is None:\n raise ValueError(\n \"\"\"\n dbt Cloud Account ID cannot be None.\n Please provide an Account ID or the name of the env var that contains it.\n \"\"\"\n )\n\n if job_id is None and job_id_env_var_name in os.environ:\n job_id = int(os.environ[job_id_env_var_name])\n\n if job_id is None:\n raise ValueError(\n \"\"\"\n dbt Cloud Job ID cannot be None.\n Please provide a Job ID or the name of the env var that contains it.\n \"\"\"\n )\n\n if domain is None:\n domain = \"cloud.getdbt.com\"\n\n if token is None and token_env_var_name in os.environ:\n token = os.environ.get(token_env_var_name)\n\n if token is None:\n raise ValueError(\n \"\"\"\n dbt Cloud token cannot be None.\n Please provide a token or the name of the env var that contains it.\n \"\"\"\n )\n\n run = trigger_job_run(\n account_id=account_id,\n job_id=job_id,\n cause=cause,\n additional_args=additional_args,\n token=token,\n domain=domain,\n )\n if wait_for_job_run_completion:\n job_run_result = wait_for_job_run(\n account_id=account_id,\n 
run_id=run[\"id\"],\n token=token,\n max_wait_time=max_wait_time,\n domain=domain,\n )\n\n artifact_links = []\n try:\n artifact_links = list_run_artifact_links(\n account_id=account_id, run_id=run[\"id\"], token=token, domain=domain\n )\n\n markdown = f\"Artifacts for dbt Cloud run {run['id']} of job {job_id}\\n\"\n for link, name in artifact_links:\n markdown += f\"- [{name}]({link})\\n\"\n create_markdown_artifact(markdown)\n\n except DbtCloudListArtifactsFailed as err:\n self.logger.warn(\n f\"Unable to retrieve artifacts generated by dbt Cloud job run: {err}\"\n )\n\n job_run_result[\"artifact_urls\"] = [link for link, _ in artifact_links]\n\n return job_run_result\n\n else:\n return run", "def handler(event: Dict[str, Any], context: Any) -> None:\n client = boto3.client(\"batch\")\n\n job_a = client.submit_job(\n jobName=\"submit_job_a\",\n jobQueue=os.getenv(\"BATCH_JOB_QUEUE\"),\n jobDefinition=os.getenv(\"BATCH_JOB_A_DEFINITION\"))\n print(\"JobA Submitted: \", job_a)\n\n job_b = client.submit_job(\n jobName=\"submit_job_b\",\n jobQueue=os.getenv(\"BATCH_JOB_QUEUE\"),\n jobDefinition=os.getenv(\"BATCH_JOB_B_DEFINITION\"),\n dependsOn=[{\n \"jobId\": job_a[\"jobId\"]\n }])\n print(\"JobB Sumitted: \", job_b)", "def update(self, job_name, param_name, value, description=None):\n if job_name in self._jobs:\n getattr(self._jobs[job_name], param_name).update(value, description)\n else:\n self.log.error(\"Invalid job name: %s\", job_name)", "def trigger_batch_job(parent_batch_id, job_input, job_params):\n job_name = job_params[\"jobName\"]\n job_modality = job_params[\"jobModality\"]\n\n batch_id = f\"{parent_batch_id}-{job_name}\"\n\n output_path = (\n f\"s3://{batch_processing_bucket_name}/batch_manifests/{job_modality}/{batch_id}/output\"\n )\n\n # If a label category file wasn't provided as API input, use the previous\n # job's label category file.\n label_category_config_uri = job_input.label_category_s3_uri\n if \"labelCategoryConfigS3Uri\" in job_params:\n label_category_config_uri = job_params[\"labelCategoryConfigS3Uri\"]\n\n # batch_job_input_data = event[\"batch_job_input\"]\n labeling_job_request = construct_labeling_job_input(\n parent_batch_id=parent_batch_id,\n input_manifest_url=job_input.input_manifest_s3_uri,\n audit_label_attribute_name=job_input.label_attribute_name,\n label_category_config_uri=label_category_config_uri,\n job_params=job_params,\n output_path=output_path,\n )\n\n sagemaker.create_labeling_job(**labeling_job_request)\n s3_output_path = f\"{output_path}/{job_name}/manifests/output/output.manifest\"\n\n db.insert_job_level_metadata(\n parent_batch_id=parent_batch_id,\n batch_id=batch_id,\n batch_status=BatchStatus.WAIT_FOR_SMGT_RESPONSE,\n labeling_job_name=job_name,\n label_attribute_name=labeling_job_request[\"LabelAttributeName\"],\n label_category_s3_uri=labeling_job_request[\"LabelCategoryConfigS3Uri\"],\n job_input_s3_uri=labeling_job_request[\"InputConfig\"][\"DataSource\"][\"S3DataSource\"][\n \"ManifestS3Uri\"\n ],\n job_output_s3_uri=s3_output_path,\n )", "def trigger_job(revision, buildername, times=1, files=None, dry_run=False,\n extra_properties=None):\n repo_name = query_repo_name_from_buildername(buildername)\n builder_to_trigger = None\n list_of_requests = []\n LOG.info(\"We want to trigger '%s' on revision '%s' a total of %d time(s).\" %\n (buildername, revision, times))\n\n if not buildapi.valid_revision(repo_name, revision):\n return list_of_requests\n\n if not valid_builder(buildername):\n LOG.error(\"The builder %s requested is 
invalid\" % buildername)\n # XXX How should we exit cleanly?\n exit(-1)\n\n if files:\n builder_to_trigger = buildername\n _all_urls_reachable(files)\n else:\n builder_to_trigger, files = _determine_trigger_objective(\n revision,\n buildername,\n )\n\n if builder_to_trigger != buildername and times != 1:\n # The user wants to trigger a downstream job,\n # however, we need a build job instead.\n # We should trigger the downstream job multiple times, however,\n # we only trigger the upstream jobs once.\n LOG.debug(\"Since we need to trigger a build job we don't need to \"\n \"trigger it %s times but only once.\" % times)\n LOG.info(\"In order to trigger %s %i times, please run the script again after %s ends.\"\n % (buildername, times, builder_to_trigger))\n times = 1\n\n if builder_to_trigger:\n if dry_run:\n LOG.info(\"Dry-run: We were going to request '%s' %s times.\" %\n (builder_to_trigger, times))\n # Running with dry_run being True will only output information\n trigger(builder_to_trigger, revision, files, dry_run, extra_properties)\n else:\n for _ in range(times):\n req = trigger(builder_to_trigger, revision, files, dry_run, extra_properties)\n if req is not None:\n list_of_requests.append(req)\n else:\n LOG.debug(\"Nothing needs to be triggered\")\n\n return list_of_requests", "def job(self, job: str):\n\n self._job = job", "def job(self, job: str):\n\n self._job = job", "def setTrigger(self, *args):\n return _libsbml.Event_setTrigger(self, *args)", "def job_as_parameter(f):\n f.job_as_parameter = True\n return f", "def submit(cls, background_job):\n background_job.save()\n request_es_backup.schedule(args=(background_job.id,), delay=10)", "def submit(self, job_parameters):\n # FIX: Don't pass through the real job name. Bilby outputs the job files by whatever this parameter is, that\n # means that names containing special characters will break. 
Uniqueness is guaranteed by the folder structure\n job_parameters = json.loads(job_parameters)\n job_parameters['name'] = 'bilby'\n\n # Write the job parameters to a file\n json.dump(job_parameters, open(self.job_parameter_file, 'w'))\n\n # Run the job\n return super().submit(job_parameters)", "async def trigger(self, variables):\n variables[\"samples\"] = self._center.samples\n _LOGGER.debug(\"Triggered automation %s\", self.name)\n try:\n cond = self._cond_func(variables)\n except TemplateError as exc:\n _LOGGER.error(\"Failed to render condition for %s: %s\", self.name, exc)\n return\n if cond:\n _LOGGER.debug(\"Condition passed for %s\", self.name)\n await self._action_sequence(variables)", "def create_trigger(self, trigger, conditions=[], dampenings=[]):\n full_trigger = {'trigger': trigger, 'conditions': conditions, 'dampenings': dampenings}\n self._post(path='triggers/trigger', data=full_trigger)", "def job_execute(self, row):\n job_id = self.jobsTableWidget.item(row, 0).text()\n self.connection.job_start(job_id)\n self.refresh_jobs()", "def _submit_special(self, config, job_id, job_params):\n (module, method) = job_params[\"method\"].split(\".\")\n self.logger.log(\"Submit %s as a %s:%s job\" % (job_id, module, method))\n\n self.sr.run(\n config,\n job_params,\n job_id,\n callback=self.callback_url,\n fin_q=[self.jr_queue],\n )", "def send(self, job_command, payload):\n self.work_queue_client.send(job_command, payload)", "def apply(self):\n changed = False\n job_schedule_exists = False\n results = netapp_utils.get_cserver(self.server)\n cserver = netapp_utils.setup_ontap_zapi(\n module=self.module, vserver=results)\n netapp_utils.ems_log_event(\"na_ontap_job_schedule\", cserver)\n job_details = self.get_job_schedule()\n if job_details:\n job_schedule_exists = True\n if self.state == 'absent': # delete\n changed = True\n elif self.state == 'present': # modify\n if job_details['job_minutes'] != str(self.job_minutes):\n changed = True\n else:\n if self.state == 'present': # create\n changed = True\n if changed:\n if self.module.check_mode:\n pass\n else:\n if self.state == 'present': # execute create\n if not job_schedule_exists:\n self.create_job_schedule()\n else: # execute modify minute\n self.modify_minute_job_schedule()\n elif self.state == 'absent': # execute delete\n self.delete_job_schedule()\n self.module.exit_json(changed=changed)", "def run_job(job, interrupt_if_necessary):", "def job_execute(self, job_id):\n resp = self.backend.job_start(job_id)\n\n #if resp.status_code:\n #error(self.iface, str(resp))\n # warning(self.iface, str(resp))\n self.refresh_jobs()", "def schedule_immediate(self, job):\r\n assert(self.c.is_live())\r\n self.imm_jobs.put(job)", "def job(name):\n current_app.logger.info(\"Running custom job: {}\".format(name))", "def execute_workflow(self):\n logging.info(f\"called {self.job} with {self.file_type}\")\n\n job_params = {\n \"job\": self.job,\n \"job_run_dao\": self.job_run_dao,\n \"incident_dao\": self.incident_dao,\n \"subprocess\": self.file_type\n }\n\n with SmsJobController(**job_params) as controller:\n self.job_run = controller.job_run\n\n try:\n self.process_map[self.job]()\n controller.job_run_result = controller.run_result_enum.SUCCESS\n except KeyError:\n raise KeyError", "def set_wrapper(bot, update, args, job_queue, chat_data):\n chat_id = update.message.chat_id\n try:\n # args[0] should contain the time for the timer in seconds\n due = int(args[1])\n if due < 0:\n update.message.reply_text('Sorry we can not go back to future!')\n 
return\n\n job_name = args[0]\n if job_name not in settings.JOBS:\n update.message.reply_text(\n 'Sorry {0} is not a valid job'.format(job_name))\n return\n\n # Add job to queue\n job_queue.run_repeating(logparser_job, due, name=job_name, context=chat_id)\n\n update.message.reply_text('{0} job set!'.format(job_name))\n\n except (IndexError, ValueError):\n update.message.reply_text('Usage: /set <job_name> <seconds>')", "def work(self, job):\n pass", "def trigger_workflow(self, trigger_id, commit_sha='',\n status_url=None, collab_url=None):\n # Note: self.context will be None at this point as this is a\n # non-authenticated request.\n db_obj = objects.registry.Assembly.get_by_trigger_id(None,\n trigger_id)\n try:\n # get the trust\\impersonation context and authenticate it.\n self.context = keystone_utils.create_delegation_context(\n db_obj, self.context)\n except exception.AuthorizationFailure as auth_ex:\n LOG.warning(auth_ex)\n return\n\n plan_obj = objects.registry.Plan.get_by_id(self.context,\n db_obj.plan_id)\n\n artifacts = plan_obj.raw_content.get('artifacts', [])\n for arti in artifacts:\n if repo_utils.verify_artifact(arti, collab_url):\n self._build_artifact(assem=db_obj, artifact=arti,\n commit_sha=commit_sha,\n status_url=status_url)", "def trigger(hass, config, action):\n if CONF_AFTER in config:\n after = config.get(CONF_AFTER)\n hours, minutes, seconds = after.hour, after.minute, after.second\n else:\n hours = config.get(CONF_HOURS)\n minutes = config.get(CONF_MINUTES)\n seconds = config.get(CONF_SECONDS)\n\n @asyncio.coroutine\n def time_automation_listener(now):\n \"\"\"Listen for time changes and calls action.\"\"\"\n hass.async_add_job(action, {\n 'trigger': {\n 'platform': 'time',\n 'now': now,\n },\n })\n\n return track_time_change(hass, time_automation_listener,\n hour=hours, minute=minutes, second=seconds)", "def _launch_job(self, job):\n details = self.sm.get_job_details(job.jobId)\n handler = self.handlers[details[0]['method']]\n type = details[0]['type']\n resultId = details[0]['resultid']\n job.set_phase('EXECUTING')\n job.set_start_time(datetime.utcnow().isoformat())\n job.add_result(resultId, 'http://localhost:8000/%s/%s/results/details' % (type, job.jobId))\n self.sm.update_job(job = job)\n self.threads.append(Future(handler, job.jobId, job))", "def trigger(self, event_name, event_data=None):\n\n raise NotImplementedError()", "def sendjob(self,bashscript):", "def launchJob(self, job):\n try:\n # Print initial status\n self.queueStatus(job)\n\n # Start the clock\n start = clock()\n\n # Run the job\n job.run()\n\n # Stop the clock while in a thread lock\n stop = clock()\n\n with self.thread_lock:\n timing = stop - start\n self.shared_dags[job].setTime(timing)\n\n # Print final status\n self.queueStatus(job)\n\n except AttributeError:\n raise SchedulerError('run method is not defined')", "def flow_job(self, name=None, params=None):\n # Note: Use -B to avoid permission problems with .pyc files created from commandline test\n if self.func_name:\n script = \"export PYTHONPATH=\" + test_tmp_dir + \"\\n\"\n script += test_cfg.skip_job_load_sh_export_str() + \"\\n\"\n # Supply dummy args for the py.test fixtures\n dummy_args = ','.join(['0' for _ in range(self.func_num_params)])\n script += \"python -Bc &quot;from jenkinsflow.test.\" + self.file_name.replace('.py', '') + \" import *; test_\" + self.func_name + \"(\" + dummy_args + \")&quot;\"\n else:\n script = \"python -B \" + jp(pseudo_install_dir, 'demo', self.file_name)\n name = '0flow_' + name if name else 
'0flow'\n self._jenkins_job(name, exec_time=0.5, params=params, script=script)\n return (self.job_name_prefix or '') + name", "def activate_job(job_name):\n job = Job.from_name(job_name)\n job.set_active(True)\n return redirect(url_for('all_jobs_for_client', ClientID=job.ClientID))", "def triggered(self, *args, **kwargs): # real signature unknown\n pass", "def on_job_update(_job):\n nonlocal job\n job = _job\n # Asserts that job is either pending or canceled.\n assert job.state in ['PENDING', 'CANCELED'], (\n 'job that canceled immediately after submission has wrong '\n 'state `%s`' % job.state\n )", "def _exec_job(self, job: Job, eval_args: Tuple[Tuple, dict]) -> None:\n\n # Ensure we are on main scheduler thread.\n assert self.thread_id == threading.get_ident()\n\n # Evaluate task_name to specific task.\n # TaskRegistry is like an environment.\n job.task = self.task_registry.get(job.expr.task_name)\n assert job.task\n\n # Make sure the job can \"fit\" within the available resource limits.\n job_limits = job.get_limits()\n if not self._is_job_within_limits(job_limits):\n self._add_job_pending_limits(job, eval_args)\n return\n self._consume_resources(job_limits)\n\n # Make default arguments explicit so that we can hash them.\n args, kwargs = eval_args\n job.eval_args = set_arg_defaults(job.task, args, kwargs)\n\n # Set job caching preference.\n if not self.use_cache:\n job.task_options[\"cache\"] = False\n\n self.backend.record_job_start(job)\n\n # Preprocess arguments before sending them to task function.\n args, kwargs = job.eval_args\n args, kwargs = self.preprocess_args(job, args, kwargs)\n\n # Check cache using eval_hash as key.\n job.eval_hash, job.args_hash = self.get_eval_hash(job.task, args, kwargs)\n\n call_hash: Optional[str]\n if self.use_cache and job.get_option(\"cache\", True):\n result, job.was_cached, call_hash = self.get_cache(\n job,\n job.eval_hash,\n job.task.hash,\n job.args_hash,\n check_valid=job.get_option(\"check_valid\", \"full\"),\n )\n else:\n result, job.was_cached, call_hash = None, False, None\n if job.was_cached:\n # Evaluation was cached, so we proceed to done_job/resolve_job.\n self.log(\n \"{action} Job {job_id}: {task_call} (eval_hash={eval_hash}{check_valid})\".format(\n job_id=job.id[:8],\n action=\"Cached\".ljust(JOB_ACTION_WIDTH),\n task_call=format_task_call(job.task, args, kwargs),\n eval_hash=job.eval_hash[:8],\n check_valid=\", check_valid=shallow\" if call_hash else \"\",\n )\n )\n if call_hash:\n # We have a fully resolved result.\n return self._resolve_job(job, result, call_hash)\n else:\n # We have a result that needs further evaluation.\n return self._done_job(job, result)\n\n # Perform rollbacks due to Handles that conflict with past Handles.\n if not self.dryrun:\n self.perform_rollbacks(args, kwargs)\n\n # Determine executor.\n executor_name = job.get_option(\"executor\") or \"default\"\n executor = self.executors.get(executor_name)\n if not executor:\n return self._reject_job(\n job, SchedulerError('Unknown executor \"{}\"'.format(executor_name))\n )\n\n self.log(\n \"{action} Job {job_id}: {task_call} on {executor}\".format(\n job_id=job.id[:8],\n action=(\"Dryrun\" if self.dryrun else \"Run\").ljust(JOB_ACTION_WIDTH),\n task_call=format_task_call(job.task, args, kwargs),\n executor=executor_name,\n )\n )\n\n # Stop short of submitting jobs during a dryrun.\n if self.dryrun:\n return\n\n # Submit job.\n if not job.task.script:\n executor.submit(job, args, kwargs)\n else:\n executor.submit_script(job, args, kwargs)", "def 
on_job_update(_job):\n nonlocal job\n job = _job\n # Asserts that job is either pending or canceled.\n assert job.state in ['PENDING', 'CANCELED'], (\n 'Job that canceled immediately after submission has wrong '\n 'state `{job.state}`!'\n )", "def on_job_update(_job):\n nonlocal job\n job = _job\n # Asserts that job is either pending or canceled.\n assert job.state in ['PENDING', 'CANCELED'], (\n 'Job that canceled immediately after submission has wrong '\n f'state `{job.state}`!')", "def on_job_update(_job):\n nonlocal job\n job = _job\n # Asserts that job is either pending or canceled.\n assert job.state in ['PENDING', 'CANCELED'], (\n 'Job that canceled immediately after submission has wrong '\n f'state `{job.state}`!')", "def on_job_update(_job):\n nonlocal job\n job = _job\n # Asserts that job is either pending or canceled.\n assert job.state in ['PENDING', 'CANCELED'], (\n 'Job that canceled immediately after submission has wrong '\n f'state `{job.state}`!')", "def enqueue_job(self, job, timeout=None, set_meta_data=True):\n if set_meta_data:\n job.origin = self.name\n job.enqueued_at = times.now()\n\n if timeout:\n job.timeout = timeout # _timeout_in_seconds(timeout)\n else:\n job.timeout = 180 # default\n yield job.save()\n yield self.push_job(job)", "def new_job_with_custom_salesforce(salesforce_instance):\n return SalesforceBulkJob('update', 'Lead', salesforce=salesforce_instance)", "def trigger_range(buildername, revisions, times=1, dry_run=False, files=None):\n repo_name = query_repo_name_from_buildername(buildername)\n LOG.info(\"We want to have %s job(s) of %s on revisions %s\" %\n (times, buildername, str(revisions)))\n for rev in revisions:\n LOG.info(\"\")\n LOG.info(\"=== %s ===\" % rev)\n if not buildapi.valid_revision(repo_name, rev):\n LOG.info(\"We can't trigger anything on pushes that the revision is not valid for \"\n \"buildapi.\")\n continue\n\n LOG.info(\"We want to have %s job(s) of %s on revision %s\" %\n (times, buildername, rev))\n\n # 1) How many potentially completed jobs can we get for this buildername?\n jobs = query_jobs(repo_name, rev)\n matching_jobs = _matching_jobs(buildername, jobs)\n successful_jobs, pending_jobs, running_jobs = _status_summary(matching_jobs)[0:3]\n\n potential_jobs = pending_jobs + running_jobs + successful_jobs\n LOG.debug(\"We found %d pending jobs, %d running jobs and %d successful_jobs.\" %\n (pending_jobs, running_jobs, successful_jobs))\n\n if potential_jobs >= times:\n LOG.info(\"We have %d job(s) for '%s' which is enough for the %d job(s) we want.\" %\n (potential_jobs, buildername, times))\n\n else:\n # 2) If we have less potential jobs than 'times' instances then\n # we need to fill it in.\n LOG.info(\"We have found %d potential job(s) matching '%s' on %s. 
\"\n \"We need to trigger more.\" % (potential_jobs, buildername, rev))\n\n # If a job matching what we want already exists, we can\n # use the retrigger API in self-serve to retrigger that\n # instead of creating a new arbitrary job\n if len(matching_jobs) > 0:\n request_id = matching_jobs[0][\"requests\"][0][\"request_id\"]\n buildapi.make_retrigger_request(\n repo_name,\n request_id,\n count=(times - potential_jobs),\n dry_run=dry_run)\n\n # If no matching job exists, we have to trigger a new arbitrary job\n else:\n list_of_requests = trigger_job(\n revision=rev,\n buildername=buildername,\n times=(times - potential_jobs),\n dry_run=dry_run,\n files=files)\n\n if list_of_requests and any(req.status_code != 202 for req in list_of_requests):\n LOG.warning(\"Not all requests succeeded.\")\n\n # TODO:\n # 3) Once we trigger a build job, we have to monitor it to make sure that it finishes;\n # at that point we have to trigger as many test jobs as we originally intended\n # If a build job does not finish, we have to notify the user... what should it then\n # happen?", "async def trigger_build(self, *, branch=None, message=None):", "def updateBuildParams(self, job, item, params):\n\n # NOTE(jhesketh): The params need to stay in a key=value data pair\n # as workers cannot necessarily handle lists.\n\n if callable(job.parameter_function):\n pargs = inspect.getargspec(job.parameter_function)\n if len(pargs.args) == 2:\n job.parameter_function(item, params)\n else:\n job.parameter_function(item, job, params)\n self.log.debug(\"Custom parameter function used for job %s, \"\n \"change: %s, params: %s\" % (job, item.change,\n params))\n\n # NOTE(mmedvede): Swift parameter creation should remain after the call\n # to job.parameter_function to make it possible to update LOG_PATH for\n # swift upload url using parameter_function mechanism.\n if job.swift and self.swift.connection:\n\n for name, s in job.swift.items():\n swift_instructions = {}\n s_config = {}\n s_config.update((k, v.format(item=item, job=job,\n change=item.change))\n if isinstance(v, six.string_types)\n else (k, v)\n for k, v in s.items())\n\n (swift_instructions['URL'],\n swift_instructions['HMAC_BODY'],\n swift_instructions['SIGNATURE']) = \\\n self.swift.generate_form_post_middleware_params(\n params['LOG_PATH'], **s_config)\n\n if 'logserver_prefix' in s_config:\n swift_instructions['LOGSERVER_PREFIX'] = \\\n s_config['logserver_prefix']\n elif self.config.has_option('swift',\n 'default_logserver_prefix'):\n swift_instructions['LOGSERVER_PREFIX'] = \\\n self.config.get('swift', 'default_logserver_prefix')\n\n # Create a set of zuul instructions for each instruction-set\n # given in the form of NAME_PARAMETER=VALUE\n for key, value in swift_instructions.items():\n params['_'.join(['SWIFT', name, key])] = value", "def __setJobParam( self, name, value ):\n if not self.jobID:\n return S_ERROR( 'JobID not defined' )\n\n self.log.verbose( 'setJobParameter(%s, %s, %s)' % ( self.jobID, name, value ) )\n return RPCClient( 'WorkloadManagement/JobStateUpdate', timeout = 120 ).setJobParameter( int( self.jobID ), str( name ), str( value ) )", "def fire_event(self, event, target) -> None:\n self.log.debug(\"Sending event '{}({})' to host '{}' ({})\", event.fun, event.arg, target.host, target.id)\n\n task_message = ServerMsgFactory().create(jid=event.jid)\n task_message.ret.message = \"ping\"\n task_message.internal = {\n \"function\": event.fun,\n \"arguments\": event.arg,\n }\n proto = self.get_client_protocol(target.id) # This might be None due 
to the network issues (unregister fired)\n if proto is None and self.__retry_calls.get(target.id) != 0:\n self.__retry_calls.setdefault(target.id, 3)\n self.__retry_calls[target.id] -= 1\n pause = random.randint(3, 15)\n self.log.debug(\"Peer temporarily unavailable for peer {} to fire job {}. Waiting {} seconds.\",\n target.id, event.jid, pause)\n reactor.callLater(pause, self.fire_event, event, target)\n else:\n if target.id in self.__retry_calls:\n del self.__retry_calls[target.id]\n if proto is not None:\n proto.sendMessage(ServerMsgFactory.pack(task_message), isBinary=True)\n self.jobstore.set_as_fired(jid=event.jid, target=target)\n self.log.debug(\"Job '{}' has been fired successfully\", event.jid)\n else:\n self.log.debug(\"Job '{}' temporarily cannot be fired to the client {}.\", event.jid, target.id)", "def invoke(self):\n\n if self.value:\n value = 1\n else:\n value = 0\n\n base=\"data_request?id=action\"\n action = \"SetTarget\"\n svc = \"urn:micasaverde-com:serviceId:DoorLock1\"\n path = \"%s&DeviceNum=%d&serviceId=%s&action=%s&newTargetValue=%d&output_format=json\" \\\n % (base, self.device.id, svc, action, value)\n status = self.device.vera.get(path)\n\n job = Job()\n job.id = int(status[\"u:SetTargetResponse\"][\"JobID\"])\n job.vera = self.device.vera\n return job", "def fire_event(self, callback: Callable[..., bool], **kwargs):\n if not callable(callback):\n raise RuntimeError('Callback method (callback) is not a callable.')\n\n # get developer passed trigger_ids\n trigger_ids: list | None = kwargs.pop('trigger_ids', None)\n\n for trigger_id, config in list(self.configs.items()):\n if trigger_ids is not None and trigger_id not in trigger_ids:\n # skip config that don't match developer provided trigger ids\n continue\n\n try:\n # get a session_id specifically for this thread\n session_id: str = self.create_session_id()\n\n # only required for testing in tcex framework\n self._tcex_testing(session_id, trigger_id)\n\n # get an instance of PB module with current\n # session_id and outputs to pass to callback\n outputs: list | str = config.tc_playbook_out_variables or []\n if isinstance(outputs, str):\n outputs = outputs.split(',')\n playbook = self.get_playbook(context=session_id, output_variables=outputs)\n\n self.log.info(f'feature=trigger-service, event=fire-event, trigger-id={session_id}')\n\n # current thread has session_id as name\n self.service_thread(\n name=session_id,\n target=self.fire_event_trigger,\n args=(\n callback,\n playbook,\n session_id,\n trigger_id,\n config,\n ),\n kwargs=kwargs,\n session_id=session_id,\n trigger_id=trigger_id,\n )\n except Exception:\n self.log.trace(traceback.format_exc())", "def process(self, task):\n # Predict timestamp for the first run\n _, date = task.trigger(wait=False, **task.trigger_args)\n\n # Adding the task in schedule queue\n self.task_manager.schedule_task(task, date)", "def schedule(self, compute_env, scheduler_node, build_id=\"\", **kwargs):\n\n if not isinstance(self.data, Transform):\n raise NotImplementedError(\"cannot schedule non-Transform objects\")\n\n # this id is globally unique\n job_id = self.job_id\n\n if not build_id:\n build_id = str(uuid.uuid4())\n\n max_attempt = kwargs.pop(\"max_attempt\", 1)\n min_attempt = kwargs.pop(\"min_attempt\", 1)\n\n log.debug(\"build %s scheduling job %s...\", build_id, job_id)\n assert self._attempt is None\n\n def _submit_new_job(ctx, attempt_no=1):\n # fixme calculate attempts:\n if attempt_no < min_attempt:\n log.debug(\" jump starting job %s at attempt %d\",\n job_id, 
min_attempt)\n attempt_no = min_attempt\n\n if attempt_no > max_attempt:\n # allow the next try to start from attempt_no==1\n log.debug(\" maximum number of attempts (%d) exceeded. cancelling job.\", max_attempt)\n ctx.jobdata = \"\"\n ctx.jobattempt = 0\n ctx.save()\n scheduler_node.cancel()\n return\n\n # let the user's object calculate its resource requirements\n resources = self.data.task_resources(attempt=attempt_no) or {}\n\n remote_script_url = \"s3://%(bucket)s/jobs/%(envname)s/%(jobid)s/jobscript\" % {\n \"bucket\": config['storage']['tmp_bucket'],\n \"envname\": compute_env.name,\n \"jobid\": job_id\n }\n\n user_deps_prefix = \"s3://%(bucket)s/user_context/%(envname)s/\" % {\n \"bucket\": config['storage']['tmp_bucket'],\n \"envname\": compute_env.name,\n \"jobid\": job_id\n }\n\n user_deps_url = runtime.upload_user_context(user_deps_prefix)\n\n with io.BytesIO() as exec_fp:\n script_len = exec_fp.write(self.execution_transfer_script(resources).encode('utf-8'))\n exec_fp.seek(0)\n log.debug(\" uploading job script for job_id %s at %s ...\", job_id, remote_script_url)\n s3_streaming_put(exec_fp, remote_script_url, content_type=\"text/x-python\", content_length=script_len,\n logprefix=job_id + \" jobscript \")\n\n settings = {\n 'vcpus': resources.get('vcpus', None),\n 'memory': resources.get('memory', None),\n 'timeout': resources.get('timeout', -1),\n 'environment': {\n \"BUNNIES_VERSION\": __version__,\n \"BUNNIES_SUBMIT_TIME\": str(int(datetime.utcnow().timestamp()*1000)),\n \"BUNNIES_TRANSFER_SCRIPT\": remote_script_url,\n \"BUNNIES_USER_DEPS\": user_deps_url,\n \"BUNNIES_JOBID\": job_id,\n \"BUNNIES_ATTEMPT\": \"%d %d\" % (attempt_no, max_attempt),\n \"BUNNIES_RESULT\": os.path.join(self.data.output_prefix(), constants.TRANSFORM_RESULT_FILE),\n \"BUNNIES_BUILDID\": build_id\n }\n }\n\n if settings.get('timeout') <= 0:\n settings['timeout'] = 24*3600*7 # 7 days\n\n self._attempt = compute_env.submit_simple_batch_job(job_id, self._jobdef, **settings)\n self._attempt.meta['attempt_no'] = attempt_no\n self._attempt_ids.append({'attempt_no': attempt_no, 'job_id': self._attempt.job_id})\n # commit the new batch job id to the global kv store\n ctx.jobtype = \"batch\"\n ctx.jobdata = self._attempt.job_id\n ctx.jobattempt = attempt_no\n ctx.submitter = build_id\n ctx.save()\n scheduler_node.submit() # tell the bunnies scheduler that the job has been submitted\n return\n\n def _reuse_existing(ctx, job_obj, attempt_no):\n job_obj.meta['attempt_no'] = attempt_no\n self._attempt = compute_env.track_existing_job(job_obj)\n self._attempt_ids.append({'attempt_no': attempt_no, 'job_id': self._attempt.job_id})\n scheduler_node.submit() # tell the bunnies scheduler that the job has been submitted\n return\n\n with kvstore.submit_lock_context(build_id, job_id) as ctx:\n ctx.load()\n if ctx.jobtype != \"batch\":\n raise ValueError(\"unhandled job type\")\n\n if not ctx.jobdata:\n # has never been submitted\n log.debug(\" job %s has not yet been submitted\", job_id)\n return _submit_new_job(ctx, attempt_no=1)\n else:\n log.debug(\" job %s has an existing submission: %s\", job_id, ctx.jobdata)\n\n last_attempt_id = ctx.jobdata\n last_attempt_no = int(ctx.jobattempt)\n\n # see if it's still tracked by AWS Batch\n job_obj = AWSBatchSimpleJob.from_job_id(last_attempt_id)\n if not job_obj:\n # no longer tracked\n log.debug(\" job information no longer available for %s. 
submitting new.\", last_attempt_id)\n return _submit_new_job(ctx, attempt_no=1)\n\n job_desc = job_obj.get_desc()\n job_status = job_desc['status']\n if job_status == \"FAILED\":\n log.debug(\" %s state=%s attempt=%d. submitting new attempt=%d\",\n last_attempt_id, job_status, last_attempt_no, last_attempt_no + 1)\n return _submit_new_job(ctx, attempt_no=last_attempt_no + 1)\n else:\n log.debug(\" %s state=%s attempt=%d. can be reused\",\n last_attempt_id, job_status, last_attempt_no)\n return _reuse_existing(ctx, job_obj, last_attempt_no)", "def __do_trigger(self, request):\n dmp_trigger.DmpTrigger().trigger(request)\n return defines.ReturnCode.SUCC", "def _build_job_submission_call(self,\n name,\n job_settings,\n logger):\n raise NotImplementedError(\n \"'_build_job_submission_call' not implemented.\")", "def execute(self, context: Context) -> None:\n self.hook = AirbyteHook(airbyte_conn_id=self.airbyte_conn_id, api_version=self.api_version)\n job_object = self.hook.submit_sync_connection(connection_id=self.connection_id)\n self.job_id = job_object.json()[\"job\"][\"id\"]\n\n self.log.info(\"Job %s was submitted to Airbyte Server\", self.job_id)\n if not self.asynchronous:\n self.log.info(\"Waiting for job %s to complete\", self.job_id)\n self.hook.wait_for_job(job_id=self.job_id, wait_seconds=self.wait_seconds, timeout=self.timeout)\n self.log.info(\"Job %s completed successfully\", self.job_id)\n\n return self.job_id", "def test_jobs_are_updated_on_audit_success(self, mock_run_audit):\n fake_job_pk = 1\n\n # Call the task\n process_job(fake_job_pk) # magic number\n\n # Check if update_job was called with a success indicator\n self.mock_update_job.assert_called_once_with(\n fake_job_pk,\n success=True,\n report_path=mock_run_audit.return_value\n )", "def on_job_update(_job):\n nonlocal job\n job = _job", "def on_job_update(_job):\n nonlocal job\n job = _job", "def on_job_update(_job):\n nonlocal job\n job = _job", "def on_job_update(_job):\n nonlocal job\n job = _job", "def fire(self):\n if (self.job):\n job = self.job\n try:\n job.run()\n logger.debug(\"Job run. Setting status to done.\")\n self.status = 'done'\n except Exception:\n logger.error(\"Caught exception. 
Setting status to fail and deleting output.\")\n dfs.delete(self.outputpath)\n self.status = 'fail'", "def office_submit_solver(parser, args, params):\n parser.add_argument('-fj', type=int, help='First event to submit.',\n metavar='', required=True)\n parser.add_argument('-lj', type=int, help='Last event to submit.',\n metavar='', required=True)\n parser.add_argument('--run_type', type=str,\n help='Specify either adjoint_run, forward_run, or '\n 'line_search.',\n metavar='', required=True)\n\n local_args = parser.parse_known_args(args)\n first_job = local_args[0].fj\n last_job = local_args[0].lj\n run_type = local_args[0].run_type\n\n if run_type != 'adjoint_run' and run_type != 'forward_run' and run_type \\\n != 'line_search':\n raise ParameterError(\"Must specifiy either forward_run or \"\n \"adjoint_run\")\n\n control.submit_solver(params, first_job, last_job, run_type)", "def reschedule_job_once(self, id, **data):\n if self.jobconfig:\n jobs = self.loadconfig(self.jobconfig)\n else:\n LOGGER.error(\"Please provide 'SCHEDULER_JOBCONFIG' in your scheduler configuration.\")\n return None \n self._scheduler.reschedule_job(id, **data)\n modify_kwargs = self.fix_trigger([x for x in jobs if x[\"id\"] == id][0])\n modify_kwargs = self.remove_trigger_kwargs(modify_kwargs)\n modify_kwargs = self.rename_dictkey(modify_kwargs, \"id\", \"job_id\")\n self.scheduler.modify_job(**modify_kwargs)", "def _trigger_event(\n self, holder: T.Any, alt_name: str, action: str, *event_args: T.Any\n ) -> None:\n\n if isinstance(self.observable, Observable):\n observable = self.observable\n elif isinstance(self.observable, str):\n observable = getattr(holder, self.observable)\n elif isinstance(holder, Observable):\n observable = holder\n else:\n raise TypeError(\n \"This ObservableProperty is no member of an Observable \"\n \"object. 
Specify where to find the Observable object for \"\n \"triggering events with the observable keyword argument \"\n \"when initializing the ObservableProperty.\"\n )\n\n name = alt_name if self.event is None else self.event\n event = \"{}_{}\".format(action, name)\n observable.trigger(event, *event_args)", "def execute(self, context):\n # Validate the input parameters\n self.validate_input_params()\n\n # Identify the execution environment\n self.get_env()\n\n # Perform action based on the load type\n if self.parameters.get('load_type').lower() == 'incremental':\n # Snowflake Presteps execution\n self.submit_job_snowflake(self.sfPresteps_sql)\n logging.info(\"Snowflake pre-steps execution succeeded\")\n\n # Calling the spark job to load into snowflake table\n self.submit_job_emr(context)\n\n # Snowflake Poststeps execution\n self.submit_job_snowflake(self.sfPoststeps_sql)\n self.apply_grants()\n logging.info(\"Snowflake post-steps execution succeeded\")\n\n elif self.parameters.get('load_type').lower() == 'full':\n self.submit_job_emr(context)\n self.apply_grants()\n\n else:\n raise Exception(\"NOT a supported value for load_type: %s\\n\" % format(self.parameters.get('load_type')))", "async def trigger(self, action: str, args: typing.Iterable):\n for app in self.applications:\n await app.trigger(action, args)", "def perform_action(self, action_name, *action_parameters_):\n result = Being.perform_action(self, action_name, *action_parameters_)\n # return original result\n return result", "def run_job(\n self, name: str, command: str, afterok: list = None,afternotok: list = None, dry_run: bool = False,\n ) -> int:\n LOG.info(\"Submitting commands %s\", command)\n if afterok:\n LOG.info(\n \"Adding dependencies: %s\", \",\".join([str(dep) for dep in afterok])\n )\n jobid = 1\n if not dry_run:\n jobid = self._jobid\n LOG.info(\"Submitted job %s with job id: %s\", name, jobid)\n return jobid", "def office_process_synthetics(parser, args, params):\n parser.add_argument('-fj', type=int, help='First event to submit.',\n metavar='', required=True)\n parser.add_argument('-lj', type=int, help='Last event to submit.',\n metavar='', required=True)\n\n local_args = parser.parse_known_args(args)\n first_job = local_args[0].fj\n last_job = local_args[0].lj\n \n control.process_synthetics(params, first_job, last_job)", "def bcp_trigger(self, name, **kwargs):\n # ignore events which already came from bcp to prevent loops\n if \"_from_bcp\" in kwargs:\n return\n\n # Since player variables are sent automatically, if we get a trigger\n # for an event that starts with \"player_\", we need to only send it here\n # if there's *not* a player variable with that name, since if there is\n # a player variable then the player variable handler will send it.\n if name.startswith('player_'):\n try:\n if self.machine.game.player.is_player_var(name.lstrip('player_')):\n return\n\n except AttributeError:\n pass\n\n self.machine.bcp.transport.send_to_clients_with_handler(\n handler=name, bcp_command='trigger', name=name, **kwargs)", "def trigger_cb(file_name: str, commit_hash: str, project_name: str) -> None:\n\n cb_client = boto3.client(\"codebuild\")\n build = {\n \"projectName\": project_name,\n \"sourceVersion\": commit_hash,\n \"environmentVariablesOverride\": [\n {\"name\": \"REQ_FILENAME\", \"value\": file_name, \"type\": \"PLAINTEXT\"}\n ],\n }\n cb_client.start_build(**build)", "def update_job(self, job):\n call = subprocess.Popen(self.cli + [PlatformJenkinsJavaCLI.UPDATE_JOB, job.name], stdin=subprocess.PIPE)\n 
call.communicate(input=platform_ci.jjb.get_job_as_xml(job, self.template_dir))\n call.wait()\n if call.returncode != 0:\n raise PlatformJenkinsException(\"Updating job failed: \" + job.name)", "def start_job(package_id, job_id, config_file):\n logger.info('Starting {0}.{1}'.format(package_id, job_id))\n samza_job_deployer = runtime.get_deployer(DEPLOYER)\n samza_job_deployer.start(job_id, {\n 'package_id': package_id,\n 'config_file': config_file,\n })", "def updateTorqueJob(self, job_id, new_state,job_notes):\n try:\n con = self.getSFFDatabaseConnection()\n job_id = con.cursor().callproc('update_torque_job', [job_id, new_state, job_notes[-4000:]])\n return job_id[0]\n except Exception, e:\n print 'Exception caught: %s.\\nThe error is: %s' % (type(e), e)", "def run():\n print(\"\\n************************************** PARAMERTERS **************************************\\n\")\n print(f'TARGET_GROUP: {PARAM.TARGET_GROUP}\\n')\n print(f'ACQ_FILE: {PARAM.ACQ_FILE}\\n')\n print(f'FINAL_DATA_DIR: {PARAM.FINAL_DATA_DIR}\\n')\n print(f'FAULTY_EMPLOYEES_DIR: {PARAM.FAULTY_EMPLOYEES_DIR}\\n')\n print(f'NONE_MATCHED_DIR: {PARAM.NONE_MATCHED_DIR}\\n')\n print('*****************************************************************************************\\n')\n\n jti = JobTransitionInspector(PARAM.ACQ_FILE)\n jti.exec()", "def apply(self, job_name, args=(), kwargs={}, **opts):\n queue = QueueType(\n self.get_queue_name(job_name), connection=self.connection, is_async=False\n )\n return queue.enqueue_call(\n job_name,\n args=args,\n kwargs=kwargs,\n result_ttl=0,\n timeout=DEFAULT_JOB_TIMEOUT,\n **opts,\n )", "def alert(bot, update, args, job_queue):\n continue_on = 1\n chat_id = update.message.chat_id\n message_id = update.message.message_id\n user = str(update.message.from_user)\n if not args:\n update.message.reply_text('please enter a time')\n return\n if '|' in args:\n message = ' '.join(args)\n argstemp = message.split('|')\n due = alerts.lastDitchAttempt(argstemp[0])\n if due > 0:\n argstemp.pop(0)\n message = ' '.join(argstemp)\n continue_on = -1\n if continue_on == 1:\n due = alerts.parseADate(args[0])\n if due <= 0:\n due = alerts.regexmatch(args[0])\n args.pop(0)\n message = ' '.join(args)\n if due <= 0:\n update.message.reply_text('Sorry that is not a valid time')\n return\n\n # Add job to queue\n my_context = '' + str(chat_id) + ':' + str(message_id)\n job = Job(alarm, due, repeat=False, context=my_context)\n USERS[my_context] = user\n MESSAGES[my_context] = message\n TIMERS[my_context] = job\n job_queue.run_once(alarm, due, context=my_context)\n current_time = datetime.now()\n due = int((current_time - datetime(1970, 1, 1)).total_seconds() + due)\n fileIO.writeAlertJob(\"alerts\", str(chat_id),\n str(message_id), user, due, message)\n set_for = alerts.timeSetFor(due)\n bot.sendMessage(update.message.chat_id, 'Timer successfully set for: ' + str(set_for) +\n '\\nYour ID is:' + str(message_id))", "def submit_spot_batch_job( argv ):\n import logging.config\n if len(sys.argv) == 1:\n print 'ERROR: Missing log configuration file, first argument must be path/name.ext of the log configuration file'\n sys.exit(8)\n logging.config.fileConfig( sys.argv[1], disable_existing_loggers=False)\n logger = logging.getLogger(__name__)\n \n if len(sys.argv) == 2:\n logger.error( 'ERROR: Missing Batch Job Parm file, second argument must be path/name.ext of the log Batch Job Parm file' )\n sys.exit(8) \n \n try:\n logger.info(\"Starting\")\n \n path_batch_job_parm_file = sys.argv[2]\n if len(sys.argv) == 4: 
path_user_job_parm_file = sys.argv[3]\n else: path_user_job_parm_file = None\n \n with open( path_batch_job_parm_file ) as parm_file:\n raw_batch_job_parm_item = parm_file.read()\n \n if path_user_job_parm_file != None: \n with open( path_user_job_parm_file ) as parm_file:\n raw_user_job_parm_item = parm_file.read()\n else: raw_user_job_parm_item = None\n\n batch_job_parm_item = BatchJobParmItem( stringParmFile=raw_batch_job_parm_item )\n\n spot_master_sqs_message_durable = SqsMessageDurable( awsspotbatch.common.const.SPOT_MASTER_QUEUE_NAME, \n batch_job_parm_item.primary_region_name, \n profile_name=batch_job_parm_item.profile_name )\n \n spot_master_uuid = str(uuid.uuid1())\n logger.info('Submitting test batch message, spot_master_uuid=' + spot_master_uuid )\n spot_master_msg = SpotMasterMsg( spot_master_uuid=spot_master_uuid, spot_master_msg_type=SpotMasterMsg.TYPE_SUBMIT_BATCH,\n raw_batch_job_parm_item=raw_batch_job_parm_item, raw_user_job_parm_item=raw_user_job_parm_item)\n message_attributes = create_microsvc_message_attributes( awsspotbatch.common.const.MICROSVC_MASTER_CLASSNAME_SpotMasterMessageSubmitBatch )\n spot_master_sqs_message_durable.send_message( spot_master_msg.to_json(),\n message_attributes=message_attributes )\n logger.info( 'Completed Successfully' )\n\n except StandardError as e:\n logger.error( e )\n logger.error( traceback.format_exc() )\n sys.exit(8)", "def notify_job_by_email(info):\n\n # build params\n params = {}\n params[\"id\"] = info[\"job_id\"]\n params[\"rule_name\"] = info[\"rule\"][\"rule_name\"]\n params[\"username\"] = info[\"rule\"][\"username\"]\n kwargs = json.loads(info[\"rule\"][\"kwargs\"])\n params[\"emails\"] = kwargs[\"email_addresses\"]\n rule_hit = info[\"rule_hit\"]\n params[\"url\"] = rule_hit[\"_source\"][\"job\"][\"job_info\"][\"job_url\"]\n job = {\n \"type\": \"notify_job_by_email\",\n \"name\": \"action-notify_job_by_email-%s\" % info[\"job_id\"],\n \"tag\": params[\"rule_name\"],\n \"username\": params[\"username\"],\n \"params\": params,\n \"localize_urls\": [],\n }\n\n return job", "def trigger(self, trade) -> bool:\n pass", "def new_job(salesforce_session):\n return SalesforceBulkJob('update', 'Lead')", "def run_ci(project: str, branch: str, job: str, revision: Optional[str],\n tag: Optional[str]) -> None:\n config = get_config()\n\n if config.circleci_api_token is None:\n raise ValueError('circleci_api_token not configured. Run `sriracha '\n 'configure` to configure')\n\n resp = scci.trigger_job(api_token=config.circleci_api_token,\n project=project, branch=branch, job=job,\n revision=revision, tag=tag)\n\n click.echo_via_pager(json.dumps(resp, indent=4))", "def upsert_job(new_job, bulk_request):\n return SalesforceBulkJob('upsert', 'Lead', external_id_field='The_External_ID__c')", "def exec_job(self, job: Job, eval_args: Tuple[Tuple, dict]) -> Promise:\n self.events_queue.put(lambda: self._exec_job(job, eval_args))\n return job.result_promise" ]
[ "0.6365271", "0.6245668", "0.62099326", "0.61203486", "0.588978", "0.5762087", "0.5737363", "0.56618714", "0.5655177", "0.55685776", "0.5567557", "0.55617213", "0.5506177", "0.5457874", "0.5420332", "0.5409931", "0.53858936", "0.5350611", "0.53456885", "0.5329666", "0.5314936", "0.5295545", "0.5263992", "0.5262621", "0.5262621", "0.52589387", "0.524901", "0.52480626", "0.52473295", "0.5242411", "0.52184665", "0.52149165", "0.521458", "0.5182811", "0.5172219", "0.5157432", "0.5133891", "0.51178294", "0.5097624", "0.50730574", "0.5064673", "0.505176", "0.5050074", "0.5044743", "0.50188124", "0.5015519", "0.49971858", "0.49910915", "0.49877602", "0.49782085", "0.49694216", "0.49564275", "0.4951013", "0.4939203", "0.4938275", "0.4938275", "0.4938275", "0.4932471", "0.49252182", "0.49240804", "0.49136665", "0.4912997", "0.490957", "0.48984423", "0.48946777", "0.48889345", "0.48800275", "0.48700258", "0.48616177", "0.4848572", "0.48435956", "0.48390636", "0.48310402", "0.48310402", "0.48310402", "0.48310402", "0.48286724", "0.4823452", "0.48202997", "0.4816394", "0.48127073", "0.48118156", "0.48027295", "0.47977778", "0.4793562", "0.47915635", "0.47834662", "0.47805205", "0.47786492", "0.47772187", "0.47734857", "0.47686476", "0.47669795", "0.47534454", "0.4749872", "0.47472525", "0.47265378", "0.47263908", "0.47246766", "0.4721125" ]
0.7157829
0
Enables given job on Jenkins.
def enable_job(self, job):
    if subprocess.call(self.cli + [PlatformJenkinsJavaCLI.ENABLE_JOB, job.name]) != 0:
        raise PlatformJenkinsException("Enabling job failed: " + job.name)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def EnableJob(self, job_urn, token=None):\n cron_job = aff4.FACTORY.Open(job_urn, mode=\"rw\", aff4_type=\"CronJob\",\n token=token)\n cron_job.Set(cron_job.Schema.DISABLED(0))\n cron_job.Close()", "def disable_job(self, job):\n if subprocess.call(self.cli + [PlatformJenkinsJavaCLI.DISABLE_JOB, job.name]) != 0:\n raise PlatformJenkinsException(\"Disabling job failed: \" + job.name)", "def post_job_enable_with_http_info(self, name, **kwargs):\n\n all_params = ['name', 'jenkins_crumb']\n all_params.append('async')\n all_params.append('_return_http_data_only')\n all_params.append('_preload_content')\n all_params.append('_request_timeout')\n\n params = locals()\n for key, val in iteritems(params['kwargs']):\n if key not in all_params:\n raise TypeError(\n \"Got an unexpected keyword argument '%s'\"\n \" to method post_job_enable\" % key\n )\n params[key] = val\n del params['kwargs']\n # verify the required parameter 'name' is set\n if ('name' not in params) or (params['name'] is None):\n raise ValueError(\"Missing the required parameter `name` when calling `post_job_enable`\")\n\n\n collection_formats = {}\n\n path_params = {}\n if 'name' in params:\n path_params['name'] = params['name']\n\n query_params = []\n\n header_params = {}\n if 'jenkins_crumb' in params:\n header_params['Jenkins-Crumb'] = params['jenkins_crumb']\n\n form_params = []\n local_var_files = {}\n\n body_params = None\n # Authentication setting\n auth_settings = ['jenkins_auth']\n\n return self.api_client.call_api('/job/{name}/enable', 'POST',\n path_params,\n query_params,\n header_params,\n body=body_params,\n post_params=form_params,\n files=local_var_files,\n response_type=None,\n auth_settings=auth_settings,\n async=params.get('async'),\n _return_http_data_only=params.get('_return_http_data_only'),\n _preload_content=params.get('_preload_content', True),\n _request_timeout=params.get('_request_timeout'),\n collection_formats=collection_formats)", "def enable(self, subsystem=False):\n self.__dict__[\"enabled\"] = True\n\n if subsystem:\n self.subsystem.enable()", "def cli_enable_plugin(self, args) -> str:\n plugin_name = args.plugin_name\n if plugin_name not in self.name_to_plugin_class:\n return error(\"Plugin {} DNE\".format(plugin_name))\n\n self.name_to_enabled[plugin_name] = True\n return ok(\"Plugin {} enabled\".format(plugin_name))", "def register_job(self, job):\n self.job = job", "def post_job_enable(self, name, **kwargs):\n kwargs['_return_http_data_only'] = True\n if kwargs.get('async'):\n return self.post_job_enable_with_http_info(name, **kwargs)\n else:\n (data) = self.post_job_enable_with_http_info(name, **kwargs)\n return data", "def enable(self, *args, **kwargs):\n pass", "def __set_job_status(self, job: Job):\n\n self.redis_client.set(f'jobstatus:{job.id}:{str(job.status)}', f'job:{job.id}')", "async def enable(self, ctx):\n self.bot.db.execute(\"UPDATE starboards SET enabled = 1 WHERE channel_id = ?\", (ctx.channel.id,))\n await ctx.say(\"star.enabled\")", "async def enable(self, **kwargs) -> None: # pylint: disable=unused-argument\r\n await self.set_ena(True)", "def mark(self, job, status='succeeded'):\n pass", "async def enable(self, ctx, *, channel: discord.Channel=None):\n\n server = ctx.message.server\n\n temp = self.bot.dota_ticker_settings.get(server.id)\n\n if temp is not None and temp['enabled']:\n await self.bot.say('The match ticker has already been enabled on this server.')\n return\n\n if channel is None:\n channel = server.default_channel\n\n settings = {'enabled': True, 'channel_id': 
channel.id}\n\n await self.bot.dota_ticker_settings.put(server.id, settings)\n await self.bot.say('The match ticker has been enabled on {0.mention}.'.format(channel))", "def enable(self):\n\t\tresponse = self.client.post(self._endpoint + \"/enable\")\n\t\treturn bool(response.json[\"success\"])", "def enable(self):\n if not self.labExperiment:\n super().enable()\n else:\n self.connection.command('open_dm', self.DMserial)\n status = self.connection.query('get_status')\n assert status == 0, 'Error connecting to DM. Error: ' + str(status)\n numActProfile = self.connection.query('num_actuators')\n assert numActProfile == self.numActProfile, 'Wrong number of profile actuators entered'\n print(\"'BM1k' is now enabled\")", "def enable(self, enable):\n\n self._enable = enable", "def enable(self, name, channel):\n if channel not in self.disabled_extensions:\n self.disabled_extensions[channel] = set()\n\n if name not in self.extension_names:\n return False\n\n logger.info('Enabling %s on %s' % (name, channel))\n self.disabled_extensions[channel].discard(name)\n\n return True", "def enable(service_name: str, print_action: bool = True):\n \n if print_action:\n print_log_status(3, f\"Enabling `{service_name}`\")\n \n run_command(f\"sudo systemctl enable {service_name}\")", "def set_saucelabs_job_status(jobid, passed=True):\r\n config = get_saucelabs_username_and_key()\r\n url = 'http://saucelabs.com/rest/v1/{}/jobs/{}'.format(config['username'], world.jobid)\r\n body_content = dumps({\"passed\": passed})\r\n base64string = encodestring('{}:{}'.format(config['username'], config['access-key']))[:-1]\r\n headers = {\"Authorization\": \"Basic {}\".format(base64string)}\r\n result = requests.put(url, data=body_content, headers=headers)\r\n return result.status_code == 200", "def modify_job(self, job, parameter):\n job.set_encut(parameter[0])\n job.set_kpoints(parameter[1])\n return job", "def _enable(self):\n sub = multiprocessing.Process(target=subproc)\n sub.start()", "def enable(self):\n self.switch.enable()\n self._enabled = True", "def enable():\n request = dict(id='gbn')\n _gbn_enable(request)", "def activate_job(job_name):\n job = Job.from_name(job_name)\n job.set_active(True)\n return redirect(url_for('all_jobs_for_client', ClientID=job.ClientID))", "def start(self, job: PandaJob) -> None:\n raise NotImplementedError(\"Base method not implemented\")", "def enable_plugin(self, plugin: str):\r\n with PluginStore.mutex:\r\n self._disabled.remove(plugin)", "def job(self, job: str):\n\n self._job = job", "def job(self, job: str):\n\n self._job = job", "def set_skill_enablement_v1(self, skill_id, stage, **kwargs):\n # type: (str, str, **Any) -> Union[ApiResponse, object, StandardizedError_f5106a89, BadRequestError_f854b05]\n operation_name = \"set_skill_enablement_v1\"\n params = locals()\n for key, val in six.iteritems(params['kwargs']):\n params[key] = val\n del params['kwargs']\n # verify the required parameter 'skill_id' is set\n if ('skill_id' not in params) or (params['skill_id'] is None):\n raise ValueError(\n \"Missing the required parameter `skill_id` when calling `\" + operation_name + \"`\")\n # verify the required parameter 'stage' is set\n if ('stage' not in params) or (params['stage'] is None):\n raise ValueError(\n \"Missing the required parameter `stage` when calling `\" + operation_name + \"`\")\n\n resource_path = '/v1/skills/{skillId}/stages/{stage}/enablement'\n resource_path = resource_path.replace('{format}', 'json')\n\n path_params = {} # type: Dict\n if 'skill_id' in params:\n 
path_params['skillId'] = params['skill_id']\n if 'stage' in params:\n path_params['stage'] = params['stage']\n\n query_params = [] # type: List\n\n header_params = [] # type: List\n\n body_params = None\n header_params.append(('Content-type', 'application/json'))\n header_params.append(('User-Agent', self.user_agent))\n\n # Response Type\n full_response = False\n if 'full_response' in params:\n full_response = params['full_response']\n\n # Authentication setting\n access_token = self._lwa_service_client.get_access_token_from_refresh_token()\n authorization_value = \"Bearer \" + access_token\n header_params.append(('Authorization', authorization_value))\n\n error_definitions = [] # type: List\n error_definitions.append(ServiceClientResponse(response_type=None, status_code=204, message=\"No Content; Confirms that enablement is successfully created/updated.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.bad_request_error.BadRequestError\", status_code=400, message=\"Server cannot process the request due to a client error.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=401, message=\"The auth token is invalid/expired or doesn&#39;t have access to the resource.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.bad_request_error.BadRequestError\", status_code=403, message=\"The operation being requested is not allowed.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=404, message=\"The resource being requested is not found.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=409, message=\"The request could not be completed due to a conflict with the current state of the target resource.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=429, message=\"Exceed the permitted request limit. 
Throttling criteria includes total requests, per API, ClientId, and CustomerId.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=500, message=\"Internal Server Error.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=503, message=\"Service Unavailable.\"))\n\n api_response = self.invoke(\n method=\"PUT\",\n endpoint=self._api_endpoint,\n path=resource_path,\n path_params=path_params,\n query_params=query_params,\n header_params=header_params,\n body=body_params,\n response_definitions=error_definitions,\n response_type=None)\n\n if full_response:\n return api_response\n \n return None", "def __set_job(self, job: Job):\n\n serialized_job = self._serialize_entry(job)\n self.__delete_job_status(job)\n self.redis_client.set(f'job:{job.id}', serialized_job)\n self.__set_job_status(job)", "def _addjob(self, job: Job):\n self._job = copy.deepcopy(job)\n self._job.restart = True", "def enable(self):\n self.enabled = True", "def enable(self):\n self.enabled = True", "def enable_robot(self):\n self._franka_robot_enable_interface.enable()", "def enable_service(self, service):\n svc = self.service_path % service\n ret = self.rclient.put(svc)\n if ret.status != restclient.Status.ACCEPTED:\n exception_msg = (_(\"Cannot enable %s service.\") % service)\n raise exception.ShareBackendException(msg=exception_msg)", "def enable(self):\n self._enabled = True", "def enable(self):\n if not self.tm_started:\n for name, tm in self.air_traffic_manager.items():\n logging.debug(\"Starting tm %s\" % name)\n tm.start()\n tm_started = True\n\n logging.debug(\"Enabling switch %s\" % self.name)\n self.disabled = False", "def assignJob(self, jobname):\n c = \"/cli:python /app:matrix /sys:1 /cmd:assignjob /job:\"+jobname.lower() # convert jobname to lowercase as workaround\n print \"Assigning \", jobname\n self.sendCMDstring(c)\n time.sleep(0.5)", "async def admin_enable(self, ctx: commands.Context):\n if ctx.guild.id in self.guilds:\n await ctx.send('Team management is already enabled in this guild.')\n return\n await self._enable_guild(guild=ctx.guild)\n await ctx.send('Team management enabled.')", "def enable_health(self, enable_health):\n\n self._enable_health = enable_health", "def setJobId(self, jobid):\n self._ShREEKConfig.setJobId(jobid)", "def trigger_job(self, job, parameters=None):\n parameters = parameters or {}\n parameter_list = []\n for key in parameters:\n parameter_list.append(\"-p\")\n parameter_list.append(\"%s=%s\" % (key, parameters[key]))\n if subprocess.call(self.cli + [PlatformJenkinsJavaCLI.BUILD_JOB, job.name] + parameter_list) != 0:\n raise PlatformJenkinsException(\"Triggering job failed: \" + job.name)", "def setEnabled(self, enable: bool) -> None:\n self.enabled = ...", "def _set_enable(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=YANGBool, default=YANGBool(\"true\"), is_leaf=True, yang_name=\"enable\", rest_name=\"enable\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Represents whether the user account is enabled\\n(default=true)', u'cli-suppress-no': None}}, namespace='urn:brocade.com:mgmt:brocade-aaa', defining_module='brocade-aaa', yang_type='username-enable', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': 
\"\"\"enable must be of a type compatible with username-enable\"\"\",\n 'defined-type': \"brocade-aaa:username-enable\",\n 'generated-type': \"\"\"YANGDynClass(base=YANGBool, default=YANGBool(\"true\"), is_leaf=True, yang_name=\"enable\", rest_name=\"enable\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Represents whether the user account is enabled\\n(default=true)', u'cli-suppress-no': None}}, namespace='urn:brocade.com:mgmt:brocade-aaa', defining_module='brocade-aaa', yang_type='username-enable', is_config=True)\"\"\",\n })\n\n self.__enable = t\n if hasattr(self, '_set'):\n self._set()", "def enable_plugin(self, plugin_name):\n plugin = self.get_plugin_info(plugin_name)\n DISABLED = os.path.join(self.plugin_abspath, plugin[\"plugin_package_name\"], \"DISABLED\")\n if os.path.isfile(DISABLED):\n os.remove(DISABLED)\n self.__touch_file(os.path.join(self.plugin_abspath, plugin[\"plugin_package_name\"], \"ENABLED\"))", "def _enable_kit_component(self, kit, comp_name, comp_version,\n software_profile):\n kit_spec = (kit.getName(), kit.getVersion(), kit.getIteration())\n\n load_kits()\n installer = get_kit_installer(kit_spec)()\n comp_installer = installer.get_component_installer(comp_name)\n if not comp_installer.is_enableable(software_profile):\n self.getLogger().warning(\n 'Component cannot be enabled: {}'.format(\n comp_installer.spec\n )\n )\n return None\n comp_installer.run_action('pre_enable', software_profile.getName())\n\n best_match_component = self._add_component_to_software_profile(\n kit, comp_name, comp_version, software_profile)\n\n comp_installer.run_action('enable', software_profile.getName())\n comp_installer.run_action('post_enable',\n software_profile.getName())\n\n return best_match_component", "def enable(self, index, value=True, missingok=False):\n self._action(index, StateVariable.enable, missingok=missingok,\n value=value)", "def swo_enable(self, cpu_speed, swo_speed=9600, port_mask=0x01):\n if self.swo_enabled():\n self.swo_stop()\n\n res = self._dll.JLINKARM_SWO_EnableTarget(cpu_speed,\n swo_speed,\n enums.JLinkSWOInterfaces.UART,\n port_mask)\n if res != 0:\n raise errors.JLinkException(res)\n\n self._swo_enabled = True\n\n return None", "def setEnableCondition(*args):", "def setEnableCondition(*args):", "def setEnableCondition(*args):", "def setEnableCondition(*args):", "def setEnableCondition(*args):", "def setEnableCondition(*args):", "def setEnableCondition(*args):", "def setEnableCondition(*args):", "def setEnableCondition(*args):", "def setEnableCondition(*args):", "def setEnableCondition(*args):", "def enable(self, message):\n self.Enable()", "def set_continuous_meter(self, enable):\n if enable:\n self.amplifier.start_continuous_acquisition()\n else:\n self.amplifier.stop_continuous_acquisition()", "def setEnabled(self, enabled):\n def do(toUpdateList):\n self.enabled = enabled\n self.actions.addAction(do)", "def set_enable(self, enable):\n\n with AutoUpdater._lock:\n if isinstance(enable, Bus):\n AutoUpdater.remove_link(self._enable)\n AutoUpdater.add_link(\n enable,\n self._enable)\n else:\n raise ValueError(\n \"ERROR: Invalid Enable input. 
Enable must be a \"\n \"1-bit Bus or a Connector.\")", "def enableUser(username):\n result = subprocess.run(['powershell.exe', '-Command', 'Enable-LocalUser', '-name', '\"'+username+'\"'], stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True)\n logger.info(\"Running command {}\".format(result.args))\n for line in iter(result.stdout.splitlines()):\n logger.debug(line)\n for line in iter(result.stderr.splitlines()):\n logger.warning(line)", "def enable_motor():\n print('Enabling motor')\n start_motor = '{\"id\" : \"Motor1\", \"enabled\" : \"1\"}'\n SERIAL_PARENT.send(start_motor)\n OUTGOING.append(start_motor)", "def enable_lock(self, lock_on=True):\n if lock_on:\n self.write('ScanM_Mode=2') #Search\n time.sleep(10)\n self.write('ScanM_Mode=3') #Lock, its unclear from manual if\n #this is redundant. i.e. autolocks\n #at end of search\n if not self.query_lock_status():\n raise ac_excepts.CouplingkError('Not meeting threshold power',\n self.enable_lock)\n if not lock_on:\n self.write('ScanM_Mode=0') #Off", "def enable(self):\r\n self.update(enabled=True)", "def enableDevice(*args, apply: bool=True, device: Union[AnyStr, bool]=\"\", enable: bool=True,\n monitor: bool=True, record: bool=True, q=True, query=True,\n **kwargs)->Union[None, Any]:\n pass", "def set_On(self):\n if not(self._locked):\n self.__dict__['statusOn']=True\n self._do_action()\n else:\n self._log.info('The JobProperty %s is blocked', self.__name__)", "def enable_sensor_power():\n sen = digital.SensorPower(\"senpwr\") \n sen.set()", "def enable(self, sid):\n return", "def cmd_enable(self, app_name=None):\n rc = self.socket_command_with_project('enable', app_name)\n return rc", "def add(self, job):\r\n self.jobs.put(job)", "def enable(self):\n pass", "def enable(\n verbose=False,\n silent=False,\n full_signature=True,\n copy_ok=True,\n calculate_memory=False,\n):\n\n auto_enable(verbose, silent, full_signature, copy_ok, calculate_memory)\n yield\n auto_disable()", "def enable_service(service_name, start_type='auto'):\n run_program(['sc', 'config', service_name, 'start=', start_type])", "def enable(self, item_id):\n pass", "def _enable(self):\n self.debug_log(\"Enabling...\")\n self._register_handlers()", "def enable(self) -> None:", "def setEnabled(*args):", "def setEnabled(*args):", "def setEnabled(*args):", "def setEnabled(*args):", "def setEnabled(*args):", "def setEnabled(*args):", "def setEnabled(*args):", "def setEnabled(*args):", "def setEnabled(*args):", "def setEnabled(*args):", "def setEnabled(*args):", "async def enable(self, ctx: Context, *, guild: int = None):\n\n if guild is None:\n guild = ctx.guild\n else:\n guild = self.bot.get_guild(guild)\n\n if not guild:\n return await ctx.message.add_reaction(\"⚠\")\n\n self._create_guild_config(guild)\n\n await ctx.message.add_reaction(\"✅\")", "def Enable(self, request, global_params=None):\n config = self.GetMethodConfig('Enable')\n return self._RunMethod(\n config, request, global_params=global_params)", "def Enable(self, request, global_params=None):\n config = self.GetMethodConfig('Enable')\n return self._RunMethod(\n config, request, global_params=global_params)", "def _on_enable(\n self, entity: Union[str, dict], attribute: str, old: str, new: str, kwargs: dict\n ) -> None:\n self.on_enable()", "def add_option_enable(self):\n logger.debug(\"Adding enable option\")\n chkenable = ttk.Checkbutton(self.optsframe,\n variable=self.vars[\"enabled\"],\n text=\"Enable {}\".format(self.tabname),\n command=self.on_chkenable_change)\n 
chkenable.pack(side=tk.RIGHT, padx=5, anchor=tk.W)\n Tooltip(chkenable,\n text=\"Enable or disable {} display\".format(self.tabname),\n wraplength=200)", "def enable(self):\n try:\n self.bus.open(self.BUS_NUMBER)\n self.write(AntennaDeployerCommand.ARM_ANTS, 0x00)\n self.bus.close()\n return True\n except:\n return False", "async def enable(self) -> None:\n try:\n await self.adguard.request(\n \"parental/enable\", method=\"POST\", data=\"sensitivity=TEEN\"\n )\n except AdGuardHomeError as exception:\n raise AdGuardHomeError(\n \"Enabling AdGuard Home parental control failed\"\n ) from exception", "def enable():\n configdb = ConfigDBConnector()\n configdb.connect()\n queue_info = {}\n queue_info['FLEX_COUNTER_STATUS'] = 'enable'\n configdb.mod_entry(\"FLEX_COUNTER_TABLE\", \"QUEUE\", queue_info)", "async def cmd_galenable(self, ctx):\n\n # ===== SET LOCAL COG VARIABLE\n self.cogset['enable']= True\n\n # ===== ADD THE FUNCTION TO THE SCHEDULER\n self.scheduler.add_job(call_schedule,\n 'date',\n id=\"_delete_gallery_messages\",\n run_date=get_next(hours=self.cogset['text_expirein']),\n kwargs={\"func\": \"_delete_gallery_messages\"}\n )\n\n # ===== SAVE SETTINGS \n await cogset.SAVE(self.cogset, cogname=self.qualified_name)\n\n await ctx.channel.send(content=\"Galleries are **enabled**.\")\n\n return", "def update_job(self, job):\n call = subprocess.Popen(self.cli + [PlatformJenkinsJavaCLI.UPDATE_JOB, job.name], stdin=subprocess.PIPE)\n call.communicate(input=platform_ci.jjb.get_job_as_xml(job, self.template_dir))\n call.wait()\n if call.returncode != 0:\n raise PlatformJenkinsException(\"Updating job failed: \" + job.name)" ]
[ "0.6508622", "0.640465", "0.6054045", "0.589058", "0.5827825", "0.55788094", "0.5573359", "0.5561993", "0.5552079", "0.5471112", "0.54423046", "0.5440283", "0.543312", "0.5425502", "0.5387053", "0.53846234", "0.5364459", "0.5362083", "0.5344735", "0.53233784", "0.52979136", "0.5291625", "0.527096", "0.5260427", "0.52597964", "0.5251214", "0.5205945", "0.5205945", "0.5201516", "0.5185637", "0.5182723", "0.51599455", "0.51599455", "0.51462555", "0.51416194", "0.512574", "0.51221484", "0.51214015", "0.51192063", "0.5105148", "0.5101183", "0.5077893", "0.507047", "0.5056076", "0.50481486", "0.5033088", "0.5026935", "0.501307", "0.50115585", "0.50115585", "0.50115585", "0.50115585", "0.50115585", "0.50115585", "0.50115585", "0.50115585", "0.50115585", "0.50115585", "0.50115585", "0.49958837", "0.49945104", "0.49689433", "0.49550983", "0.49333933", "0.49298233", "0.49284422", "0.49280107", "0.49246842", "0.49032936", "0.4900086", "0.48755452", "0.48697752", "0.48597097", "0.4855919", "0.48547098", "0.48537764", "0.4844591", "0.4839513", "0.48388714", "0.48298916", "0.48298916", "0.48298916", "0.48298916", "0.48298916", "0.48298916", "0.48298916", "0.48298916", "0.48298916", "0.48298916", "0.48298916", "0.4823197", "0.48230338", "0.48230338", "0.48210472", "0.48198023", "0.4811578", "0.48103386", "0.48009765", "0.47872368", "0.47701144" ]
0.85384786
0
Disables a given job on Jenkins.
def disable_job(self, job):
    if subprocess.call(self.cli + [PlatformJenkinsJavaCLI.DISABLE_JOB, job.name]) != 0:
        raise PlatformJenkinsException("Disabling job failed: " + job.name)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def DisableJob(self, job_urn, token=None):\n cron_job = aff4.FACTORY.Open(job_urn, mode=\"rw\", aff4_type=\"CronJob\",\n token=token)\n cron_job.Set(cron_job.Schema.DISABLED(1))\n cron_job.Close()", "def deactivate_job(job_name):\n job = Job.from_name(job_name)\n job.set_active(False)\n return redirect(url_for('active_jobs_for_client', ClientID=job.ClientID))", "def cancel_job(self, job):\n try:\n self.jobs.remove(job)\n except ValueError:\n pass", "def cli(ctx, job_id):\n return ctx.gi.jobs.cancel_job(job_id)", "def kill_job(self , index):\n job = self.jobs.__getitem__( index )\n if job:\n job.kill()", "def cancel_job(self, job_number):\n raise NotImplementedError", "def disable(self):\n if not self.labExperiment:\n super().disable()\n else:\n self.zero()\n self.connection.query('close_dm')\n print(\"'BM1k' is now disbaled\")", "def kill_job(self, job):\n\n if job.status == Job.STATUS_QUEUED:\n # case 1: job is in QUEUED state\n # remove it from the queue and mark as killed\n\n job_queue = job_queue_name(job.model)\n logger.info(\n \"killing job {} by removing from queue {}\".\n format(job.uuid, job_queue))\n\n command_dict = {'command': 'PROCESS_JOB', 'job_uuid': job.uuid}\n remove_command(redis_connection(), job_queue, command_dict)\n job.status = Job.STATUS_KILLED\n # save it\n Job[job.uuid] = job\n elif job.status == Job.STATUS_RUNNING:\n # case 2: job is in RUNNING state\n # send message to worker to kill the job\n worker = worker_name(job.worker_url, job.model)\n worker_channel = node_channel_name(worker)\n logger.info(\"sending command to kill job on channel {}\".\n format(worker_channel))\n command_dict = {'command': \"KILL_JOB\", 'job_uuid': job.uuid}\n publish_command(redis_connection(), worker_channel, command_dict)\n else:\n logger.info(\"kill called on job {} in incompatible state {}\".\n format(job.uuid, job.status))", "def jobFail(job):\n\tif 'a' in job.proc.config._notify.when['pipeline']:\n\t\tlogger.debug('Notifying job fails')\n\t\tEMAIL.send('job', job, 'abort')", "def stop_labeling_job(LabelingJobName=None):\n pass", "def on_disable(self) -> None:\n self._cancel_automation()", "def kill(self):\n return self._raw_execute(\"cancel\", {\"job_id\": self.job_id})", "def cancel_job(job_id: str, connection: Optional['Redis'] = None, serializer=None, enqueue_dependents: bool = False):\n Job.fetch(job_id, connection=connection, serializer=serializer).cancel(enqueue_dependents=enqueue_dependents)", "def unset_wrapper(bot, update, args, job_queue, chat_data):\n if len(args) == 0:\n update.message.reply_text('No parameter provided')\n return\n\n job_name = args[0]\n if len(args) == 0 or job_name not in settings.JOBS:\n update.message.reply_text(\n 'Sorry {0} is not a valid job'.format(job_name))\n return\n\n job = find_job(job_name, job_queue)\n\n if not job:\n update.message.reply_text('You have no active job')\n return\n\n job.schedule_removal()\n\n update.message.reply_text('{0} job successfully unset!'.format(job_name))", "def stop_compilation_job(CompilationJobName=None):\n pass", "def on_kill(self):\n if self.job_id:\n self.log.info(\"on_kill: cancel the airbyte Job %s\", self.job_id)\n self.hook.cancel_job(self.job_id)", "def delete_job(self, job):\n subprocess.call(self.cli + [PlatformJenkinsJavaCLI.DELETE_JOB, job.name])", "def stop_job(self):\n # DELETE /jobs/{job_id}/results\n pass", "async def cmd_galdisable(self, ctx):\n # ===== SET LOCAL COG VARIABLE\n self.cogset['enable']= False\n\n # ===== SAVE SETTINGS \n await cogset.SAVE(self.cogset, 
cogname=self.qualified_name)\n\n # ===== DELETE THE JOB IF IT EXISTS\n for job in self.jobstore.get_all_jobs():\n if [\"_delete_gallery_messages\"] == job.id.split(\" \"):\n self.scheduler.remove_job(job.id)\n\n await ctx.channel.send(content=\"Galleries are disabled.\")\n\n return", "def Disable(self):\n handler = self.get_command_object(\"Disable\")\n handler()", "def _delete_job(self, job):", "def unclaim(self, job, owner):\n raise NotImplementedError()", "async def stop(self):\n self._job.cancel()\n await super().stop()", "def deactivate_port(self, job_req):\n # STEP-0 Extract parameters from job_req\n #job_params\n #job_params[0]: Bot Command\n #job_params[1]: Switch IP Address\n #job_params[2]: Port Number\n devicon = chr(0x2757) + chr(0xFE0F)\n check_icon = chr(0x2705)\n job_params = job_req.split()\n if len(job_params) < 3:\n #Not Enough info provided\n message = f\" Job Request is incomplete, please provide Switch IP, Switch-Port, Vlan-ID ie _change-port-vlan 1.1.1.1 10 101 \\n\"\n else:\n ## STEP 0-1: Assign all the parameters to job variables\n ip_addr = \"\".join(re.findall(r'\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}', job_params[1])) #Well Formed IP Address\n port_id = \"\".join(re.findall(r'^\\d{1,2}$',job_params[2])) #Accepting up to numbers\n # STEP 1: Validations\n ## STEP 1-1: GET Switch Serial Number\n serial_id, switch_name = get_switch_serial(ip_addr, self.meraki_net)\n if serial_id in [\"\"]:\n message = f\"{devicon} **There is not switch with that IP**\"\n logger.error(\"VALIDATION failed Switch serial not Found %s\", ip_addr)\n return message\n else:\n logger.info(\"VALIDATION Succeeded Switch serial Found %s\", serial_id)\n\n ## STEP 1-2: Validate Port ID\n if validate_port(port_id, serial_id):\n logger.info(\"VALIDATION Succeeded Port ID Valid %s\", port_id)\n else:\n logger.error(\"VALIDATION failed Port ID not Found %s\", port_id)\n message = f\"{devicon} **Invalid Port ID**\"\n return message\n\n # STEP 2: Prepare the Payload\n port_payload = {}\n port_payload[\"name\"] = f\"Port disabled by {self.job_owner} via Teams\"\n port_payload[\"enabled\"] = False\n logger.info(\"JSON Data to Port Update %s \", json.dumps(port_payload))\n\n # STEP 3: Send The Change to API\n api_uri = f\"/v1/devices/{serial_id}/switch/ports/{int(port_id)}\"\n data = update_via_meraki_api(api_uri, port_payload)\n if data:\n logger.info(\"Port updated successfully job_owner %s : \", self.job_owner)\n message = f\" {check_icon} **Port Update has been applied Sucesfully** \\n\"\n message += F\"* Job Owner: **{self.job_owner}** \\n\"\n message += F\"* Switch Name: **{switch_name}** \\n\"\n message += f\"* PortID **{data['portId']}** \\n\"\n message += f\"* Port Name **{data['name']}** \\n\"\n message += f\"* Port Type **{data['enabled']}** \\n\"\n else: \n logger.error(\"Port update failed : \") \n message = f\"{devicon} Port Update incomplete\"\n return message", "async def request_job_stop(self, job_id: str, *args, **kwargs) -> bool:\n # TODO: implement\n raise NotImplementedError('{} function \"request_job_stop\" not implemented yet'.format(self.__class__.__name__))", "def run_job(job, interrupt_if_necessary):", "def set_Off(self):\n if not(self._locked):\n self.__dict__['statusOn']=False\n self._undo_action()\n else:\n self._log.info('The JobProperty %s is blocked', self.__name__)", "def cancel_vmware_protection_job(job_name):\n try:\n cohesity_client = _get_client()\n jobs = cohesity_client.protection_jobs.get_protection_jobs(\n is_deleted=False, names=job_name)\n if not jobs:\n return 
\"Job with name {} not available.\".format(job_name)\n for job in jobs:\n if job.name == job_name:\n job_id = job.id\n break\n if not job_id:\n return \"Job with name {} not available.\".format(job_name)\n\n # Get recent job run id and status.\n runs = cohesity_client.protection_runs.get_protection_runs(\n job_id=job_id)\n if not runs:\n return \"Job run details not available for job {}\".format(job_name)\n latest_run = runs[0]\n if latest_run.backup_run.status not in [\"kRunning\", \"kAccepted\"]:\n return \"No active job run available for job {}\".format(job_name)\n run_id = latest_run.backup_run.job_run_id\n body = CancelProtectionJobRunParam()\n body.job_run_id = run_id\n cohesity_client.protection_runs.create_cancel_protection_job_run(\n job_id, body)\n return \"Successfully cancelled the run for job {}\".format(job_name)\n except APIException as err:\n return \"Error while attempting to cancel the job {}, error : {}\".format(\n job_name, err)", "def cli_disable_plugin(self, args) -> str:\n plugin_name = args.plugin_name\n if plugin_name not in self.name_to_plugin_class:\n return error(\"Plugin {} DNE\".format(plugin_name))\n\n self.name_to_enabled[plugin_name] = False\n return ok(\"Plugin {} disabled\".format(plugin_name))", "def cancel_search(job):\n job.cancel()", "def enable_job(self, job):\n if subprocess.call(self.cli + [PlatformJenkinsJavaCLI.ENABLE_JOB, job.name]) != 0:\n raise PlatformJenkinsException(\"Enabling job failed: \" + job.name)", "def disable():\n request = dict(id='gbn')\n _gbn_disable(request)", "def unregister_job(self, job_id):\n ujs = self.__ujs_client()\n ujs.unshare_job(job_id, [self.nar_user])", "def post_job_disable_with_http_info(self, name, **kwargs):\n\n all_params = ['name', 'jenkins_crumb']\n all_params.append('async')\n all_params.append('_return_http_data_only')\n all_params.append('_preload_content')\n all_params.append('_request_timeout')\n\n params = locals()\n for key, val in iteritems(params['kwargs']):\n if key not in all_params:\n raise TypeError(\n \"Got an unexpected keyword argument '%s'\"\n \" to method post_job_disable\" % key\n )\n params[key] = val\n del params['kwargs']\n # verify the required parameter 'name' is set\n if ('name' not in params) or (params['name'] is None):\n raise ValueError(\"Missing the required parameter `name` when calling `post_job_disable`\")\n\n\n collection_formats = {}\n\n path_params = {}\n if 'name' in params:\n path_params['name'] = params['name']\n\n query_params = []\n\n header_params = {}\n if 'jenkins_crumb' in params:\n header_params['Jenkins-Crumb'] = params['jenkins_crumb']\n\n form_params = []\n local_var_files = {}\n\n body_params = None\n # Authentication setting\n auth_settings = ['jenkins_auth']\n\n return self.api_client.call_api('/job/{name}/disable', 'POST',\n path_params,\n query_params,\n header_params,\n body=body_params,\n post_params=form_params,\n files=local_var_files,\n response_type=None,\n auth_settings=auth_settings,\n async=params.get('async'),\n _return_http_data_only=params.get('_return_http_data_only'),\n _preload_content=params.get('_preload_content', True),\n _request_timeout=params.get('_request_timeout'),\n collection_formats=collection_formats)", "def stop_text_translation_job(JobId=None):\n pass", "def unset(bot, update, chat_data):\n if 'job' not in chat_data:\n update.message.reply_text('Sem notificacoes ativadas')\n return\n\n job = chat_data['job']\n job.schedule_removal()\n del chat_data['job']\n check = emojize(\":white_check_mark:\", use_aliases=True)\n 
update.message.reply_text('Notificacao cancelada com sucesso'+check+'')", "def ignore_job(\n self,\n ) -> Callable[[cloud_deploy.IgnoreJobRequest], cloud_deploy.IgnoreJobResponse]:\n # Generate a \"stub function\" on-the-fly which will actually make\n # the request.\n # gRPC handles serialization and deserialization, so we just need\n # to pass in the functions for each.\n if \"ignore_job\" not in self._stubs:\n self._stubs[\"ignore_job\"] = self.grpc_channel.unary_unary(\n \"/google.cloud.deploy.v1.CloudDeploy/IgnoreJob\",\n request_serializer=cloud_deploy.IgnoreJobRequest.serialize,\n response_deserializer=cloud_deploy.IgnoreJobResponse.deserialize,\n )\n return self._stubs[\"ignore_job\"]", "def disable(self, subsystem=False):\n self.__dict__[\"enabled\"] = False\n\n if subsystem:\n self.subsystem.disable()", "def stopBuild(reason=\"<no reason given>\"):", "def disable(self) -> None:", "async def meow_disable(self, ctx: vbu.Context):\n\n try:\n self.meow_chats.remove(ctx.channel)\n except KeyError:\n return await ctx.send(\"Meow chat is already disabled in this channel.\")\n await ctx.send(f\"Meow chat has been disabled in {ctx.channel.mention} :<\")\n\n # See if there's a running task to keep it alive\n _, current_task = self.meow_disable_tasks.pop(ctx.channel.id, (None, None))\n if current_task:\n current_task.cancel()", "def worker_disabled(name, workers=None, profile=\"default\"):\n if workers is None:\n workers = []\n return _bulk_state(\"modjk.bulk_disable\", name, workers, profile)", "def on_disable(self) -> None:\n self._cancel_notification_cycle()", "def stop_training_job(TrainingJobName=None):\n pass", "def on_job_update(_job):\n nonlocal job\n job = _job\n\n if job.state in ['WORKING', 'DONE', 'ERROR']:\n canceled = my_job.job_manager_class.cancel(job.id)\n assert not canceled, (\n f'Uncancelable job is canceled in the `{job.state}` state!')", "def cmd_disable(self, app_name=None):\n rc = self.socket_command_with_project('disable', app_name)\n return rc", "def swo_disable(self, port_mask):\n res = self._dll.JLINKARM_SWO_DisableTarget(port_mask)\n if res != 0:\n raise errors.JLinkException(res)\n return None", "def stop(self):\n\n self.surge_job.cancel()\n self.print('[!] Purge job stopped!')", "def cancel(self):\n\n query = f\"scancel {self.jobid}\"\n if self.cluster:\n query = f\"scancel {self.jobid} --clusters={self.cluster}\"\n\n cmd = BuildTestCommand(query)\n cmd.execute()\n logger.debug(f\"Cancelling Job: {self.jobid} by running: {query}\")\n\n self.poll()\n self._state = \"CANCELLED\"", "def killJob(appName, jobId):\n jobs = db.getJobs(jobId=jobId)\n job = None if len(jobs) == 0 else jobs[0]\n\n if job == None:\n return returnError (\"Job ID, %s, does not exist\" % jobId, 404)\n\n logging.info (\"[FLASKWEB] Asked to KILL job #%s. Current Job status is %s\" % (jobId, job['status']))\n # Separate check to kill orphaned jobs in Db\n # TODO: Merge Job with experiments to post updates to correct table\n if job['status'] == 'RUNNING' or job['status'] == 'SUBMITTED':\n db.updateJob(jobId, status='KILLED')\n\n if int(jobId) in dispatcher.getActiveJobs():\n status = 'KILLED'\n logging.debug('[FLASKWEB] Job %s is active. Signaling to kill in mesos.' % jobId)\n dispatcher.cancelJob(int(jobId), driverDispatch)\n else:\n status = 'ORPHANED and CLEANED'\n logging.debug('[FLASKWEB] Job # %s is ORPHANED and does not exist in current state. Cleaning up.' 
% jobId)\n\n ts = db.getTS_est() #datetime.datetime.fromtimestamp(ts).strftime('%Y-%m-%d %H:%M:%S')\n thisjob = dict(jobId=jobId, time=ts, url=dispatcher.getSandboxURL(jobId), status=status)\n if 'application/json' in request.headers['Accept']:\n return jsonify(thisjob)\n else:\n return render_template(\"last.html\", appName=appName, lastjob=thisjob)", "def makeJobAvailable(self, job):\n with self:\n with self.queues.jobsPossible:\n with self.queues.jobsAvailable:\n try:\n index = self.queues.jobsPossible.index(job)\n except ValueError, ex:\n raise BlackboardUpdateError(\"Job not found in jobsPossible: \" +\n job.getProperty(Props.NAME, \"(unidentified)\"))\n job = self.queues.jobsPossible.pop(index)\n self.queues.jobsAvailable.append(job)", "def stop_slave_worker():\n print(\"Stopping slave worker\")\n r = req.patch(f\"{SLAVE_API_URL}/formation/worker\", json=API_PAYLOAD_0, headers=SLAVE_API_HEADERS)\n if r.status_code != req.codes.ok:\n print(\"Unable to stop the worker dyno on slave\")\n print(r.text)\n return False\n #wait a bit for the worker process to stop\n print(\"Waiting a bit\")\n time.sleep(2)\n return True", "def stop_hyper_parameter_tuning_job(HyperParameterTuningJobName=None):\n pass", "def cancel(self):\n\t\treturn Job(SDK.PrlJob_Cancel(self.handle)[0])", "def job_stop(self, job_id):\n resp = self.backend.job_stop(job_id)\n\n self.refresh_jobs()", "def removeJob(self):\n job, name = self.getJob() \n answer = tkMessageBox.askyesno(\"Warning\",'Remove this job?')\n if answer == False:\n return \n try: \n self.jobManager.deleteJob(job)\n except:\n print 'job not in database, removing from peat'\n del self.DB.meta.peatsa_jobs[name]\n self.DB.meta.__p__changed = 1\n self.updateJobs()\n return", "async def async_turn_off(self):\n path = \"/queue/simple\"\n param = \".id\"\n value = None\n for uid in self._ctrl.data[\"queue\"]:\n if self._ctrl.data[\"queue\"][uid][\"name\"] == f\"{self._data['name']}\":\n value = self._ctrl.data[\"queue\"][uid][\".id\"]\n\n mod_param = \"disabled\"\n mod_value = True\n self._ctrl.set_value(path, param, value, mod_param, mod_value)\n await self._ctrl.async_update()", "def post_job_disable(self, name, **kwargs):\n kwargs['_return_http_data_only'] = True\n if kwargs.get('async'):\n return self.post_job_disable_with_http_info(name, **kwargs)\n else:\n (data) = self.post_job_disable_with_http_info(name, **kwargs)\n return data", "async def disable(self, **kwargs) -> None: # pylint: disable=unused-argument\r\n await self.set_ena(False)", "def on_disable(self) -> None:\n self._on_stop_cycle({})", "def disable(self):", "async def job_kill(self, uid):\n self._require_running()\n await self._get_job(uid).kill()", "def enableJobCancellation(cls) -> None:\n if platform.system() == \"Linux\":\n ddbcpp.sessionimpl.enableJobCancellation()\n else:\n raise RuntimeError(\"This method is only supported on Linux.\")", "def cancel_job(self, job_id):\n self.send(JobCommands.CANCEL_JOB, CancelJobPayload(job_id))", "def disable(self, index):\n self._action(index, StateVariable.enable, missingok=False, value=False)", "def stopJob(self):\n if len(self.__jobQueue) > 0:\n _JobThread.stopJobThreadInstance(\n self.caller, self.__jobQueue[0].stopRun)", "def remove_job(self, job_specifier, _unprotect=False):\n self._project.remove_job(job_specifier=job_specifier, _unprotect=_unprotect)", "def disable(version_manager, request):\n version_manager.is_disabled = True\n version_manager.save()\n return version_manager", "def unblock_worker(self, worker_id, reason):\r\n params = 
{'WorkerId': worker_id, 'Reason': reason}\r\n\r\n return self._process_request('UnblockWorker', params)", "def on_job_update(_job):\n nonlocal job\n job = _job\n\n if job.state in ['DONE', 'ERROR', 'WORKING']:\n canceled = my_job_async.job_manager_class.cancel(job.id)\n assert not canceled, (\n f'Uncancelable job is canceled in the `{job.state}` state!')", "def disable(service_name: str, print_action: bool = True):\n\n if print_action:\n print_log_status(3, f\"Disabling `{service_name}`\")\n \n run_command(f\"sudo systemctl disable {service_name}\")", "def EnableJob(self, job_urn, token=None):\n cron_job = aff4.FACTORY.Open(job_urn, mode=\"rw\", aff4_type=\"CronJob\",\n token=token)\n cron_job.Set(cron_job.Schema.DISABLED(0))\n cron_job.Close()", "def quarantine(self, job, exc_info):\n job.ended_at = times.now()\n job.exc_info = exc_info\n job.status = Status.FAILED\n if self._enqueuejob:\n ret = yield self.enqueue_job(job, timeout=job.timeout, set_meta_data=False)\n else:\n ret = yield job.save()\n defer.returnValue(ret)", "def unassign_job(request):\n unassignment = False #flag\n for i in mex_list:\n if i.id == request.mex_id:\n i.job_id = None\n i.status = MExStatus.STANDBY\n unassignment = True\n if unassignment == True:\n response = UnassignJobFromMexResponse()\n response.success = True\n else:\n response = UnassignJobFromMexResponse()\n response.success = False\n return response", "def stop(self):\n if len(self.jobs) > 0:\n cmd = [\"scancel\"]+[str(j) for j in self.jobs]\n output = subprocess.check_output(cmd).decode(\"utf-8\")\n print(output, file=sys.stderr)", "def turn_off(self, **kwargs):\n self._lj.deactivate_load(self._index)", "def killJob(job_id):\n \n # mark all of the Ready tasks as Killed\n with transaction() as t:\n t.cur.execute(\"\"\"update Hydra_rendertask set status = 'K' \n where job_id = '%d' and status = 'R'\"\"\" % job_id)\n \n # get hostnames for tasks that were already started\n tuples = None # @UnusedVariable\n with transaction() as t:\n t.cur.execute(\"\"\"select host from Hydra_rendertask \n where job_id = '%d' and status = 'S'\"\"\" % job_id)\n tuples = t.cur.fetchall()\n \n # make flat list out of single-element tuples fetched from db\n hosts = [t for (t,) in tuples]\n \n # send a kill request to each host, note if any failures occurred\n error = False\n for host in hosts:\n try:\n error = error or not sendKillQuestion(host)\n except socketerror:\n logger.debug(\"There was a problem communicating with {:s}\"\n .format(host))\n error = True\n \n return error", "def stop(jira_url, jira_username, jira_api_key, toggl_api_key):\n click.echo(stop_task(jira_url, jira_username, jira_api_key, toggl_api_key))", "def stop(self) -> None:\n self._client.terminate_job(jobId = self.id, reason = self.STOP_REASON)", "def powerOff(self):\n self._sendCommand(self.SONY_CMD_ExtBackupCommunicator_ForcePowerOff, bufferSize=0)", "def kill_switch(disable_after, keys):\n watchdog(disable_after, keys)", "def _disable(self):\n self.enabled = False", "def disable(self):\n self.error_code = 'DISABLED'\n self.running = False", "def disable_irq() -> int:", "async def test_job_discarded(my_job):\n\n # Set up callback to get notifications when job state changes.\n job = None\n\n def on_job_update(_job):\n \"\"\"The callback to update `job`.\"\"\"\n nonlocal job\n job = _job\n # Asserts that job is either pending or canceled.\n assert job.state in ['PENDING', 'CANCELED'], (\n 'Job that canceled immediately after submission has wrong '\n f'state `{job.state}`!')\n\n 
my_job.set_on_update(on_job_update)\n\n # Submit a job.\n new_job = await my_job.job(mustfail=False)\n\n # It must be canceled OK, because we are sure it has not started.\n my_job.job_manager_class.cancel(new_job.id)\n\n # Process ASGI messages but do not wait for jobs (no jobs started).\n await my_job.process_jobs()\n\n # Check a state of the job.\n assert job.state == 'CANCELED', ('Canceled job has wrong state '\n f'`{job.state}`!')", "def disable_plugin(self, plugin: str):\r\n with PluginStore.mutex:\r\n if plugin not in self._disabled:\r\n self._disabled.append(plugin)", "def cancel(self):\n if not self.parent_node.is_job:\n return\n\n # First perform clean operation\n self.clean()\n\n self.winstance.send_event('Cancelling job..')\n result = self.winstance.execute_operation('hpc.interfaces.'\n 'lifecycle.cancel',\n kwargs={\"name\": self.name})\n self.winstance.send_event('.. job canceled')\n result.task.wait_for_terminated()\n\n self._status = 'CANCELLED'", "def __disable__(self) -> None:\n pass", "def __delete_job_status(self, job: Job):\n\n keys = self._get_keys(f'jobstatus:{job.id}:*')\n for key in keys:\n self.redis_client.delete(key)", "def rescheduleJob(self, job):\n with self:\n with self.queues.jobsInProgress:\n with self.queues.jobsDone:\n try:\n index = self.queues.jobsInProgress.index(job)\n except ValueError, ex:\n raise BlackboardUpdateError(\"Job not found in jobsInProgress: \" +\n job.getProperty(Props.NAME, \"(unidentified)\"))\n job = self.queues.jobsInProgress.pop(index)\n self.queues.jobsAvailable.append(job)", "def disable(self, item_id):\n pass", "def stop(self):\n self._refresh_job.cancel()\n super().stop()", "def _delete_job(self, job):\n with self.db_lock:\n return self.rcon.zrem(job)", "def cancel_unmute(self, id: int) -> None:\n\n self.tasks.remove_job(str(id), 'default')", "def stop_slurm_jobs():\n for job_id in slurm_jobs_scheduled:\n logger.info(\"Canceling previously scheduled job %s...\", job_id)\n cancel_command = [\"scancel\", str(job_id)]\n print(\" \".join(shlex.quote(part) for part in cancel_command))\n run(cancel_command, check=False)", "def disable(self, modname):\n if self.cfg.blacklist and modname not in self.cfg.blacklist: self.cfg.blacklist.append(modname)\n if self.cfg.loadlist and modname in self.cfg.loadlist: self.cfg.loadlist.remove(modname)\n self.cfg.save()", "def _kill_canceling(self, job):\n pidrecord = os.path.join(job.output_dir, \"jobpid\")\n if os.path.exists(pidrecord):\n with open(pidrecord, 'r') as f:\n pgid = int(f.read())\n self.logger.info(\"Signalling SIGTERM to process group: %d\", pgid)\n try:\n os.killpg(pgid, signal.SIGTERM)\n except OSError as e:\n self.logger.info(\"Unable to kill process group %d: %s\", pgid, e)\n os.unlink(pidrecord)", "def assignJobToScanFieldDisableAllOthers(self, jobname, wellx=1, welly=1, fieldx=1, fieldy=1):\n self.selectScanField(False, wellx, welly, fieldx, fieldy)\n self.assignJob(jobname)\n self.disableScanField(allfields=True)\n self.enableScanField(False, wellx, welly, fieldx, fieldy)", "def force_delete_job(self, token, job, context=None):\n return self._client.call_method(\n 'UserAndJobState.force_delete_job',\n [token, job], self._service_ver, context)", "def disable(self):\n self.rx.threadKill()\n self.tx.threadKill()\n time.sleep(1)\n self.fisica.close()" ]
[ "0.7047712", "0.66080946", "0.6542796", "0.61389357", "0.61290914", "0.6022472", "0.6003207", "0.596457", "0.59564406", "0.5941506", "0.59358865", "0.59051114", "0.58281994", "0.58168745", "0.5810319", "0.5772574", "0.57711303", "0.57645935", "0.57537967", "0.5739578", "0.5725265", "0.57167447", "0.568465", "0.56842715", "0.5678199", "0.5670605", "0.56671345", "0.56503046", "0.5645008", "0.56448716", "0.5644407", "0.56267995", "0.56166196", "0.5605216", "0.5584146", "0.55675375", "0.55562365", "0.55522716", "0.55504495", "0.55295247", "0.55268914", "0.5522792", "0.5518779", "0.5516299", "0.551489", "0.551218", "0.55036676", "0.54818165", "0.54733264", "0.5459864", "0.5457573", "0.54427695", "0.5436575", "0.5434581", "0.5425965", "0.54208875", "0.54052305", "0.5402388", "0.5396877", "0.5393868", "0.53822565", "0.53718245", "0.5357008", "0.53551376", "0.53412116", "0.533691", "0.5336844", "0.5332283", "0.53252584", "0.5321253", "0.53168166", "0.52989876", "0.52893716", "0.5288815", "0.528282", "0.52763426", "0.5274313", "0.52723217", "0.5270867", "0.52677625", "0.5264915", "0.52591836", "0.5256599", "0.52503484", "0.52470434", "0.52443564", "0.5241274", "0.5237801", "0.52373016", "0.5235591", "0.52342284", "0.52232325", "0.52225727", "0.5214821", "0.52112937", "0.5204303", "0.52037096", "0.52018976", "0.5195378", "0.51943004" ]
0.8205387
0
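Taken together, the disable/enable/create methods in this record and its negatives form a thin Python wrapper around the Jenkins CLI jar. Below is a minimal usage sketch, illustrative only: the import path, Jenkins URL, template directory, and job name are all assumptions, while `get_jenkins`, `disable_job`, and `enable_job` and their signatures are taken from the snippets above.

# Hedged usage sketch. The module path here is an assumption; only the
# get_jenkins/disable_job/enable_job signatures come from the records above.
from collections import namedtuple

from platform_ci.jenkins import PlatformJenkinsJavaCLI  # assumed module path

# Minimal stand-in for a job object: disable_job/enable_job only read job.name.
Job = namedtuple("Job", ["name"])

jenkins = PlatformJenkinsJavaCLI.get_jenkins(
    "https://jenkins.example.com",  # assumed Jenkins URL
    template_dir="jjb-templates",   # assumed JJB template directory
)
job = Job(name="nightly-build")  # assumed job name
jenkins.disable_job(job)  # shells out to the CLI; raises PlatformJenkinsException on nonzero exit
jenkins.enable_job(job)   # re-enables the same job via the ENABLE_JOB CLI command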
Create a given job on Jenkins.
def create_job(self, job):
    call = subprocess.Popen(self.cli + [PlatformJenkinsJavaCLI.CREATE_JOB, job.name], stdin=subprocess.PIPE)
    out, err = call.communicate(input=platform_ci.jjb.get_job_as_xml(job, self.template_dir))
    call.wait()
    if call.returncode != 0:
        logging.info(out)
        logging.error(err)
        raise PlatformJenkinsException("Creating job failed: " + job.name)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def create(cfg, jobs):\n server = jenkins_utils.server_factory(cfg)\n libjobs.createJobs(server, jobs)", "def create_job(api_instance, job):\n api_response = api_instance.create_namespaced_job(\n body=job, namespace=\"default\", pretty=True\n )\n logger.info(\"Job created with status='%s'\" % str(api_response.status))\n return api_response", "def create_job(jobtype, server):\n name = generate_job_name(jobtype)\n job = Job.objects.create(jobtype=jobtype, server=server, name=name)\n return job", "def create_job(self, name: str) -> Slurm:\n LOG.info(\"Create a slurm job with name %s\", name)\n job = Slurm(\n name,\n {\"account\": self.account, \"time\": self.time,},\n scripts_dir=str(self.scripts_dir),\n log_dir=str(self.log_dir),\n )\n return job", "def _create_job(self,\n name,\n environment_string,\n description='',\n platform='LINUX'):\n job = data_types.Job()\n job.name = name\n if environment_string.strip():\n job.environment_string = environment_string\n job.platform = platform\n job.descripton = description\n job.put()\n\n return job", "def createJob(self, joboptions, previousId=None):\n root = self.manifest.getRootResource()\n assert self.manifest.tosca\n job = Job(self, root, joboptions, previousId)\n\n if (\n self.manifest.localEnv\n and not joboptions.parentJob\n and not joboptions.startTime\n ):\n logPath = self.manifest.getJobLogPath(job.getStartTime(), \".log\")\n if not os.path.isdir(os.path.dirname(logPath)):\n os.makedirs(os.path.dirname(logPath))\n initLogging(logfile=logPath)\n path = self.manifest.path\n if joboptions.planOnly:\n logger.info(\"creating %s plan for %s\", joboptions.workflow, path)\n else:\n logger.info(\"starting %s job for %s\", joboptions.workflow, path)\n\n WorkflowPlan = Plan.getPlanClassForWorkflow(joboptions.workflow)\n if not WorkflowPlan:\n raise UnfurlError(\"unknown workflow: %s\" % joboptions.workflow)\n job.plan = WorkflowPlan(root, self.manifest.tosca, joboptions)\n return job", "def create_job(self, context=None):\n return self._client.call_method(\n 'UserAndJobState.create_job',\n [], self._service_ver, context)", "def create(self, resource, **data):\n body = ''\n if resource == 'robot/job':\n body = data['body']\n else:\n body = urllib.urlencode(data)\n\n return self.request('/' + resource, 'POST', body=body)", "def job_create(self, sender, name=None):\n self._require_running()\n name = name or self.DEFAULT_JOB_NAME\n job_id = uuid.uuid4().hex\n assert job_id not in self._jobs\n assert sender is not None\n assert sender.connection\n job = Job(\n job_id,\n name,\n self._session_root.joinpath(job_id),\n sender,\n self._loop\n )\n self._jobs[job_id] = job\n self._jobs_by_connection[sender.connection][job_id] = job\n self._log.debug('Created job %s', job)\n return job_id", "def create_job(script, args, inputs, outputs, tools):\n job = Job(script)\n for arg in args:\n job.addArguments(arg)\n\n # all jobs will have access to python scripts\n for tool in tools:\n job.uses(tools[tool], link=Link.INPUT)\n\n for input in inputs:\n job.uses(input, link=Link.INPUT)\n\n for output in outputs:\n job.uses(output, link=Link.OUTPUT, transfer=True)\n\n #job.addProfile(Profile(Namespace.PEGASUS, 'clusters.size', '20'))\n return job", "def create_job(project, description):\n randomnames = open(os.path.join(\"Anemone\", \"templates\", \"namegen.html\")).readlines()\n jobname = (\"Quick.\" +\n random.choice(randomnames)[:-1] + # for some reason choice gives extra space\n random.choice(randomnames)[:-1]) # for some reason choice gives extra space\n\n newjob = 
Job.create(project=project, name=jobname, description=description)\n newjob.name = newjob.name + \".{0:0=3d}\".format(newjob.id)\n newjob.save()\n return newjob", "def send_job(self):\n graph = self.processgraphEdit.toPlainText()\n # info(self.iface, graph)\n response = self.connection.job_create(json.loads(graph))\n if response.status_code == 201:\n info(self.iface, \"Successfully created new job, Response: {}\".format(response.status_code))\n else:\n warning(self.iface, \"Not able to created new job, Response: {}\".format(str(response.json())))", "def create(self, validated_data):\n return Job.objects.create(**validated_data)", "def create_jobs(release, project_yaml):\n logger = logging.getLogger(__file__)\n\n # We assume here project keep their subrepo jobs under the part\n # project name. Otherwise we'll have to look for jjb/<repo> for each\n # branch listed.\n project, _ = next(iter(project_yaml['branches'][0]['location'].items()))\n\n yaml_parser = YAML()\n yaml_parser.preserve_quotes = True\n yaml_parser.explicit_start = True\n # yaml_parser.indent(mapping=4, sequence=0, offset=0)\n # These are some esoteric values that produce indentation matching our jjb\n # configs\n # yaml_parser.indent(mapping=3, sequence=3, offset=2)\n # yaml_parser.indent(sequence=4, offset=2)\n yaml_parser.indent(mapping=2, sequence=4, offset=2)\n\n (job_files, skipped_files) = jjb_files(project, release)\n\n if skipped_files:\n logger.info(\"Jobs already exists for %s in files: %s\",\n project, ', '.join(skipped_files))\n # Exit if there are not jobs to create\n if not job_files:\n return\n logger.info(\"Creating Jenkins Jobs for %s in files: %s\",\n project, ', '.join(job_files))\n\n stable_branch_stream = \"\"\"\\\n %s:\n branch: 'stable/{stream}'\n gs-pathname: '/{stream}'\n disabled: false\n \"\"\" % release\n\n stable_branch_yaml = yaml_parser.load(stable_branch_stream)\n stable_branch_yaml[release].yaml_set_anchor(release, always_dump=True)\n\n for job_file in job_files:\n yaml_jjb = yaml_parser.load(open(job_file))\n if 'stream' not in yaml_jjb[0]['project']:\n continue\n\n # TODO: Some JJB files don't have 'stream'\n project_config = yaml_jjb[0]['project']['stream']\n # There is an odd issue where just appending adds a newline before the\n # branch config, so we append (presumably after master) instead.\n project_config.insert(1, stable_branch_yaml)\n\n # NOTE: In the future, we may need to override one or multiple of the\n # following ruamal Emitter methods:\n # * ruamel.yaml.emitter.Emitter.expect_block_sequence_item\n # * ruamel.yaml.emitter.Emitter.write_indent\n # To hopefully replace the need to shell out to sed...\n yaml_parser.dump(yaml_jjb, open(job_file, 'w'))\n args = ['sed', '-i', 's/^ //', job_file]\n subprocess.Popen(args, stdout=subprocess.PIPE, shell=False)", "def create_custom_job(\n type,\n project,\n location,\n payload,\n gcp_resources,\n):\n remote_runner = job_remote_runner.JobRemoteRunner(\n type, project, location, gcp_resources\n )\n\n try:\n # Create custom job if it does not exist\n job_name = remote_runner.check_if_job_exists()\n if job_name is None:\n job_name = remote_runner.create_job(\n create_custom_job_with_client,\n insert_system_labels_into_payload(payload),\n )\n\n # Poll custom job status until \"JobState.JOB_STATE_SUCCEEDED\"\n remote_runner.poll_job(get_custom_job_with_client, job_name)\n except (ConnectionError, RuntimeError) as err:\n error_util.exit_with_internal_error(err.args[0])", "def create_job_object(message, environment_image):\n\n 
PYTHONUNBUFFERED_ENV = client.V1EnvVar(name=\"PYTHONUNBUFFERED\", value=\"1\")\n AUTH_TOKEN_ENV = client.V1EnvVar(name=\"AUTH_TOKEN\", value=AUTH_TOKEN)\n EVALAI_API_SERVER_ENV = client.V1EnvVar(\n name=\"EVALAI_API_SERVER\", value=EVALAI_API_SERVER\n )\n MESSAGE_BODY_ENV = client.V1EnvVar(name=\"BODY\", value=json.dumps(message))\n submission_pk = message[\"submission_pk\"]\n image = message[\"submitted_image_uri\"]\n # Configureate Pod agent container\n agent_container = client.V1Container(\n name=\"agent\", image=image, env=[PYTHONUNBUFFERED_ENV]\n )\n # Configureate Pod environment container\n environment_container = client.V1Container(\n name=\"environment\",\n image=environment_image,\n env=[\n PYTHONUNBUFFERED_ENV,\n AUTH_TOKEN_ENV,\n EVALAI_API_SERVER_ENV,\n MESSAGE_BODY_ENV,\n ],\n resources=client.V1ResourceRequirements(\n limits={\"nvidia.com/gpu\": \"1\"}\n ),\n )\n # Create and configurate a spec section\n template = client.V1PodTemplateSpec(\n metadata=client.V1ObjectMeta(labels={\"app\": \"evaluation\"}),\n spec=client.V1PodSpec(\n containers=[environment_container, agent_container],\n restart_policy=\"Never\",\n ),\n )\n # Create the specification of deployment\n spec = client.V1JobSpec(backoff_limit=1, template=template)\n # Instantiate the job object\n job = client.V1Job(\n api_version=\"batch/v1\",\n kind=\"Job\",\n metadata=client.V1ObjectMeta(\n name=\"submission-{0}\".format(submission_pk)\n ),\n spec=spec,\n )\n return job", "def created_job(new_job, bulk_request):\n bulk_request.return_value = '''<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n <jobInfo xmlns=\"http://www.force.com/2009/06/asyncapi/dataload\">\n <id>THEJOBID</id>\n <operation>update</operation>\n <object>Lead</object>\n </jobInfo>\n '''\n new_job.create()\n return new_job", "def test_post_job(self):\n body = UnitTesterJobCreateReq()\n response = self.client.open(\n '/v1/job',\n method='POST',\n data=json.dumps(body),\n content_type='application/json')\n self.assert200(response,\n 'Response body is : ' + response.data.decode('utf-8'))", "def trigger_job(self, job, parameters=None):\n parameters = parameters or {}\n parameter_list = []\n for key in parameters:\n parameter_list.append(\"-p\")\n parameter_list.append(\"%s=%s\" % (key, parameters[key]))\n if subprocess.call(self.cli + [PlatformJenkinsJavaCLI.BUILD_JOB, job.name] + parameter_list) != 0:\n raise PlatformJenkinsException(\"Triggering job failed: \" + job.name)", "def _create_jenkins_build(self, step):\n # we also have to inject the correct build_type here in order\n # to generate the correct params and to generate the correct\n # commands later on\n builder = self.get_builder(build_type=self.shard_build_type)\n\n builder.create_jenkins_build(step, job_name=step.data['job_name'],\n script=step.data['cmd'].format(\n test_names=' '.join(step.data['tests']),\n ),\n setup_script=self.shard_setup_script,\n teardown_script=self.shard_teardown_script,\n path=step.data['path'],\n )", "def build_job(job_url, pr_number, run_full, has_params):\n log.debug(\n \"job_url: %s pr_num: %s run_full: %s has_params: %s\",\n job_url,\n pr_number,\n run_full,\n has_params,\n )\n if has_params:\n pr_url = \"{}/job/PR-{}/buildWithParameters?runFull={}\".format(\n job_url.rstrip(\"/\"), pr_number, \"true\" if run_full else \"false\"\n )\n else:\n pr_url = \"{}/job/PR-{}/build\".format(\n job_url.rstrip(\"/\"),\n pr_number,\n )\n res = requests.get(\n uri + \"/crumbIssuer/api/json\",\n auth=requests.auth.HTTPBasicAuth(user, password),\n verify=verify,\n )\n if 
res.status_code != 200:\n raise Exception(\"Jenkins returned non 200 response\")\n data = res.json()\n res = requests.post(\n pr_url,\n headers={\n \"Content-Type\": \"application/x-www-form-urlencoded\",\n data[\"crumbRequestField\"]: data[\"crumb\"],\n },\n auth=requests.auth.HTTPBasicAuth(user, password),\n verify=verify,\n )\n if res.status_code == 201:\n log.info(\"Build started: %s\", pr_url)\n else:\n log.info(\"Build request received non 201 status: %s\", res.status_code)", "def _create_job(self, tjc, machine, build_url, project, revision, build_type, build_abi,\n build_platform, build_sdk, builder_type, t):\n logger = utils.getLogger()\n logger.debug('AutophoneTreeherder.create_job: %s', t)\n assert self.url and revision, 'AutophoneTreeherder.create_job: no url/revision'\n\n if len(revision) != 40:\n logger.warning('AutophoneTreeherder using revision with length %d: %s',\n len(revision), revision)\n\n logger.info('creating Treeherder job %s for %s %s, revision: %s',\n t.job_guid, t.name, project, revision)\n if not t.job_guid:\n logger.error(\n '_create_job: invalid job_guid %s for test %s, '\n 'machine: %s, build_url: %s, project: %s, revision: %s, '\n 'build_type: %s, build_abi: %s, build_platform: %s, '\n 'build_sdk: %s, builder_type: %s',\n t.name, t.job_guid, machine, build_url, project,\n revision, build_type, build_abi, build_platform,\n build_sdk, builder_type)\n raise Exception('Can not create Treeherder Job with invalid test job_guid')\n\n logger.debug('AutophoneTreeherder.create_job: test config_file=%s, config sections=%s',\n t.config_file, t.cfg.sections())\n\n tj = tjc.get_job()\n tj.add_tier(self.options.treeherder_tier)\n tj.add_revision(revision)\n tj.add_project(project)\n tj.add_job_guid(t.job_guid)\n tj.add_job_name(t.job_name)\n tj.add_job_symbol(t.job_symbol)\n tj.add_group_name(t.group_name)\n tj.add_group_symbol(t.group_symbol)\n tj.add_product_name('fennec')\n\n tj.add_machine(machine)\n build_platform = platform(architecture(build_abi),\n build_platform,\n build_sdk)\n build_architecture = architecture(build_abi)\n machine_platform = platform(architecture(t.phone.abi),\n t.phone.os,\n build_sdk)\n machine_architecture = architecture(t.phone.abi)\n tj.add_build_info('android', build_platform, build_architecture)\n tj.add_machine_info('android', machine_platform, machine_architecture)\n tj.add_option_collection({build_type: True})\n\n # Add job details for storing information regarding the build (so we can\n # retrigger them)\n job_details = [\n {'title': title, 'value': str(value)} for (title, value) in [\n ('config_file', t.config_file),\n ('chunk', t.chunk),\n ('builder_type', builder_type)\n ]\n ]\n job_details.append({'title': 'build_url',\n 'value': 'build_url',\n 'url': build_url})\n tj.add_artifact('Job Info', 'json', {\n 'job_details': job_details\n })\n\n return tj", "async def create_job(response: Response,\n request: Request,\n job: Job = Body(\n ...,\n example={\n \"id_video\": \"bbb_0.mp4\",\n \"bitrate\": 7000,\n \"speed\": \"ultrafast\",\n },\n )\n ): \n \n\n # get an ID and return to client\n id_job = mngr.getID()\n logger.debug(\"got id_job %s\" %id_job)\n resp = [\"http:/\"]\n resp.append(request.headers['host'])\n resp.append(id_job)\n response.headers[\"Location\"] = \"/\".join(resp)\n\n # create the task\n mngr.newJob(id_job, \n job.id_video, \n job.bitrate, \n job.speed)\n\n return id_job", "def create(cls, job_id: str) -> \"JobManifest\":\n now = datetime.datetime.now(datetime.timezone.utc)\n return JobManifest(creation_time=now, 
job_id=job_id, orbit_ids=[], task_ids=[])", "def create_job(self, employer_id, compensation, location, description, category_id, group_id):\n\n job = Job(employer_id=employer_id, group_id=group_id, compensation=compensation, location=location, category_id=category_id, description=description) \n db.session.add(job)\n db.session.commit()", "def jobserver_job():\n return _MakeJob()", "def add_new_job():\n ClientID = request.form['ClientID']\n job_name = request.form['job_name']\n rate = int(float(request.form['rate']) * 100)\n\n job = Job(Name=job_name, ClientID=ClientID, DefaultRate=rate, Active=True)\n\n get_module_logger().info(\"Created job %s\", job)\n\n job.insert()\n\n return redirect(url_for('all_jobs_for_client', ClientID=ClientID))", "def post(self):\n data, errors = JobSchema().loads(request.data)\n\n if errors:\n return Response().send(\n data=None, status=400, code=\"bad_request\", message=errors\n )\n return self.job.create(request.json)", "def test_create_job(self):\n engine = Engine(self.config_file, self.api_token)\n\n engine.create_job()\n\n assert engine.ingest_job_id == 23", "def create_job_object(job_type: int = 0,\n team_id: int = 0,\n destination_name: str = None,\n destination_lat: float = 0,\n destination_lng: float = 0,\n destination_text: str = None,\n destination_url: str = None,\n text_dispatcher: str = None,\n text_receiver: str = None,\n contact_name: str = None,\n contact_phone: str = None,\n contact_email: str = None,\n day: int = None,\n priority: int = None,\n number: int = None,\n on_site_seconds: int = None,\n window_start: int = None,\n window_end: int = None,\n order_id: int = None,\n dispatcher_uid: str = None,\n place_uid: str = None,\n worker: str = None,\n items_to_dropoff: int = None,\n items_to_pickup: int = None,\n custom_attributes: dict = None) -> dict:\n\n job = {\n \"type\": job_type,\n \"teamId\": team_id,\n \"destinationName\": destination_name,\n \"destinationLat\": destination_lat,\n \"destinationLng\": destination_lng,\n \"destinationText\": destination_text,\n \"destinationUrl\": destination_url,\n \"textDispatcher\": text_dispatcher,\n \"textReceiver\": text_receiver,\n \"contactName\": contact_name,\n \"contactPhone\": contact_phone,\n \"contactEmail\": contact_email,\n \"day\": day,\n \"priority\": priority,\n \"number\": number,\n \"onSiteSeconds\": on_site_seconds,\n \"windowStart\": window_start,\n \"windowEnd\": window_end,\n \"orderId\": order_id,\n \"dispatcherUid\": dispatcher_uid,\n \"placeUid\": place_uid,\n \"worker\": worker,\n \"itemsToDropoff\": items_to_dropoff,\n \"itemsToPickup\": items_to_pickup\n }\n job_without_none = {k: v for k, v in job.items() if v is not None}\n job.clear()\n job.update(job_without_none)\n\n if custom_attributes:\n job.update({f\"custom_{k}\": v for k, v in custom_attributes.items() if k})\n\n return job", "def make_instance(self, include_optional):\n # model = kloudio.models.new_job.NewJob() # noqa: E501\n if include_optional :\n return NewJob(\n destination = 'email', \n report_name = 'mysql-report', \n report_id = '57d3273aed8c3e1e1c0d3746', \n report_params = None, \n frequency = 'Hourly', \n am_pm = 'am', \n hour = '01', \n minute = '45', \n day = 'Monday', \n description = 'This is a sample query', \n spreadsheet_id = '1-sl-_DtdBUmbi-FyJOwc2dXGd6xX0xZstX7UzlsU_EA', \n sheet_id = '193832851', \n sheet_name = 'Sales-v2', \n timezone = 'PST', \n select_cols = users, \n tags = 'users', \n email_on_success = True, \n email_on_error = True, \n metadata = None, \n template_id = 
'Y-z-jjFZ0H3u3maN', \n template_name = 'Template2404a', \n job_type = 'EMAIL'\n )\n else :\n return NewJob(\n destination = 'email',\n report_name = 'mysql-report',\n report_id = '57d3273aed8c3e1e1c0d3746',\n frequency = 'Hourly',\n am_pm = 'am',\n hour = '01',\n minute = '45',\n day = 'Monday',\n )", "def create_new_job(self, search_id: Hashable) -> Hashable:\n partial_id = (\n self._redis.incr(f\"search:{search_id}.job_id_counter\", amount=1) - 1\n )\n partial_id = f\"{partial_id}\" # converting to str\n job_id = f\"{search_id}.{partial_id}\"\n self._redis.rpush(f\"search:{search_id}.job_id_list\", job_id)\n self._redis.json().set(\n f\"job:{job_id}\", \".\", {\"in\": None, \"metadata\": {}, \"out\": None}\n )\n return job_id", "def sfdcCreateJob(**kwargs):\n api_ver = kwargs.get('api_ver', '')\n session_id = kwargs.get('session_id', '')\n instance = kwargs.get('instance', '')\n job_id = kwargs.get('job_id', '')\n sfdcXml = kwargs.get('sfdcXml', {})\n\n bodyXml = sfdcXml.get('job', {}).get('body')\n url = sfdcXml.get('job', {}).get('url')\n headers = sfdcXml.get('job', {}).get('headers')\n\n bodyXml = unicode(bodyXml, \"utf-8\")\n url = url.format(instance=instance, api_ver=api_ver)\n headers['X-SFDC-Session'] = self.session_id\n\n resp = requests.post(url=url, headers=headers, data=bodyXml)\n dictResp = xmltodict.parse(resp.text)\n job_id = str(dictResp['jobInfo']['id'])\n\n self.job_id = job_id\n return job_id", "def submit_job(self, batch_id):\n\n job_name = self.bot_id + \"_\" + batch_id\n job_queue = self.jobQueueName\n job_definition = self.job_def\n command = self.bot_cmd\n\n kwargs = {'jobName': job_name,\n 'jobQueue': job_queue,\n 'jobDefinition': job_definition,\n 'containerOverrides': {'command': [command]}}\n print(\">>> Going to create job: \" + str(kwargs))\n submit_job_response = self.batch_client.submit_job(jobName=job_name,\n jobQueue=job_queue,\n jobDefinition=job_definition,\n # containerOverrides={'command': [command]}\n )\n\n print(\">>> submit job response is :\" + str(submit_job_response))\n job_id = submit_job_response['jobId']\n print('Submitted job [%s - %s] to the job queue [%s]' % (job_name, job_id, job_queue))", "def prepare(cls, username, **kwargs):\n\n # first prepare a job record\n job = background_helper.create_job(username, cls.__action__,\n queue_id=huey_helper.queue_id, )\n return job", "def get_jenkins(cls, url, template_dir=None):\n return PlatformJenkinsJavaCLI(template_dir, url)", "def create(self, skeleton):\n data = dict(skeletonSSHKey=skeleton)\n return Job(self.client.post(self.endpoint, data, codes.accepted))", "def add_default_job():\n new_job = Job(name='job50')\n new_job.insert() \n return ('', 204)", "def create_job_for_element(self, parameters: List[Parameter]) -> str:\n\n row = JobParameterSet(parameters)\n source = row.get_source()\n if not source:\n raise JobCreationException(\n \"No source identifier found. 
I can't create a job without knowing\"\n \" where to get the data\"\n )\n\n try:\n if row.is_pacs_type(source):\n response = self.client_tool.create_pacs_job(\n server=self.get_active_server(), **row.as_kwargs()\n )\n elif row.is_path_type(source):\n response = self.client_tool.create_path_job(\n server=self.get_active_server(), **row.as_kwargs()\n )\n else:\n raise JobCreationException(f\"Unknown source '{source}'\")\n\n except (APIClientError, PersistenceError) as e:\n raise JobCreationException(\n f\"Error creating job for source {source}\"\n ) from e\n\n return str(response.job_id)", "def create(self, name, login, password, email, address=\"\", vat=\"\", jobguid=\"\", executionparams=None):", "def createjob(args):\n ncell = args.ncell\n nmg = args.nmg\n nsi = args.nsi\n nvac = args.nvac\n a0 = args.a0\n temp = args.temp\n nseeds = args.nseeds\n seeds = args.seeds\n nsteps = args.nsteps\n foldername_append = args.foldername_append\n pot = args.pot\n submit = args.submit\n submitdebug = args.submitdebug\n submittime_hours = args.submittime_hours\n test = args.test\n testfiles = args.testfiles\n nodes = args.nodes\n verbose = args.verbose\n\n\n ### check if ase runner/quippy/lammpps-data formats are known\n ase_formats = mu.ase_get_known_formats_class(verbose=True)\n ase_formats.check_if_default_formats_known(copy_and_adapt_formatspy_anyhow=False)\n\n # definex ffsocket inet/unix\n if nodes == 1:\n ffsocket = \"unix\"\n elif nodes > 1:\n ffsocket = \"inet\"\n else:\n sys.exit(\"Number of nodes has to be positive!\")\n\n\n # define ntasks, neval\n lmp_par = 2 # = OMP_NUM_THREADS\n ntasks = cores = nodes * 28\n ipi_inst = 4 # for sure best on fidis\n neval = ipi_inst*2 # was alwasy better, for ompi and impi\n\n ##### get the seed(s).\n if type(seeds) == bool:\n seeds = random.sample(range(1, 999999), nseeds)\n print('seeds',seeds)\n if test == True:\n nseeds = 1\n seeds = [1]\n print('seeds',seeds)\n nseeds = len(seeds)\n\n ##### a few checks\n scripts = mu.scripts()\n mypot = mu.mypot(pot)\n if submit is True or submitdebug is True:\n hostcheck = os.environ[\"myhost\"]\n if hostcheck == \"\":\n sys.exit('host unknown 87')\n\n\n ##### here only chck if the potential can be set up. 
(in.lmp)\n ##### the same command is then executed for every kmc folder\n ace = mu.ase_calculate_ene(pot=pot,\n potpath=False,\n units='eV',geopt=False,kmc=True,verbose=verbose)\n ace.pot_get_and_ase_lmp_cmd(kmc=True,temp=temp,nsteps=nsteps,ffsocket=ffsocket)\n\n ##### if test\n if test == True:\n nsteps = 50\n\n file_ipi_input_runner = scripts + \"/i-pi-mc_scripts/input-runner.xml\"\n\n\n ####################################\n # get directory\n ####################################\n if verbose:\n print(\"get directory\")\n pcsi = nsi/ncell**3.*100\n pcmg = nmg/ncell**3.*100\n pcvac = nvac/ncell**3.*100\n if args.cubic == True:\n pc = \"cubic\"\n else:\n pc = \"primitive\"\n directory = str(ncell)+\"x\"+str(ncell)+\"x\"+str(ncell)+\"_\"+pc+\"_\"+pot+\"_\"+\\\n str(temp)+\"K_\"+\\\n str(nvac)+\"Vac_\"+str(nmg)+\"Mg_\"+str(nsi)+\"Si__\"+\\\n str(round(pcvac,3))+\"pctVac_\"+str(round(pcmg,3))+\"pctMg_\"+str(round(pcsi,3))+\"pctSi\"\n if foldername_append != \"\":\n directory = directory+\"_\"+foldername_append\n\n ###############################################\n # make the structure\n ###############################################\n atomsc_fakevac = mu.get_ase_atoms_object_kmc_al_si_mg_vac(ncell,nsi,nmg,nvac,a0,create_fake_vacancy = True,cubic=args.cubic)\n atomsc = mu.get_ase_atoms_object_kmc_al_si_mg_vac(ncell,nsi,nmg,nvac,a0,cubic=args.cubic)\n\n # make the atomic structure\n # this was to play ... not necessary now?\n if False:\n nndist = a0/np.sqrt(2.)\n\n from ase.io import read as ase_read\n from ase.io import write as ase_write\n\n ###############################################\n # get the amount of 1NN in a relly large cell\n ###############################################\n atomsc_fakevac_i = ase_read('dataxx.extxyz3',index=\":\",format='extxyz') # works, cell ist not changed\n #atomsc_fakevac_i = mu.get_ase_atoms_object_kmc_al_si_mg_vac(ncell=10,nsi=0,nmg=0,nvac=1,a0=a0,cubic=False,create_fake_vacancy = True,normal_ordering=\"XX_0\")\n #nn = mu.ase_get_neighborlist(atomsc_fakevac_i,atomnr=0,cutoff=3.,skin=0.1)\n #print(\"nn\",nn,'len',len(nn))\n #nn = mu.ase_get_neighborlist(atomsc_fakevac_i,atomnr=0,cutoff=8.5,skin=0.1)\n #print(\"nn\",nn,'len',len(nn))\n #sys.exit()\n\n print(len(atomsc_fakevac_i),type(atomsc_fakevac_i))\n\n for idx,i in enumerate(atomsc_fakevac_i):\n print('aa',atomsc_fakevac_i[idx].positions[0])\n #print('aa',i.positions[0])\n print('ipi')\n atomsc_fakevac_i = ase_read('dataxx.ipi2',index=\":\",format='ipi') # works, cell ist not changed\n print(len(atomsc_fakevac_i),type(atomsc_fakevac_i))\n for idx,i in enumerate(atomsc_fakevac_i):\n print('aa',atomsc_fakevac_i[idx].positions[0])\n #print('aa',i.positions[0])\n print('quippy')\n atomsc_fakevac_i = ase_read('dataxx.quippy.xyz2',index=\":\",format='quippy') # works, cell ist not changed\n\n\n\n filename = '../sim.xyz'\n filename = '../simulation.pos_0.xyz'\n mu.count_amount_1NN_around_vacancies(filename,cutoffa=nndist,cutoffb=a0,skin=0.1,format='ipi')\n sys.exit()\n\n def mysave_quippy_xyz(atomsc_fakevac,text=False):\n if type(text) == bool:\n sys.exit('define text')\n atomsc_fakevac.write('data.quippy.xyz',format='quippy',append=True)\n #atomsc_fakevac.write('data.xyz',format=\"extxyz\",append=True)\n atomsc_fakevac.write('data'+text+'.quippy.xyz',format='quippy',append=True)\n #atomsc_fakevac.write('data'+text+'.xyz',format=\"extxyz\",append=True)\n return\n\n # create Al with single vacancy\n atomsc_fakevac = 
mu.get_ase_atoms_object_kmc_al_si_mg_vac(ncell=5,nsi=0,nmg=0,nvac=1,a0=a0,cubic=False,create_fake_vacancy = True,normal_ordering=\"XX_0\")\n NN_1_indices, NN_2_indices = mu.ase_get_neighborlist_1NN_2NN(atomsc_fakevac,atomnr=0,cutoffa=nndist,cutoffb=a0,skin=0.1)\n #print('from ....',(atomsc_fakevac.positions)[0])\n #for i in NN_1_indices:\n # print((atomsc_fakevac.positions)[i])\n print('NN_1_indices (orig ):',NN_1_indices)\n print('NN_2_indices (orig ):',NN_2_indices)\n #sys.exit()\n atomsc_fakevac.write('dataxx.quippy.xyz',format='quippy',append=True)\n atomsc_fakevac.write('dataxx.poscar',format='vasp',append=True)\n atomsc_fakevac.write('dataxx.ipi',format='ipi',append=True) # works, currently so implemented that it canges cell\n atomsc_fakevac.write('dataxx.xyz',format='xyz',append=True)\n atomsc_fakevac.write('dataxx.extxyz',format='extxyz',append=True)\n atomsc_fakevac.write('dataxx.lammps-data',format='lammps-data',append=True)\n atomsc_fakevac.write('dataxx.lammps-runner',format='lammps-runner',append=True)\n\n atomsc_fakevac_a = ase_read('dataxx.extxyz',format='extxyz') # works, cell ist not changed\n atomsc_fakevac_a.write('dataxx.extxyz2',format='extxyz',append=True) # works, cell is not changed\n\n atomsc_fakevac_b = ase_read('dataxx.xyz',format='xyz') # not working # but this should work\n atomsc_fakevac_b.write('dataxx.xyz2',format='xyz',append=True) # this is working\n\n atomsc_fakevac_c = ase_read('dataxx.ipi',format='ipi') # works, currently so implemented that it canges cell\n #print('ipi cell',atomsc_fakevac_c.get_cell())\n\n atomsc_fakevac_c.write('dataxx.ipi2',format='ipi',append=True) # works, just writes the cell it gests.\n atomsc_fakevac_c.write('dataxx.ipi2_poscar',format='vasp',append=True) # works, just writes the cell it gests.\n NN_1_indices, NN_2_indices = mu.ase_get_neighborlist_1NN_2NN(atomsc_fakevac_c,atomnr=0,cutoffa=nndist,cutoffb=a0,skin=0.1)\n print('NN_1_indices (ipi ):',NN_1_indices)\n print('NN_2_indices (ipi ):',NN_2_indices)\n #print('from ....',(atomsc_fakevac_c.positions)[0])\n #for i in NN_1_indices:\n # print((atomsc_fakevac_c.positions)[i])\n\n atomsc_fakevac_cc = ase_read('dataxx.ipi2_poscar',format='vasp') # works, currently so implemented that it canges cell\n atomsc_fakevac_cc.write('dataxx.ipi2_poscar2',format='vasp',append=True)\n atomsc_fakevac_cc.write('dataxx.ipi2_poscar2_ipi',format='ipi',append=True) # works, just writes the cell it gests.\n #print('ipi cell2 (ext):',atomsc_fakevac_cc.get_cell())\n #print()\n #print('now quippy')\n atomsc_fakevac_d = ase_read('dataxx.quippy.xyz',format='quippy')\n #print('quippy cell (ext)',atomsc_fakevac_d.get_cell())\n atomsc_fakevac_d.write('dataxx.quippy.xyz2',format='quippy',append=True)\n atomsc_fakevac_d.write('dataxx.quippy.xyz2_extxyz',format='extxyz',append=True)\n NN_1_indices, NN_2_indices = mu.ase_get_neighborlist_1NN_2NN(atomsc_fakevac_d,atomnr=0,cutoffa=nndist,cutoffb=a0,skin=0.1)\n print('NN_1_indices (quippy):',NN_1_indices)\n print('NN_2_indices (quippy):',NN_2_indices)\n #print('from ....',(atomsc_fakevac_d.positions)[0])\n #for i in NN_1_indices:\n # print((atomsc_fakevac_d.positions)[i])\n path = \"/home/glensk/kmc/run_michele/Si6Mg6V1.1_/simulation.pos_libatom_2struct.xyz\"\n atomsc_fakevac_e = ase_read(path,format='quippy')\n\n NN_1_indices, NN_2_indices = mu.ase_get_neighborlist_1NN_2NN(atomsc_fakevac_e,atomnr=0,cutoffa=nndist,cutoffb=a0,skin=0.1)\n print('NN_1_indices (kmc ):',NN_1_indices)\n print('NN_2_indices (kmc ):',NN_2_indices)\n sys.exit()\n\n NN_1_indices = 
mu.ase_get_neighborlist(atomsc_fakevac,atomnr=0,cutoff=nndist,skin=0.1)\n        NN_1_2_indices_tmp = mu.ase_get_neighborlist(atomsc_fakevac,atomnr=0,cutoff=a0,skin=0.1)\n        print('NN_1_indices  :',NN_1_indices)\n        NN_2_indices = np.sort(np.array(mu.diff(NN_1_2_indices_tmp,NN_1_indices)))\n        print('NN_2_indices  :',NN_2_indices)\n        NN_1_2_indices = np.concatenate((NN_1_indices, NN_2_indices ))\n        print('NN_1_2_indices:',NN_1_2_indices)\n\n\n        # fill only 1NN (with one species)\n        for i in [ 'Mg', 'Si' ]:\n            atomsc_fakevac = mu.get_ase_atoms_object_kmc_al_si_mg_vac(ncell=5,nsi=0,nmg=0,nvac=1,a0=a0,cubic=False,create_fake_vacancy = True,normal_ordering=\"XX_0\")\n            mysave_quippy_xyz(atomsc_fakevac,text=\"1NN\")\n            for ii in NN_1_indices:\n                atomsc_fakevac[ii].symbol = i\n                mysave_quippy_xyz(atomsc_fakevac,text=\"1NN\")\n\n        # fill only 2NN (with one species)\n        for i in [ 'Mg', 'Si' ]:\n            atomsc_fakevac = mu.get_ase_atoms_object_kmc_al_si_mg_vac(ncell=5,nsi=0,nmg=0,nvac=1,a0=a0,cubic=False,create_fake_vacancy = True,normal_ordering=\"XX_0\")\n            mysave_quippy_xyz(atomsc_fakevac,text=\"2NN\")\n            for ii in NN_2_indices:\n                atomsc_fakevac[ii].symbol = i\n                mysave_quippy_xyz(atomsc_fakevac,text=\"2NN\")\n\n        # fill 1NN and 2NN (with one species)\n        for i in [ 'Mg', 'Si' ]:\n            atomsc_fakevac = mu.get_ase_atoms_object_kmc_al_si_mg_vac(ncell=5,nsi=0,nmg=0,nvac=1,a0=a0,cubic=False,create_fake_vacancy = True,normal_ordering=\"XX_0\")\n            mysave_quippy_xyz(atomsc_fakevac,text=\"1and2NN\")\n            for ii in NN_1_2_indices:\n                atomsc_fakevac[ii].symbol = i\n                mysave_quippy_xyz(atomsc_fakevac,text=\"1and2NN\")\n\n        # different compositions in 1NN shell\n        filling = [ 2,4,6,8,10]\n        for fi in filling:\n            atomsc_fakevac = mu.get_ase_atoms_object_kmc_al_si_mg_vac(ncell=5,nsi=0,nmg=0,nvac=1,a0=a0,cubic=False,create_fake_vacancy = True,normal_ordering=\"XX_0\")\n            mysave_quippy_xyz(atomsc_fakevac,text=\"1NN_diffcomp\")\n            for idx,ii in enumerate(NN_1_indices):\n                if idx < fi: ch = \"Mg\"\n                else: ch = \"Si\"\n                atomsc_fakevac[ii].symbol = ch\n            mysave_quippy_xyz(atomsc_fakevac,text=\"1NN_diffcomp\")\n\n\n        sys.exit()\n\n        #mu.ase_get_known_formats(show=True, add_missing_formats=False, copy_formats=False, verbose=False,show_formatspy=True)\n        for i in [ 'Mg', 'Si' ]:\n            for ii in [ 0,1,2,3,4,5]:\n                atomsc_fakevac = mu.get_ase_atoms_object_kmc_al_si_mg_vac(ncell=5,nsi=0,nmg=0,nvac=1,a0=a0,cubic=False,create_fake_vacancy = True,normal_ordering=i+'_'+str(ii))\n\n\n        sys.exit()\n\n\n    # show the input variables\n    print('--------------------------- check the input --------------------------------')\n    print('JOBS (nseeds)   ',nseeds,'(defined by -nseeds / or -seeds)')\n    print('seeds           ',seeds)\n    print('nsteps          ',nsteps)\n    print()\n    print('ncell           ',ncell,\"(\",atomsc.get_number_of_atoms(),\"atoms )\")\n    print('nsi             ',nsi,  \"(\",pcsi,\"at%)\")\n    print('nmg             ',nmg,\"(\",pcmg,\"at%)\")\n    print('nvac            ',nvac,\"(\",pcvac,\"at%)\")\n    print('a0              ',a0,\"angstrom\")\n    print('temp            ',temp,\"K\")\n    print()\n    print('mypot.pot       ',mypot.pot)\n    print('mypot.potpath   ',mypot.potpath)\n    print()\n    print('directory       ',directory)\n    print('submit          ',submit)\n    print('submitdebug     ',submitdebug)\n    print()\n    print('nodes           ',nodes)\n    print('ffsocket        ',ffsocket)\n    #print('python ver      ',sys.version_info[0])\n    #print()\n    print('--------------------------- check the input --------------------------------')\n    if submit == True or submitdebug == True:\n        mu.get_from_prompt_Yy_orexit(\"Are the input variables ok? 
[y]es: \")\n\n # make the directory\n if os.path.isdir(directory):\n mu.get_from_prompt_Yy_orexit(\"This main directory exists already, shall I add jobs? [y]es: \")\n mu.mkdir(directory)\n\n # create README.md\n IPI_COMMAND = os.environ[\"IPI_COMMAND\"]\n LAMMPS_COMMAND = os.environ[\"LAMMPS_COMMAND\"]\n mu.create_READMEtxt(directory,add=[\"# to start manually (1): python \"+IPI_COMMAND+\" input-runner.xml\",\"# to start manually (2):\"+LAMMPS_COMMAND+\" < in.lmp\"])\n\n for seed in seeds:\n\n # make jobdirectory\n jobdir = directory+'/seed'+str(seed)\n print('jobdir',jobdir)\n if os.path.exists(jobdir):\n sys.exit(\"jobdirectory \"+str(jobdir)+\" already exists!\")\n mu.mkdir(jobdir)\n\n # get data.lmp and data.ipi\n atomsc.write(jobdir+'/data.runnerformat.lmp',format='lammps-runner')\n atomsc_fakevac.write(jobdir+'/data.ipi',format='ipi')\n atomsc_fakevac.write(jobdir+'/data.extxyz',format='extxyz')\n #atomsc_fakevac.write(jobdir+'/data_fakevac.ipi',format='ipi')\n\n if testfiles == True:\n atomsc.write(jobdir+'/data.lmp',format='lammps-data')\n atomsc.write(jobdir+'/data.POSCAR',format='vasp')\n atomsc.write(jobdir+'/data.xyz',format='xyz')\n atomsc.write(jobdir+'/data.extxyz',format='extxyz')\n atomsc.write(jobdir+'/data.espresso-in',format='espresso-in')\n\n # create in.lmp\n ace = mu.ase_calculate_ene(pot=pot,potpath=mypot.potpath,\n units='eV',geopt=False,kmc=True,verbose=verbose)\n address = socket.gethostname()+\"_\"+os.path.basename(jobdir)\n print('address',address)\n ace.pot_get_and_ase_lmp_cmd(kmc=True,temp=temp,nsteps=nsteps,ffsocket=ffsocket,address=address)\n mu.lammps_write_inputfile(folder=jobdir,filename='in.lmp',positions='data.runnerformat.lmp',ace=ace)\n\n # create input-runner.xml (should be made without copying)\n mu.create_ipi_kmc_inputfile(jobdir,filename=\"input-runner.xml\",nsteps=nsteps,stride=100,seed=seed,a0=a0,ncell=ncell,nsi=nsi,nmg=nmg,nvac=nvac,neval=neval,temp=temp,nodes=nodes,address=address,testrun=test,cubic=args.cubic)\n\n # create submit-ipi-kmc.sh (should be made without copying)\n mu.create_submitskript_ipi_kmc(jobdir+\"/submit-ipi-kmc.sh\",nodes,ntasks,\n lmp_par=lmp_par,\n ipi_inst=ipi_inst,\n ffsocket=ffsocket,\n submittime_hours=submittime_hours,\n SBATCH=True)\n\n # create osubmit-ipi-kmc.sh (should be made without copying)\n mu.create_submitskript_ipi_kmc(jobdir+\"/osubmit-ipi-kmc.sh\",nodes,ntasks,\n lmp_par=lmp_par,\n ipi_inst=ipi_inst,\n ffsocket=ffsocket,\n submittime_hours=submittime_hours,\n SBATCH=False)\n\n # submit the job (execute either this or submit-ipi-kmc.sh_all3, not both)\n #mu.submitjob(submit=submit,submitdebug=submitdebug,jobdir=jobdir,submitskript=\"submit-ipi-kmc.sh\")\n\n # get submit-ipi-kmc.sh_all3 (should be made without copying)\n if nseeds == 3:\n mu.create_submitskript_ipi_kmc(directory+\"/submit-ipi-kmc.sh_all3\",nodes,ntasks,\n lmp_par=lmp_par,\n ipi_inst=ipi_inst,\n ffsocket=ffsocket,\n submittime_hours=submittime_hours,\n SBATCH=True,\n LOOPFOLDER=True)\n\n # submit the job (execute either this or submit-ipi-kmc.sh_all3, not both)\n #mu.submitjob(submit=submit,submitdebug=submitdebug,jobdir=directory,submitskript=\"submit-ipi-kmc.sh_all3\")\n if submit == True:\n mu.submitjob(submit_to_que=True,submit_to_debug_que=False,jobdir=directory,submitskript=\"submit-ipi-kmc.sh_all3\")\n\n\n print('done')\n return", "def run(params):\n jobs_config_file = os.path.join(CONFIG_PATH, 'jobs.yaml')\n\n jenkins_config_file = os.path.join(RESOURCE_PATH, 'jobs', 'config')\n\n jobs_path = os.path.join(RESOURCE_PATH, 'jobs')\n\n 
jobs = list(JobGenerator(jobs_config_file).jobs())\n\n if params.jobs:\n jobs = [job for job in jobs if fnmatch.fnmatch(job.name, params.jobs)]\n yaml_obj = [job.get_object() for job in jobs]\n if params.config:\n yaml_file = open(params.config, 'w')\n yaml_path = params.config\n else:\n yaml_file = tempfile.NamedTemporaryFile(\n prefix='libvirt_ci-jobs-', suffix='.yaml',\n dir=jobs_path, delete=False)\n yaml_path = yaml_file.name\n try:\n yaml.dump(yaml_obj, stream=yaml_file, indent=4,\n default_flow_style=False)\n yaml_file.close()\n\n if params.only_config:\n return\n\n cmd = \"jenkins-jobs\"\n cmd += \" --conf %s\" % jenkins_config_file\n if params.test:\n cmd += \" test\"\n else:\n cmd += \" update\"\n\n cmd += \" -r %s\" % jobs_path\n if params.jobs:\n cmd += \" %s\" % params.jobs\n # Ignore standard output of jenkins-job-builder\n cmd += \" > /dev/null\"\n\n utils.run(cmd, debug=True, ignore_fail=False, timeout=3600)\n finally:\n if params.only_config:\n LOGGER.info('Keep job file %s', yaml_path)\n else:\n try:\n LOGGER.info('Removing job file %s', yaml_path)\n os.remove(yaml_path)\n except (OSError, IOError) as details:\n LOGGER.warning('Failed to remove job file %s: %s',\n yaml_file.name, details)", "def getJob(uniq):\n return Job(Cuebot.getStub('job').GetJob(\n job_pb2.JobGetJobRequest(id=uniq), timeout=Cuebot.Timeout).job)", "def create_mission_and_run_job(\n self,\n name,\n directory,\n station_id,\n lz_id=None,\n description=\"\",\n source_storage_id=None,\n destination_storage_id=None,\n source_path=None,\n destination_path=None,\n mission_type_id=None,\n settings=None,\n ):\n request = CreateMissionRequest(\n name=name,\n description=description,\n source_storage_id=source_storage_id,\n destination_storage_id=destination_storage_id,\n source_path=source_path,\n destination_path=destination_path,\n mission_type_id=mission_type_id,\n settings=settings,\n )\n return self._missions_service.create_mission_and_run_job(\n request=request, directory=directory, station_id=station_id, lz_id=lz_id,\n )", "def create_training_job(TrainingJobName=None, HyperParameters=None, AlgorithmSpecification=None, RoleArn=None, InputDataConfig=None, OutputDataConfig=None, ResourceConfig=None, VpcConfig=None, StoppingCondition=None, Tags=None, EnableNetworkIsolation=None):\n pass", "def create_job_schedule(self):\n job_schedule_create = netapp_utils.zapi\\\n .NaElement.create_node_with_children(\n 'job-schedule-cron-create',\n **{'job-schedule-name': self.name})\n job_schedule_create.add_node_with_children(\n 'job-schedule-cron-minute',\n **{'cron-minute': str(self.job_minutes)})\n try:\n self.server.invoke_successfully(job_schedule_create,\n enable_tunneling=True)\n except netapp_utils.zapi.NaApiError as error:\n self.module.fail_json(msg='Error creating job schedule %s: %s'\n % (self.name, to_native(error)),\n exception=traceback.format_exc())", "def insert_job(sess, filetype, status, type_id, submission, job_id=None, filename=None,\n file_size=None, num_rows=None):\n job = Job(\n file_type_id=filetype,\n job_status_id=status,\n job_type_id=type_id,\n submission_id=submission,\n original_filename=filename,\n file_size=file_size,\n number_of_rows=num_rows\n )\n if job_id:\n job.job_id = job_id\n sess.add(job)\n sess.commit()\n return job", "def build(cfg, jobs, watch):\n libjobs.buildJobs(cfg, jobs, watch)", "def create_compilation_job(CompilationJobName=None, RoleArn=None, InputConfig=None, OutputConfig=None, StoppingCondition=None):\n pass", "def create_job_detail(company_name, job_title, 
application_deadline, job_listing_url, state, city, application_listed, salary):\n\n job_detail = JobDetail(company_name = company_name, job_title = job_title, application_deadline = application_deadline, job_listing_url = job_listing_url, state = state , city = city, application_listed = application_listed, salary = salary)\n db.session.add(job_detail)\n db.session.commit()\n\n return job_detail", "def add_job(self, data):\n job_id = str(uuid.uuid4()).replace('-', '')\n try:\n self._session.add(JobEntity(\n id=job_id,\n workflow_id=data['workflow_id'],\n name=data['name'],\n username=data['username'],\n work_uri=data['work_uri'],\n no_output_hash=data['no_output_hash'],\n inputs=data['inputs'],\n parameters=data['parameters'],\n output_uri=data['output_uri'],\n final_output=data['final_output'],\n exec_context=data['exec_context'],\n exec_method=data['exec_method'],\n exec_parameters=data['exec_parameters'],\n notifications=data['notifications']\n ))\n except SQLAlchemyError as err:\n Log.an().error('sql exception [%s]', str(err))\n return False\n\n return job_id", "def submit_job(rjc_api_client, publisher, job_request_body):\n response = rjc_api_client.submit(**job_request_body)\n job_id = str(response[\"job_id\"])\n\n log.info(\"submitted job:{0}\".format(job_id))\n message = {\n \"progress\": build_progress_message(running={\"total\": 1, \"job_ids\": [job_id]})\n }\n status_running = 1\n publisher.publish_workflow_status(\n job_request_body[\"workflow_uuid\"], status_running, message=message\n )\n return job_id\n\n # FIXME: Call `job_status = poll_job_status(rjc_api_client, job_id)` instead of\n # checking job success/failure via `jobfinished`/`jobfailed` files in .snakemake?\n # In that case we would probably need to implement our own `_wait_for_jobs` method.", "def make_job(self, script, factors=None):\n job = {'script': script}\n if factors is not None:\n job['factors'] = factors\n return job", "def create_job2(self, params, context=None):\n return self._client.call_method(\n 'UserAndJobState.create_job2',\n [params], self._service_ver, context)", "def test_create(self, client, job, agent_token):\n stage_url = '{base}/stages/teststage'.format(base=job_url_for(job))\n response = client.put(\n stage_url,\n headers={'x_dockci_api_key': agent_token},\n data={'success': 'true'},\n )\n\n assert response.status_code == 200 # TODO 201\n\n response_data = json.loads(response.data.decode())\n assert response_data.pop('success') == True\n\n response = client.get(stage_url)\n response_data = json.loads(response.data.decode())\n assert response_data.pop('success') == True", "def jobs(\n ctx: typer.Context,\n op_id: str = typer.Argument(\n ...,\n autocompletion=completion_op_id,\n callback=check_for_op_id,\n help=\"A valid op-id. e.g. get_markets_prices\",\n ),\n param_string: Optional[str] = typer.Option(\n None,\n \"--param-string\",\n \"-p\",\n help=\"Optional. Full or partial parameters as a json encoded dictionary string. \"\n \"Keys must be valid parameters for selected op_id.\",\n ),\n default_params: bool = typer.Option(\n False,\n \"-d\",\n \"--default-params\",\n help=\"Include all parameters that are required, or have default values. \"\n \"Missing values will be 'NOTSET'.\",\n ),\n callback_path: Optional[Path] = typer.Option(\n None,\n \"-c\",\n \"--callbacks\",\n help=\"Optional. Path to custom callbacks to be used. 
\",\n ),\n file_name: str = typer.Option(\n \"created-jobs/${esi_job_op_id}-${esi_job_uid}\",\n \"-n\",\n \"--file-name\",\n help=(\n \"File name for the new job, must be unique if multiple jobs. \"\n \"Can include directories, \"\n \"and the file type suffix will be added based on --format-id.\"\n ),\n ),\n data_path: Optional[Path] = typer.Option(\n None,\n \"--data-file\",\n \"-i\",\n help=(\n \"Optional. Path to json, csv, or yaml file with full or partial parameters. \"\n \"Must result in a list of dicts.\"\n ),\n ),\n format_id: FormatChoices = typer.Option(\n FormatChoices.json,\n \"-f\",\n \"--format-id\",\n show_choices=True,\n help=\"Output file format.\",\n ),\n path_out: Path = typer.Argument(\n \"./tmp\",\n help=\"Parent path for saving the new jobs, will be prepended to --file-name.\",\n ),\n):\n operation_manifest: OperationManifest = ctx.obj[\"operation_manifest\"]\n # path_out = optional_object(path_out, Path, \".\")\n if path_out.is_file:\n typer.BadParameter(\"path_out must not be a file.\")\n file_data: Optional[List[Dict]] = get_params_from_file(data_path)\n parameters: Dict = decode_param_string(param_string)\n if callback_path is None:\n callback_collection = default_callback_collection()\n else:\n callback_collection = load_callbacks(callback_path)\n jobs_: List[EsiJob] = []\n try:\n op_info = operation_manifest.op_info(op_id)\n if not file_data:\n job = op_info.create_job(\n parameters,\n callback_collection,\n include_default_params=default_params,\n # only_required_default_params=False,\n # allow_notset=False,\n )\n jobs_.append(job)\n else:\n for params in file_data:\n params.update(parameters)\n job = op_info.create_job(\n params,\n callback_collection,\n include_default_params=default_params,\n # only_required_default_params=False,\n # allow_notset=False,\n )\n jobs_.append(job)\n except Exception as ex:\n raise typer.BadParameter(\n f\"Exception creating job. {ex.__class__.__name__}: {ex}\"\n )\n for job in jobs_:\n file_path = resolve_job_file_path(job, file_name, path_out)\n try:\n save_path = job.serialize_file(file_path, format_id)\n except Exception as ex:\n raise typer.BadParameter(\n f\"Error saving job to {save_path}. 
{ex.__class__.__name__}, {ex}\"\n )\n logger.info(\"Saved job %s at %s\", job.uid, file_path)\n typer.echo(f\"{len(jobs_)} jobs saved to {path_out}\")\n report_finished_task(ctx)", "def createJob(self, executable ):\n thisfileFolderPath = os.path.dirname(inspect.getfile( inspect.currentframe() ))\n inp = open( os.path.join(thisfileFolderPath,\"job.oar.tpl\"), 'r')\n t = Template(inp.read())\n \n s = t.substitute(executable=executable)\n \n completePath = os.path.join(self.oarRunFolder,\"job.oar.sh\")\n outp = open(completePath, 'w')\n outp.write(s)\n outp.close()\n \n os.system('chmod +x ' + completePath)\n print \"OAR: created job file: \", completePath\n return completePath", "def create_project(name=None, defaultJobTimeoutMinutes=None):\n pass", "def update_job(self, job):\n call = subprocess.Popen(self.cli + [PlatformJenkinsJavaCLI.UPDATE_JOB, job.name], stdin=subprocess.PIPE)\n call.communicate(input=platform_ci.jjb.get_job_as_xml(job, self.template_dir))\n call.wait()\n if call.returncode != 0:\n raise PlatformJenkinsException(\"Updating job failed: \" + job.name)", "def create(\n cls,\n func: FunctionReferenceType,\n args: Union[List[Any], Optional[Tuple]] = None,\n kwargs: Optional[Dict[str, Any]] = None,\n connection: Optional['Redis'] = None,\n result_ttl: Optional[int] = None,\n ttl: Optional[int] = None,\n status: Optional[JobStatus] = None,\n description: Optional[str] = None,\n depends_on: Optional[JobDependencyType] = None,\n timeout: Optional[int] = None,\n id: Optional[str] = None,\n origin: str = '',\n meta: Optional[Dict[str, Any]] = None,\n failure_ttl: Optional[int] = None,\n serializer=None,\n *,\n on_success: Optional[Union['Callback', Callable[..., Any]]] = None, # Callable is deprecated\n on_failure: Optional[Union['Callback', Callable[..., Any]]] = None, # Callable is deprecated\n on_stopped: Optional[Union['Callback', Callable[..., Any]]] = None, # Callable is deprecated\n ) -> 'Job':\n if args is None:\n args = ()\n if kwargs is None:\n kwargs = {}\n\n if not isinstance(args, (tuple, list)):\n raise TypeError('{0!r} is not a valid args list'.format(args))\n if not isinstance(kwargs, dict):\n raise TypeError('{0!r} is not a valid kwargs dict'.format(kwargs))\n\n job = cls(connection=connection, serializer=serializer)\n if id is not None:\n job.set_id(id)\n\n if origin:\n job.origin = origin\n\n # Set the core job tuple properties\n job._instance = None\n if inspect.ismethod(func):\n job._instance = func.__self__\n job._func_name = func.__name__\n elif inspect.isfunction(func) or inspect.isbuiltin(func):\n job._func_name = '{0}.{1}'.format(func.__module__, func.__qualname__)\n elif isinstance(func, str):\n job._func_name = as_text(func)\n elif not inspect.isclass(func) and hasattr(func, '__call__'): # a callable class instance\n job._instance = func\n job._func_name = '__call__'\n else:\n raise TypeError('Expected a callable or a string, but got: {0}'.format(func))\n job._args = args\n job._kwargs = kwargs\n\n if on_success:\n if not isinstance(on_success, Callback):\n warnings.warn(\n 'Passing a string or function for `on_success` is deprecated, pass `Callback` instead',\n DeprecationWarning,\n )\n on_success = Callback(on_success) # backward compatibility\n job._success_callback_name = on_success.name\n job._success_callback_timeout = on_success.timeout\n\n if on_failure:\n if not isinstance(on_failure, Callback):\n warnings.warn(\n 'Passing a string or function for `on_failure` is deprecated, pass `Callback` instead',\n DeprecationWarning,\n )\n on_failure = 
Callback(on_failure) # backward compatibility\n job._failure_callback_name = on_failure.name\n job._failure_callback_timeout = on_failure.timeout\n\n if on_stopped:\n if not isinstance(on_stopped, Callback):\n warnings.warn(\n 'Passing a string or function for `on_stopped` is deprecated, pass `Callback` instead',\n DeprecationWarning,\n )\n on_stopped = Callback(on_stopped) # backward compatibility\n job._stopped_callback_name = on_stopped.name\n job._stopped_callback_timeout = on_stopped.timeout\n\n # Extra meta data\n job.description = description or job.get_call_string()\n job.result_ttl = parse_timeout(result_ttl)\n job.failure_ttl = parse_timeout(failure_ttl)\n job.ttl = parse_timeout(ttl)\n job.timeout = parse_timeout(timeout)\n job._status = status\n job.meta = meta or {}\n\n # dependency could be job instance or id, or iterable thereof\n if depends_on is not None:\n depends_on = ensure_list(depends_on)\n depends_on_list = []\n for depends_on_item in depends_on:\n if isinstance(depends_on_item, Dependency):\n # If a Dependency has enqueue_at_front or allow_failure set to True, these behaviors are used for\n # all dependencies.\n job.enqueue_at_front = job.enqueue_at_front or depends_on_item.enqueue_at_front\n job.allow_dependency_failures = job.allow_dependency_failures or depends_on_item.allow_failure\n depends_on_list.extend(depends_on_item.dependencies)\n else:\n depends_on_list.extend(ensure_list(depends_on_item))\n job._dependency_ids = [dep.id if isinstance(dep, Job) else dep for dep in depends_on_list]\n\n return job", "def launch_job(self,\n job_id: Text,\n project: Text,\n training_input: Dict[Text, Any],\n job_labels: Optional[Dict[Text, Text]] = None) -> None:\n\n parent = 'projects/{}'.format(project)\n job_spec = self._create_job_spec(job_id, training_input, job_labels)\n\n # Submit job to AIP Training\n logging.info('TrainingInput=%s', training_input)\n logging.info('Submitting job=\\'%s\\', project=\\'%s\\' to AI Platform.',\n job_id, parent)\n request = self._client.projects().jobs().create(\n body=job_spec, parent=parent)\n self._job_name = '{}/jobs/{}'.format(parent, job_id)\n request.execute()", "def _create_job_spec(\n self,\n job_id: Text,\n training_input: Dict[Text, Any],\n job_labels: Optional[Dict[Text, Text]] = None) -> Dict[Text, Any]:\n\n job_spec = {\n 'display_name': job_id,\n 'job_spec': training_input,\n 'labels': job_labels,\n }\n return job_spec", "def build_job(self, name, parameters=None, build_timeout=30, name_suffix=''):\n queue_item_id = self._server.build_job(name, parameters)\n logger.debug(\n 'Triggered build with queue item {queue_item}'.format(queue_item=queue_item_id))\n build_executable = wait_for(\n lambda: self._server.get_queue_item(queue_item_id).get('executable'),\n timeout=5,\n poll_interval=0.1)\n build_number = build_executable['number']\n logger.debug(\n \"Build of '{job}/{build_number}' started\".format(job=name, build_number=build_number))\n\n def get_build_info():\n build_info = self._server.get_build_info(name, build_number)\n if build_info.get('result'):\n return build_info\n else:\n return False\n\n build_info = wait_for(get_build_info, timeout=build_timeout)\n logger.debug(\n \"Build of '{job}/{build_number}' completed'\".format(\n job=name, build_number=build_number))\n\n def get_console_log():\n console_log = self._server.get_build_console_output(name, build_number)\n if 'Finished: {result}'.format(result=build_info['result']) in console_log:\n return console_log\n else:\n return False\n\n console_log = 
wait_for(get_console_log, timeout=10, poll_interval=1.0)\n\n # Get build info again to make sure everything including artifacts are included\n build_info = self._server.get_build_info(name, build_number)\n\n for line in console_log.split('\\n'):\n logger.debug(\n '{name}{name_suffix}: {line}'.format(name=name, name_suffix=name_suffix, line=line))\n return BuildInfo(\n build_number, build_info['result'], console_log,\n [artifact['relativePath'] for artifact in build_info['artifacts']])", "def submit_job(job_path, depends=None):\n\n # tell sbatch to work in dir containing script\n job_dir = os.path.dirname(job_path)\n cmd = ['sbatch', '--workdir='+job_dir]\n\n # add dependency if any\n if depends is not None:\n cmd.append('--dependency=afterok:'+depends)\n\n # run sbatch command\n cmd.append(job_path)\n out = subprocess.check_output(cmd, universal_newlines=True)\n\n # return job id\n job_id = out.rstrip('\\n').split(' ')[-1]\n return job_id", "def create(cls, process, *args, **kwargs):\r\n job = cls(process=process, *args, **kwargs)\r\n job.save()\r\n ret_tasks = []\r\n if job.status != 'finished':\r\n tasks = Task.objects.filter(is_active=True, process=process)\r\n ret_tasks = [JobTask.create(job, t) for t in tasks]\r\n return job, ret_tasks", "def __create_jobs_bin__(self):\n # | - __create_jobs_bin__\n folder_dir = os.path.join(self.root_dir, self.working_dir, \"jobs_bin\")\n # folder_dir = self.root_dir + \"/jobs_bin\"\n\n if not os.path.exists(folder_dir):\n # print(\"KDJFDI__\")\n # print(folder_dir)\n os.makedirs(folder_dir)\n # __|", "def start(self):\n url = \"/jobs.start\"\n payload = {\n \"id\": self.id\n }\n response = self.engine.request(\"POST\", url, json=payload)\n if response.status_code != 200:\n raise EngineError(\"Failed to upload the job payload. ({} - {})\".format(response.status_code, response.text[:100]))\n\n d = response.json()\n if d['ok'] is False:\n raise EngineError(\"Failed to upload the job payload. 
({})\".format(d['error']))\n\n self._update(d['job'])", "def create_dummy_build_result():\n\n date_time = datetime.utcnow()\n return BuildResults.create(\n job_name=\"my_jobname\",\n job_link=\"my_joburl\",\n build_date_time=str(date_time),\n build_id=\"1234\",\n platform=\"Linux-x86_64\",\n product=\"MyProduct\",\n )", "def _create_job_spec(\n self,\n job_id: Text,\n training_input: Dict[Text, Any],\n job_labels: Optional[Dict[Text, Text]] = None) -> Dict[Text, Any]:\n pass", "def create_job_id() -> str:\n return str(uuid.uuid1())", "async def create_pipeline_job(\n self,\n request: pipeline_service.CreatePipelineJobRequest = None,\n *,\n parent: str = None,\n pipeline_job: gca_pipeline_job.PipelineJob = None,\n pipeline_job_id: str = None,\n retry: retries.Retry = gapic_v1.method.DEFAULT,\n timeout: float = None,\n metadata: Sequence[Tuple[str, str]] = (),\n ) -> gca_pipeline_job.PipelineJob:\n # Create or coerce a protobuf request object.\n # Sanity check: If we got a request object, we should *not* have\n # gotten any keyword arguments that map to the request.\n has_flattened_params = any([parent, pipeline_job, pipeline_job_id])\n if request is not None and has_flattened_params:\n raise ValueError(\n \"If the `request` argument is set, then none of \"\n \"the individual field arguments should be set.\"\n )\n\n request = pipeline_service.CreatePipelineJobRequest(request)\n\n # If we have keyword arguments corresponding to fields on the\n # request, apply these.\n if parent is not None:\n request.parent = parent\n if pipeline_job is not None:\n request.pipeline_job = pipeline_job\n if pipeline_job_id is not None:\n request.pipeline_job_id = pipeline_job_id\n\n # Wrap the RPC method; this adds retry and timeout information,\n # and friendly error handling.\n rpc = gapic_v1.method_async.wrap_method(\n self._client._transport.create_pipeline_job,\n default_timeout=None,\n client_info=DEFAULT_CLIENT_INFO,\n )\n\n # Certain fields should be provided within the metadata header;\n # add these here.\n metadata = tuple(metadata) + (\n gapic_v1.routing_header.to_grpc_metadata(((\"parent\", request.parent),)),\n )\n\n # Send the request.\n response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)\n\n # Done; return the response.\n return response", "def _launch_job(self, job):\n details = self.sm.get_job_details(job.jobId)\n handler = self.handlers[details[0]['method']]\n type = details[0]['type']\n resultId = details[0]['resultid']\n job.set_phase('EXECUTING')\n job.set_start_time(datetime.utcnow().isoformat())\n job.add_result(resultId, 'http://localhost:8000/%s/%s/results/details' % (type, job.jobId))\n self.sm.update_job(job = job)\n self.threads.append(Future(handler, job.jobId, job))", "def _create_job_spec(\n self,\n job_id: Text,\n training_input: Dict[Text, Any],\n job_labels: Optional[Dict[Text, Text]] = None) -> Dict[Text, Any]:\n\n job_spec = {\n 'jobId': job_id,\n 'trainingInput': training_input,\n 'labels': job_labels,\n }\n return job_spec", "def create_template(issue, cpu_count, memory, work_dir, cmd):\n # Prepare SLURM shell script contents\n template = \"#!/bin/bash\\n\" \\\n \"#SBATCH -N 1\\n\" \\\n \"#SBATCH --ntasks={cpu_count}\\n\" \\\n \"#SBATCH --mem={memory}\\n\" \\\n \"#SBATCH --time=1-00:00\\n\" \\\n \"#SBATCH --job-name={jobid}\\n\" \\\n \"#SBATCH -o {work_dir}/job_%j.out\\n\" \\\n \"#SBATCH -e {work_dir}/job_%j.err\\n\" \\\n \"source /mnt/nas2/redmine/applications/.virtualenvs/OLCRedmineAutomator/bin/activate\\n\" \\\n 
\"{cmd}\".format(cpu_count=cpu_count,\n memory=memory,\n jobid=issue.id,\n work_dir=work_dir,\n cmd=cmd)\n\n # Path to SLURM shell script\n file_path = os.path.join(BIO_REQUESTS_DIR, str(issue.id), str(issue.id) + '_slurm.sh')\n\n # Write SLURM job to shell script\n with open(file_path, 'w+') as file:\n file.write(template)\n\n make_executable(file_path)\n\n return file_path", "def spawn_update_job(ip_address, headers, job_payload):\n job_id = -1\n job_url = 'https://%s/api/JobService/Jobs' % ip_address\n job_resp = requests.post(job_url, headers=headers,\n json=job_payload,\n verify=False)\n if job_resp.status_code == 201:\n job_id = (job_resp.json())['Id']\n print(\"Successfully spawned update job\", job_id)\n else:\n print(\"Unable to spawn update job .. Exiting\")\n return job_id", "def save_job(self, job):\n data = {\n 'class': 'Job',\n 'id': job.id,\n 'attrs': {\n 'type': job.type,\n 'task': job.task,\n 'command': job.command,\n 'status': job.status.value,\n 'runner': job.runner_id,\n },\n }\n self.db_client.send_request('update', json.dumps(data))", "def test_py_job_create(self):\n test_app = self._create_py_app()\n class_path = \"example_jobs.word_count.WordCountSparkSessionJob\"\n conf = \"input.strings = ['a', 'b', 'a', 'b']\"\n job = self._create_job(test_app, class_path, conf,\n ctx=self._get_functional_py_context())\n time.sleep(3)\n self.assertTrue(len(job.jobId) > 0)\n self.assertTrue(job.status == \"STARTED\")\n self._wait_till_job_is_done(job)", "def create_recurring_run(\n self,\n experiment_id: str,\n job_name: str,\n description: Optional[str] = None,\n start_time: Optional[str] = None,\n end_time: Optional[str] = None,\n interval_second: Optional[int] = None,\n cron_expression: Optional[str] = None,\n max_concurrency: Optional[int] = 1,\n no_catchup: Optional[bool] = None,\n params: Optional[dict] = None,\n pipeline_package_path: Optional[str] = None,\n pipeline_id: Optional[str] = None,\n version_id: Optional[str] = None,\n enabled: bool = True,\n enable_caching: Optional[bool] = None,\n service_account: Optional[str] = None,\n ) -> kfp_server_api.V1Job:\n\n job_config = self._create_job_config(\n experiment_id=experiment_id,\n params=params,\n pipeline_package_path=pipeline_package_path,\n pipeline_id=pipeline_id,\n version_id=version_id,\n enable_caching=enable_caching,\n )\n\n if all([interval_second, cron_expression\n ]) or not any([interval_second, cron_expression]):\n raise ValueError(\n 'Either interval_second or cron_expression is required')\n if interval_second is not None:\n trigger = kfp_server_api.models.V1Trigger(\n periodic_schedule=kfp_server_api.models.V1PeriodicSchedule(\n start_time=start_time,\n end_time=end_time,\n interval_second=interval_second))\n if cron_expression is not None:\n trigger = kfp_server_api.models.V1Trigger(\n cron_schedule=kfp_server_api.models.V1CronSchedule(\n start_time=start_time,\n end_time=end_time,\n cron=cron_expression))\n\n job_body = kfp_server_api.models.V1Job(\n enabled=enabled,\n pipeline_spec=job_config.spec,\n resource_references=job_config.resource_references,\n name=job_name,\n description=description,\n no_catchup=no_catchup,\n trigger=trigger,\n max_concurrency=max_concurrency,\n service_account=service_account)\n return self._job_api.create_job(body=job_body)", "def create_job(self,\n model_id: str,\n geolevel: Optional[str] = None,\n response_format: str = 'csv',\n sites: Optional[List[Tuple[float, float, str]]] = None,\n portfolio_id: Optional[str] = None,\n market_id: Optional[str] = None,\n geoid_list: 
Optional[List[int]] = None,\n buffers: Optional[List[str]] = None) -> None:\n assert isinstance(model_id, str), f'model_id must be str (was {model_id})'\n assert portfolio_id is None or isinstance(portfolio_id, str), \\\n f'portfolio_id must be str (was {portfolio_id})'\n assert market_id is None or isinstance(market_id, str), \\\n f'market_id must be str (was {market_id})'\n assert geolevel is None or isinstance(geolevel, str), \\\n f'geolevel must be str (was {geolevel})'\n assert response_format in {'csv', 'json'}\n assert geoid_list is None or isinstance(geoid_list, list)\n assert geoid_list is None or all(isinstance(geoid, int) for geoid in geoid_list)\n assert buffers is None or isinstance(buffers, list)\n assert buffers is None or all(buffer in cc.BUFFERS_TUPLE for buffer in buffers), \\\n f'Invalid buffers: {buffers} (must be from {cc.BUFFERS_TUPLE})'\n assert sites is None or isinstance(sites, list)\n assert sites is None or all(isinstance(site, tuple) for site in sites)\n\n # Must either be a geolevel, geoid_list combo, or a portfolio_id, or market_id\n if not geolevel and not any([portfolio_id, market_id]):\n raise ValueError('Job requires one of \"geolevel\", \"portfolio_id\", or \"market_id\"')\n\n if geolevel:\n assert geolevel in {\n 'US', 'METRO', 'GEOID2', 'GEOID5', 'ZIP', 'GEOID11', 'site-addresses'}, \\\n '\"geolevel\" must be one of \"US\", \"METRO\", \"GEOID2\", \"GEOID5\", \"ZIP\", \"GEOID11\"'\n assert portfolio_id is None, 'Cannot have both \"geolevel\" and \"portfolio_id\"'\n assert market_id is None, 'Cannot have both \"geolevel\" and \"market_id\"'\n\n if geoid_list is None:\n geoid_list = []\n\n if buffers is None:\n buffers = []\n\n if sites is None:\n sites = []\n\n if self._logging:\n print('Sending create job request to API service')\n\n r = requests.post(\n f'https://{cc.ROUTE_PREFIX}.stratodem.com/jobs/create',\n headers=dict(\n Authorization=f'Bearer {get_api_token()}',\n ),\n json=dict(\n model_id=model_id,\n portfolio_id=portfolio_id,\n market_id=market_id,\n geolevel=geolevel,\n response_format=response_format,\n geoid_list=geoid_list,\n buffers=buffers,\n sites=sites,\n )\n )\n\n res = r.json()\n if res['success']:\n self._job_id = res['message']['job_id']\n self._response_format = response_format\n\n if self._logging:\n print('Successfully created job request')\n else:\n raise APIQueryFailedException(res['message'])", "def getJob(workload):\n job = Job()\n job[\"task\"] = workload.getTask(\"reco\").getPathName()\n job[\"workflow\"] = workload.name()\n job[\"location\"] = \"T1_US_FNAL\"\n job[\"owner\"] = \"evansde77\"\n job[\"group\"] = \"DMWM\"\n return job", "def __init__(self, job: str=None): # noqa: E501\n self.swagger_types = {\n 'job': str\n }\n\n self.attribute_map = {\n 'job': 'job'\n }\n self._job = job", "def get_job(\n self, job_id: Union[str, int], *, params: Optional[dict] = None\n ) -> \"resource_types.Job\":\n\n return communicator.Job(self.__requester).from_id(\n job_id=job_id, parameters=params\n )", "def job(username, root_wf_id, wf_id, job_id, job_instance_id):\n dashboard = Dashboard(g.master_db_url, root_wf_id, wf_id)\n job = dashboard.get_job_information(wf_id, job_id, job_instance_id)\n job_states = dashboard.get_job_states(wf_id, job_id, job_instance_id)\n job_instances = dashboard.get_job_instances(wf_id, job_id)\n\n previous = None\n\n for state in job_states:\n timestamp = state.timestamp\n state.timestamp = datetime.fromtimestamp(state.timestamp).strftime('%a %b %d, %Y %I:%M:%S %p')\n\n if previous is None:\n 
state.interval = 0.0\n else:\n state.interval = timestamp - previous\n\n previous = timestamp\n\n if not job:\n return 'Bad Request', 400\n\n return render_template('workflow/job/job_details.html', root_wf_id=root_wf_id, wf_id=wf_id, job_id=job_id, job=job,\n job_instances=job_instances, job_states=job_states)", "def add_job(self, jobInfo, resultId, method, run = False):\n job = Job()\n job.set_job_info(jobInfo)\n jobid = self._get_job_id()\n job.set_job_id(jobid)\n if run:\n job.set_phase('QUEUED')\n else:\n job.set_phase('PENDING')\n self.sm.register_job(job.tostring(), job.jobId, phase = job.phase, resultid = resultId, method = method)\n if run:\n self._launch_job(job)\n return jobid", "def setup_classic_job(self, create_job_path=True, upload_id=None):\n upload = self.setup_upload(upload_id)\n oqp = OqParams()\n oqp.job_type = \"classical\"\n oqp.upload = upload\n oqp.region_grid_spacing = 0.01\n oqp.min_magnitude = 5.0\n oqp.investigation_time = 50.0\n oqp.component = \"gmroti50\"\n oqp.imt = \"pga\"\n oqp.truncation_type = \"twosided\"\n oqp.truncation_level = 3\n oqp.reference_vs30_value = 760\n oqp.imls = [\n 0.005, 0.007, 0.0098, 0.0137, 0.0192, 0.0269, 0.0376, 0.0527,\n 0.0738, 0.103, 0.145, 0.203, 0.284, 0.397, 0.556, 0.778]\n oqp.poes = [0.01, 0.10]\n oqp.realizations = 1\n from django.contrib.gis import geos\n oqp.region = geos.Polygon(\n ((-122.2, 38.0), (-121.7, 38.0), (-121.7, 37.5),\n (-122.2, 37.5), (-122.2, 38.0)))\n oqp.save()\n job = OqJob(oq_params=oqp, owner=upload.owner, job_type=\"classical\")\n job.save()\n if create_job_path:\n job.path = os.path.join(upload.path, str(job.id))\n os.mkdir(job.path)\n os.chmod(job.path, 0777)\n job.save()\n return job", "def createJobs():\n jobs_list = []\n for job in raw_jobs:\n cur_job = Job(int(job[0]), int(job[1]), int(job[2]))\n print(\"Created job: index:\", cur_job.number, \"Length:\", cur_job.length, \"Type\", cur_job.type, file=debug_file)\n jobs_list.append(cur_job)\n print(\"-----------------FINISHED CREATING JOB OBJECTS----------------------\\n\\n\", file=debug_file)\n return jobs_list", "def get_job(self) -> CustomJob:\n return self._client.get_custom_job(name=self._job_name)", "def add_job(state, county, start, end, categories, status=\"submitted\"):\n jid = _generate_jid()\n job_dict = _instantiate_job(jid, status, state, county, start, end, categories)\n _save_job(_generate_job_key(jid), job_dict)\n _queue_job(jid)\n return job_dict", "def add_job():\n content = request.get_json()\n print(content)\n DB.insert(collection='jobs', data=content)\n return ('', 204)", "def _create_jobstep(self, phase, phase_cmd, phase_path, weight, test_list, shard_count=1, force_create=False,\n cluster=None):\n test_names = ' '.join(test_list)\n label = md5(test_names).hexdigest()\n\n where = {\n 'job': phase.job,\n 'project': phase.project,\n 'phase': phase,\n 'label': label,\n }\n if force_create:\n # uuid is unique so forces JobStep to be created\n where['id'] = uuid.uuid4()\n\n step, created = get_or_create(JobStep, where=where, defaults={\n 'data': {\n 'cmd': phase_cmd,\n 'path': phase_path,\n 'tests': test_list,\n 'expanded': True,\n 'shard_count': shard_count,\n 'job_name': self.job_name,\n 'build_no': None,\n 'weight': weight,\n },\n 'status': Status.queued,\n 'cluster': cluster,\n })\n assert created or not force_create\n BuildStep.handle_debug_infra_failures(step, self.debug_config, 'expanded')\n db.session.add(step)\n return step", "def submit_job(configuration, repo_ref, ci_submit_url=None, ci_submit_token=None, **kwargs):\n 
if 'BUILD_RECIPE' not in configuration['variables']:\n        return\n    if not ci_submit_url:\n        ci_submit_url = _get_url_from_env_vars('trigger')\n\n    if not ci_submit_token:\n        ci_submit_token = os.getenv('TRIGGER_TOKEN')\n        if not ci_submit_token:\n            raise ValueError(\"Did not get value for TRIGGER_TOKEN. \"\n                             \"You must provide ci_submit_url arg if not \"\n                             \"running under a gitlab ci build. Also, you must \"\n                             \"set the TRIGGER_TOKEN secret environment \"\n                             \"variable for your project.\")\n    configuration.update({\n        'token': ci_submit_token,\n        'ref': repo_ref,\n    })\n\n    response = requests.post(ci_submit_url, json=configuration)\n    assert response.ok, \"Failed to submit job.  Error message was: %s\" % response.text\n    return response.json()['id']", "def flow_job(self, name=None, params=None):\n        # Note: Use -B to avoid permission problems with .pyc files created from commandline test\n        if self.func_name:\n            script = \"export PYTHONPATH=\" + test_tmp_dir + \"\\n\"\n            script += test_cfg.skip_job_load_sh_export_str() + \"\\n\"\n            # Supply dummy args for the py.test fixtures\n            dummy_args = ','.join(['0' for _ in range(self.func_num_params)])\n            script += \"python -Bc &quot;from jenkinsflow.test.\" + self.file_name.replace('.py', '') + \" import *; test_\" + self.func_name + \"(\" + dummy_args + \")&quot;\"\n        else:\n            script = \"python -B \" + jp(pseudo_install_dir, 'demo', self.file_name)\n        name = '0flow_' + name if name else '0flow'\n        self._jenkins_job(name, exec_time=0.5, params=params, script=script)\n        return (self.job_name_prefix or '') + name", "def _create(cls, builder_name, bucket=None, properties=None,\n              buildbot_changes=None, tags=None, critical=None):\n    if not isinstance(buildbot_changes, (types.NoneType, list)):\n      raise ValueError('buildbot_changes must be a list')\n\n    return cls(\n        bucket=bucket,\n        builder_name=builder_name,\n        properties=properties,\n        buildbot_changes=buildbot_changes,\n        tags=tags,\n        critical=bool(critical) if critical is not None else (True),\n    )", "def findJob(name):\n    return Job(Cuebot.getStub('job').FindJob(\n        job_pb2.JobFindJobRequest(name=name), timeout=Cuebot.Timeout).job)", "def from_training_job(cls, job: TrainingJob) -> Job:\n        return Job(\n            job.hyperparameters[\"model\"],\n            job.hyperparameters[\"dataset\"],\n            _extract_configuration(job),\n            _extract_performance(job),\n            source_job=job,\n        )", "def insert_job(self, process_id, camera, start, logname, version='1.0'):\n\n        camera = self.insert_camera(camera)\n\n        job = Job(\n            process_id=process_id,\n            camera_id=camera,\n            start=start,\n            logname=logname,\n            version=version\n        )\n        job.save()\n\n        return job", "def submit_slurm_job(redmine_instance, issue, work_dir, cmd, job_type, cpu_count=8, memory=12000):\n    # Set status of issue to In Progress\n    redmine_instance.issue.update(resource_id=issue.id,\n                                  status_id=2,\n                                  notes='Your {} job has been submitted to the OLC Slurm cluster.'.format(\n                                      job_type.upper()))\n\n    logging.info('Updated job status for {} to In Progress'.format(issue.id))\n\n    # Create shell script\n    slurm_template = create_template(issue=issue, cpu_count=cpu_count, memory=memory, work_dir=work_dir, cmd=cmd)\n\n    # Submit job to slurm\n    logging.info('Submitting job {} to Slurm'.format(issue.id))\n    os.system('sbatch ' + slurm_template)\n    logging.info('Output for {} is available in {}'.format(issue.id, work_dir))", "def launch_job(self,\n                 job_id: Text,\n                 project: Text,\n                 training_input: Dict[Text, Any],\n                 job_labels: Optional[Dict[Text, Text]] = None) -> None:\n\n    parent = 'projects/{project}/locations/{location}'.format(\n        
project=project, location=self._region)\n\n    job_spec = self._create_job_spec(job_id, training_input, job_labels)\n\n    # Submit job to AIP Training\n    logging.info('TrainingInput=%s', training_input)\n    logging.info('Submitting custom job=\\'%s\\', project=\\'%s\\''\n                 ' to AI Platform (Unified).', job_id, parent)\n    response = self._client.create_custom_job(parent=parent,\n                                              custom_job=job_spec)\n    self._job_name = response.name", "def do_add_job(self, args):\n\n        if self.hostname_list is None:\n            print(\"Error, no machines defined. Use the command 'machines'\")\n            return\n        if self.plugin is None:\n            print(\"Error, no plugins defined. Use the 'plugin set' command\")\n            return\n\n        print(\"***********************************\")\n        print(\"********* Job information *********\")\n        print(\"***********************************\")\n        print(\"Plugin: \", self.plugin.LEET_PG_NAME)\n        param = self.plugin.get_plugin_parameters()\n        if param is not None:\n            print(\"\\tParameters:\")\n            for name, value in param.items():\n                print(\"\\t\", name, \"=\", value)\n        print(\"***********************************\")\n        print(\"Amount of machines: \", len(self.hostname_list))\n        print(\"Machine list: \", \",\".join(self.hostname_list))\n        print(\"***********************************\")\n        print(\"The job(s) will be sent for processing.\")\n        confirm = input(\"Confirm? (y/n) \")\n        if confirm.strip().lower() == \"y\":\n            self._leet.schedule_jobs(self.plugin, self.hostname_list)\n            print(\"Job scheduled. Cleaning parameters.\")\n            self.hostname_list = None\n            self.plugin = None\n        else:\n            print(\"Job cancelled.\")", "def _register_job(self, job, client):\n        new_job = copy.deepcopy(job)\n        new_job['client_id'] = client['id']\n\n        return int(self.create_object(self.JOB_ENDPOINT, new_job))" ]
[ "0.7520048", "0.7107826", "0.7052509", "0.70495903", "0.7000261", "0.6887487", "0.68790656", "0.6827953", "0.6707598", "0.66782993", "0.6650939", "0.6482045", "0.6402906", "0.63344806", "0.6322465", "0.62909174", "0.6267784", "0.62481385", "0.6242675", "0.6240722", "0.62374854", "0.62191284", "0.6191502", "0.6134481", "0.61327434", "0.608689", "0.6004062", "0.592998", "0.5928104", "0.5915662", "0.59147674", "0.5881377", "0.5840418", "0.58124965", "0.57783353", "0.57725126", "0.57663435", "0.5750881", "0.573023", "0.5672515", "0.5659271", "0.5657142", "0.56433296", "0.56405723", "0.5627455", "0.56093276", "0.5607419", "0.560394", "0.560374", "0.5602201", "0.55818737", "0.55454504", "0.5543123", "0.55356413", "0.55333275", "0.55318844", "0.55088633", "0.5505038", "0.5501343", "0.5491168", "0.5466163", "0.5456733", "0.54533815", "0.5452962", "0.5448905", "0.5443788", "0.544157", "0.54260164", "0.5413926", "0.53922755", "0.53884244", "0.53783566", "0.5364843", "0.5342717", "0.5338266", "0.53294903", "0.53281516", "0.5328092", "0.5313313", "0.52990884", "0.5289232", "0.528801", "0.52822495", "0.528196", "0.52762175", "0.526974", "0.5268842", "0.5266522", "0.5260017", "0.5257366", "0.5256171", "0.524933", "0.52433294", "0.5242251", "0.5242115", "0.5238621", "0.5225548", "0.5191459", "0.518822", "0.51860344" ]
0.82524973
0
Update a given job on Jenkins.
def update_job(self, job): call = subprocess.Popen(self.cli + [PlatformJenkinsJavaCLI.UPDATE_JOB, job.name], stdin=subprocess.PIPE) call.communicate(input=platform_ci.jjb.get_job_as_xml(job, self.template_dir)) call.wait() if call.returncode != 0: raise PlatformJenkinsException("Updating job failed: " + job.name)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def update(cfg, jobs):\n server = jenkins_utils.server_factory(cfg)\n libjobs.updateJobs(server, jobs)", "def update(self, job_name, param_name, value, description=None):\n if job_name in self._jobs:\n getattr(self._jobs[job_name], param_name).update(value, description)\n else:\n self.log.error(\"Invalid job name: %s\", job_name)", "def update(self):\n self._log.debug(\"About to update job {0}\".format(self.id))\n resp = self._api.get_job(self.id)\n\n if resp.success:\n self.submission = self._format_submission(resp.result)\n return True\n\n else:\n raise resp.result", "def on_job_update(_job):\n nonlocal job\n job = _job", "def on_job_update(_job):\n nonlocal job\n job = _job", "def on_job_update(_job):\n nonlocal job\n job = _job", "def on_job_update(_job):\n nonlocal job\n job = _job", "def update_job(self, job, token, status, est_complete, context=None):\n return self._client.call_method(\n 'UserAndJobState.update_job',\n [job, token, status, est_complete], self._service_ver, context)", "def on_job_update(_job):\n nonlocal job, job_update_counter\n\n # Cancel the job when it updates in the `WORKING` state for the\n # second time. We do it just to be sure it is somewhere in the\n # middle of executions.\n if (job is not None and\n _job.state == job.state == 'WORKING'):\n my_job_gen.job_manager_class.cancel(job.id)\n\n job = _job\n job_update_counter += 1", "def put(self, job_id):\n form = request.get_json()\n try:\n the_job = jobs.find_job_by_id(job_id)\n if the_job is None:\n return get_message_json('任务不存在'), HTTPStatus.NOT_FOUND\n\n if the_job.account_id != current_user.account_id:\n return get_message_json('用户无法修改他人任务'), HTTPStatus.FORBIDDEN\n\n # The job state must be valid and can not go back\n form_job_state = form.get('job_state')\n if not(validate_job_state_code(form_job_state) and form_job_state >= the_job.job_state):\n return get_message_json('任务状态不合法'), HTTPStatus.BAD_REQUEST\n\n # Client can edit label id if and only if the job is 'unlabeled'\n form_label_id = form.get('label_id')\n if the_job.job_state == ConstantCodes.Unlabeled:\n if not form_label_id:\n return get_message_json('必须为该任务提供对应的标注'), HTTPStatus.BAD_REQUEST\n elif the_job.job_state == ConstantCodes.Labeling:\n # Can NOT change the label id\n if form_label_id is not None and form_label_id != the_job.label_id:\n return get_message_json('用户无法替换任务的标注'), HTTPStatus.FORBIDDEN\n elif the_job.job_state == ConstantCodes.Finished:\n return get_message_json('用户无法修改已完成的任务'), HTTPStatus.FORBIDDEN\n\n # Update finished date automatically when the job is updated to be finished\n finished_date = None\n if form_job_state == ConstantCodes.Finished:\n finished_date = datetime.date.today()\n\n if not form_label_id:\n form_label_id = the_job.label_id\n\n result = jobs.update_job_by_id(\n job_id,\n form_label_id,\n finished_date,\n form_job_state,\n the_job.image_id,\n the_job.account_id\n )\n if result == 1:\n json_res = form.copy()\n json_res['message'] = '成功编辑任务'\n\n return json_res, HTTPStatus.OK\n else:\n return get_message_json('未知的任务更新失败'), HTTPStatus.BAD_REQUEST\n\n except IntegrityError as err:\n if err.orig.args[0] == DBErrorCodes.FOREIGN_KEY_FAILURE:\n return get_message_json('指定的用户或标注不存在'), HTTPStatus.BAD_REQUEST\n else:\n return handle_internal_error(err.orig.args[1])\n except Exception as err:\n return handle_internal_error(str(err))", "def on_job_update(_job):\n nonlocal job, job_update_counter\n\n # Cancel the job when it updates in the `WORKING` state for the\n # second time. 
We do it just to be sure it is somewhere in the\n            # middle of execution.\n            if (job is not None and\n                    _job.state == job.state == 'WORKING'):\n                my_job_async_gen.job_manager_class.cancel(job.id)\n\n            job = _job\n            job_update_counter += 1", "def update(self) -> None:\n        self.previous_status = self.status\n\n        jobs = self._client.describe_jobs(jobs = [ self.id ])[\"jobs\"]\n\n        try:\n            self.state = jobs[0]\n        except IndexError:\n            raise ValueError(\"Invalid or unknown job id %s\" % self.id) from None", "def on_job_update(_job):\n            nonlocal job\n            job = _job\n            # Asserts that job is either pending or canceled.\n            assert job.state in ['PENDING', 'CANCELED'], (\n                'job that canceled immediately after submission has wrong '\n                'state `%s`' % job.state\n            )", "def on_job_update(_job):\n            nonlocal job\n            job = _job\n            # Asserts that job is either pending or canceled.\n            assert job.state in ['PENDING', 'CANCELED'], (\n                'Job that canceled immediately after submission has wrong '\n                f'state `{job.state}`!'\n            )", "def on_job_update(_job):\n            nonlocal job\n            job = _job\n            # Asserts that job is either pending or canceled.\n            assert job.state in ['PENDING', 'CANCELED'], (\n                'Job that canceled immediately after submission has wrong '\n                f'state `{job.state}`!')", "def on_job_update(_job):\n            nonlocal job\n            job = _job\n            # Asserts that job is either pending or canceled.\n            assert job.state in ['PENDING', 'CANCELED'], (\n                'Job that canceled immediately after submission has wrong '\n                f'state `{job.state}`!')", "def update_job_state(self, job):", "def update(self, request, pk=None):\n\n        job = Job.objects.get(pk=pk)\n        job.title = request.data[\"title\"]\n        job.description = request.data[\"description\"]\n        job.city = request.data[\"city\"]\n        job.state = request.data[\"state\"]\n        job.application = request.data[\"application\"]\n        user = request.auth.user\n        job.user = user\n        job.save()\n\n        return Response({}, status=status.HTTP_204_NO_CONTENT)", "def run_job(playerID, rF2root, job, config):\n    _j = Job(job, config)\n    # read the file to be edited\n    try:\n        _j.read_json_file_to_be_edited()\n        # do the edits\n        try:\n            _edit_count = _j.run_edits()\n            if _edit_count:\n                # if successful:\n                # backup 'filepath'\n                # save new contents to 'filepath'\n                _report = _j.backup_file()\n                _j.write()\n            else:\n                _report = ''\n            return _report\n        except (KeyError, ValueError, EmptyJsonError) as e:\n            raise JobFailedError\n    except JsonContentError:\n        raise FileNotFoundError", "def on_job_update(_job):\n            nonlocal job\n            job = _job\n\n            if job.state in ['WORKING', 'DONE', 'ERROR']:\n                canceled = my_job.job_manager_class.cancel(job.id)\n                assert not canceled, (\n                    f'Uncancelable job is canceled in the `{job.state}` state!')", "def update_job_metrics(self, job_id:int)->None:\n        with connection.cursor() as cursor:\n            cursor.execute(f\"SELECT update_job_metrics({job_id})\")\n            ##TODO: this should return something ", "def update_job_status(self, job_id, status):\n\n        if not (job_id and status):\n            raise ValueError(\"Please provide both job_id and status\")\n\n        job = self.get_mongo_util().get_job(job_id=job_id)\n        self._test_job_permissions(job, job_id, JobPermissions.WRITE)\n\n        job.status = status\n        with self.get_mongo_util().mongo_engine_connection():\n            job.save()\n\n        return str(job.id)", "def on_job_update(_job):\n            nonlocal job\n            job = _job\n\n            if 
job.state in ['DONE', 'ERROR', 'WORKING']:\n canceled = my_job_async.job_manager_class.cancel(job.id)\n assert not canceled, (\n f'Uncancelable job is canceled in the `{job.state}` state!')", "def job_status(bot, update, args, job_queue, chat_data):\n if len(args) == 0:\n update.message.reply_text('No parameter provided')\n return\n\n job_name = args[0]\n if job_name not in settings.JOBS:\n update.message.reply_text(\n 'Sorry {0} is not a valid job'.format(job_name))\n return\n\n job = find_job(job_name, job_queue)\n\n if not job:\n update.message.reply_text('{0} job is not running'.format(job_name))\n return\n\n update.message.reply_text('{0} job is running'.format(job_name))", "def job(self, job: str):\n\n self._job = job", "def job(self, job: str):\n\n self._job = job", "def _update(self):\n _logme.log('Updating job.', 'debug')\n self._updating = True\n if self.done or not self.submitted:\n self._updating = False\n return\n self.queue.update()\n if self.id:\n queue_info = self.queue[self.id]\n if queue_info:\n assert self.id == queue_info.id\n self.queue_info = queue_info\n self.state = self.queue_info.state\n if self.state == 'completed':\n if not self._got_exitcode:\n self.get_exitcode()\n if not self._got_times:\n self.get_times()\n self._updating = False", "def update_job_status(jid, new_status):\n jrd.hset(_generate_job_key(jid), 'status', new_status)", "def save_job(self, job):\n data = {\n 'class': 'Job',\n 'id': job.id,\n 'attrs': {\n 'type': job.type,\n 'task': job.task,\n 'command': job.command,\n 'status': job.status.value,\n 'runner': job.runner_id,\n },\n }\n self.db_client.send_request('update', json.dumps(data))", "def update_job_name(self, job_name):\n if strtobool(os.getenv(\"TP_UPDATE_JOB_NAME\")):\n logging.info(f\"Updating job name to: {job_name}\")\n try:\n response = self.send_request(\n \"PUT\",\n urljoin(self._remote_address, Endpoint.DevelopmentSession.value),\n {\"jobName\": job_name},\n )\n if not response.passed:\n logging.error(\"Failed to update job name\")\n except requests.exceptions.RequestException:\n logging.error(\"Failed to update job name\")", "def trigger_job(self, job, parameters=None):\n parameters = parameters or {}\n parameter_list = []\n for key in parameters:\n parameter_list.append(\"-p\")\n parameter_list.append(\"%s=%s\" % (key, parameters[key]))\n if subprocess.call(self.cli + [PlatformJenkinsJavaCLI.BUILD_JOB, job.name] + parameter_list) != 0:\n raise PlatformJenkinsException(\"Triggering job failed: \" + job.name)", "def job_updates(request, job):\n \n # Retrieve all of the job updates for the job, ordered by the time they\n # were created.\n updates = job.jobupdate_set.all().order_by('time')\n updates = [update.message for update in updates]\n \n # Delete the updates so that they are not sent multiple times.\n job.jobupdate_set.all().delete()\n \n # Send a response including the updates and indicating that the job is\n # not yet finished.\n data = {'Finished': False, 'Updates': updates}\n return HttpResponse(json.dumps(data))", "def __set_job(self, job: Job):\n\n serialized_job = self._serialize_entry(job)\n self.__delete_job_status(job)\n self.redis_client.set(f'job:{job.id}', serialized_job)\n self.__set_job_status(job)", "def update_all():\n req_data = request.get_json()\n jobs = JobModel.get_one_job(job_id)\n if not jobs:\n return custom_response({'Error': 'Job Not Found'}, 404)\n\n data, error = job_schema.load(req_data, partial=True)\n if error:\n return custom_response(error, 400)\n\n for job in jobs:\n job.update(data)\n 
job_message = job_schema.dump(job)\n\n return custom_response(job_message, 200)", "def update_job(self, job_id, end, status, output_path):\n\n # Close the DB connections\n django.db.connection.close()\n\n try:\n Job.objects.filter(id=job_id).update(\n end=end,\n status=status\n )\n\n qas = list()\n\n for product in glob.glob(output_path):\n qa = self.create_qa_bulk(product, job_id)\n if not qa:\n logger.warning('Error to create QA: {}'.format(product))\n continue\n\n qas.append(qa)\n\n QA.objects.bulk_create(qas)\n\n logger.info('Job {} updated.'.format(job_id))\n except Exception as err:\n logger.error('Job {} failed.'.format(job_id))\n logger.error(err)", "def job_update(request):\n try:\n\n if request.method == 'GET':\n query_dict = request.GET\n else:\n query_dict = json.loads(request.body)\n\n update = {}\n p_update = {}\n\n for key in ['t_id', 'file_link']:\n if key in query_dict:\n update['job.' + key] = query_dict[key]\n if 'status' in query_dict:\n p_update['job.status'] = {\n 'status': query_dict['status'],\n 'time': datetime.now()\n }\n\n for key in ['customer_count', 'sms_sent', 'sms_failed', 'errors']:\n if key in query_dict:\n update['job.report.' + key] = query_dict[key]\n\n if 'id' not in query_dict or not (update or p_update):\n return jsonResponse({\"success\": False, \"query\": query_dict, \"update\": update, \"p_update\": p_update})\n else:\n oid = query_dict['id']\n if oid.endswith('_segment'):\n oid = oid.replace('_segment', '')\n collection = db.segment_jobs\n else:\n collection = db.jobs\n\n final_update = {}\n if update:\n final_update[\"$set\"] = update\n if p_update:\n final_update[\"$push\"] = p_update\n\n collection.update_one({\"_id\": ObjectId(oid)}, final_update)\n return jsonResponse({\"success\": True})\n except Exception, e:\n return basic_error(e)", "def put(self, _id):\n payload = self.request.json\n # TODO: validate the json before updating the db\n self.app.db.jobs.update({'_id': int(_id)}, {'$set': {'status': payload.get('status'), 'activity': payload.get('activity')}})", "def update_job_status(jid, new_status):\n rd.hset(_generate_job_key(jid), 'status', new_status)", "def update(\n self,\n email,\n company_name,\n location,\n job_profile,\n salary,\n username,\n password,\n security_question,\n security_answer,\n notes,\n date_applied,\n status,\n):", "def edit_job_file(job_file_name, out_file_name, edits):\n o_job = JsonJobsFile()\n o_job._raw_read(job_file_name)\n o_job._edit_job_file(edits)\n o_job.write_as(out_file_name)", "def update(self, workspace, params={}, **options):\n path = \"/workspaces/%s\" % (workspace)\n return self.client.put(path, params, **options)", "def spawn_update_job(ip_address, headers, job_payload):\n job_id = -1\n job_url = 'https://%s/api/JobService/Jobs' % ip_address\n job_resp = requests.post(job_url, headers=headers,\n json=job_payload,\n verify=False)\n if job_resp.status_code == 201:\n job_id = (job_resp.json())['Id']\n print(\"Successfully spawned update job\", job_id)\n else:\n print(\"Unable to spawn update job .. 
Exiting\")\n return job_id", "def on_job_update(_job):\n nonlocal job, job_update_counter\n job = _job\n job_update_counter += 1\n # Make sure job state tells the job is cancelable when job is in\n # `PENDING`, `WORKING`, and `CANCELING` state and is not\n # cancelable in other states.\n if job.state in ['PENDING', 'WORKING', 'CANCELING']:\n assert job.is_cancelable, ('Job is not cancelable when it '\n 'must be cancelable!')\n else:\n assert not job.is_cancelable, ('Job is cancelable when it '\n 'must not be cancelable!')", "def on_job_update(_job):\n nonlocal job, job_update_counter\n job = _job\n job_update_counter += 1\n # Make sure job state tells the job is cancelable when job is in\n # `PENDING`, `WORKING`, and `CANCELING` state and is not\n # cancelable in other states.\n if job.state in ['PENDING', 'WORKING', 'CANCELING']:\n assert job.is_cancelable, ('Job is not cancelable when it '\n 'must be cancelable!')\n else:\n assert not job.is_cancelable, ('Job is cancelable when it '\n 'must not be cancelable!')", "def updateTorqueJob(self, job_id, new_state,job_notes):\n try:\n con = self.getSFFDatabaseConnection()\n job_id = con.cursor().callproc('update_torque_job', [job_id, new_state, job_notes[-4000:]])\n return job_id[0]\n except Exception, e:\n print 'Exception caught: %s.\\nThe error is: %s' % (type(e), e)", "def modify_job(self, job, parameter):\n job.set_encut(parameter[0])\n job.set_kpoints(parameter[1])\n return job", "def __setJobParam( self, name, value ):\n if not self.jobID:\n return S_ERROR( 'JobID not defined' )\n\n self.log.verbose( 'setJobParameter(%s, %s, %s)' % ( self.jobID, name, value ) )\n return RPCClient( 'WorkloadManagement/JobStateUpdate', timeout = 120 ).setJobParameter( int( self.jobID ), str( name ), str( value ) )", "def resurrectJob(job_id):\n \n with transaction() as t:\n t.cur.execute(\"\"\"update Hydra_rendertask \n set status = 'R' \n where job_id = '%d' and \n status = 'K' or status = 'F'\"\"\" % job_id)", "def set_saucelabs_job_status(jobid, passed=True):\r\n config = get_saucelabs_username_and_key()\r\n url = 'http://saucelabs.com/rest/v1/{}/jobs/{}'.format(config['username'], world.jobid)\r\n body_content = dumps({\"passed\": passed})\r\n base64string = encodestring('{}:{}'.format(config['username'], config['access-key']))[:-1]\r\n headers = {\"Authorization\": \"Basic {}\".format(base64string)}\r\n result = requests.put(url, data=body_content, headers=headers)\r\n return result.status_code == 200", "def update_building(request):\n body = json.loads(request.body)\n # Will be a dict representation of a hydrated building, incl pk.\n building = body.get('building')\n org_id = body['organization_id']\n canon = CanonicalBuilding.objects.get(pk=building['canonical_building'])\n old_snapshot = canon.canonical_snapshot\n\n new_building = models.update_building(old_snapshot, building, request.user)\n\n resp = {'status': 'success',\n 'child_id': new_building.pk}\n\n AuditLog.objects.log_action(request, canon, org_id, resp)\n return resp", "def update_bill(\n self, bill: Bill, reference: str, customer_id: int, jobs: List[Job]\n ) -> Bill:\n self.db.transaction()\n dateFormat = \"yyyy-MM-dd\"\n date = QDate.currentDate()\n query = QSqlQuery()\n query.prepare(\"UPDATE bill SET c_id=?, reference=?, bill_date=? 
WHERE id=?\")\n query.addBindValue(customer_id)\n query.addBindValue(reference)\n query.addBindValue(date.toString(dateFormat))\n query.addBindValue(bill.id)\n\n query.exec_()\n sql = \"delete from jobs where b_id='{0}'\".format(bill.id)\n query.exec_(sql)\n for j in jobs:\n self.add_job(j.price, j.number, j.text, bill)\n self.db.commit()\n return self.__fetch_bill(query, bill.id)", "def rescheduleJob(self, job):\n with self:\n with self.queues.jobsInProgress:\n with self.queues.jobsDone:\n try:\n index = self.queues.jobsInProgress.index(job)\n except ValueError, ex:\n raise BlackboardUpdateError(\"Job not found in jobsInProgress: \" +\n job.getProperty(Props.NAME, \"(unidentified)\"))\n job = self.queues.jobsInProgress.pop(index)\n self.queues.jobsAvailable.append(job)", "def updateJobDB(request,Q={}):\n\tuser = request.user\n\t# Get metadata\n\tresponse = agaveRequestMetadataList(user,Q=Q)\n\t# Add job if not in db\n\tfor metadata in response['result']:\n\t\tvalue = metadata['value']\n\t\tif 'jobName' in value and 'parameters' in value:\n\t\t\tlogger.info('SetName: ' + value['jobName'] + ', Parameters: [' + ', '.join(value['parameters']) + '], Length: ' + str(len(value['parameters'])))\n\t\t\tif len(value['parameters']) == 2: \n\t\t\t\tjobName = value['jobName']\n\t\t\t\tpara1name = value['parameters'][0]\n\t\t\t\tpara2name = value['parameters'][1]\n\t\t\t\tjobsInDB = Job.objects.filter(name=jobName)\n\n\t\t\t\t# Update status if not 'FINISHED'\n\t\t\t\tfor job in jobsInDB:\n\t\t\t\t\tif job.status not in ['FINISHED']:\n\t\t\t\t\t\tjobResponse = agaveRequestJobSearch(user,jobId=job.jobid)\n\t\t\t\t\t\tstatus = jobResponse['result'][0]['status']\n\t\t\t\t\t\tcolor = 'red'\n\t\t\t\t\t\tif status == 'FINISHED':\n\t\t\t\t\t\t\tcolor = 'blue'\n\t\t\t\t\t\telif status not in ['FINISHED','FAILED','STOPPED']: # Running\n\t\t\t\t\t\t\tcolor = 'orange'\n\t\t\t\t\t\t# else failed or stopped (color = 'red')\n\t\t\t\t\t\tjob.status = status\n\t\t\t\t\t\tjob.color = color\n\t\t\t\t\t\tjob.save()\n\n\t\t\t\t# Create new job entries\n\t\t\t\tjobsInDB = [job.jobid for job in Job.objects.filter(name=jobName)]\n\t\t\t\tjobsNotInDB = (set(jobsInDB) ^ set(metadata['associationIds'])) & set(metadata['associationIds'])\n\t\t\t\tfor jobId in jobsNotInDB:\n\t\t\t\t\tjobResponse = agaveRequestJobSearch(user,jobId=jobId)\n\t\t\t\t\tstatus = jobResponse['result'][0]['status']\n\t\t\t\t\tcolor = 'red'\n\t\t\t\t\tif status == 'FINISHED':\n\t\t\t\t\t\tcolor = 'blue'\n\t\t\t\t\telif status == 'RUNNING':\n\t\t\t\t\t\tcolor = 'orange'\n\t\t\t\t\tpara1value = value['paraValues'][jobId][para1name]\n\t\t\t\t\tpara2value = value['paraValues'][jobId][para2name]\n\t\t\t\t\tJob(name=jobName,\n\t\t\t\t\t\tjobid=jobId,\n\t\t\t\t\t\tuser=user,\n\t\t\t\t\t\tvalue=8,\n\t\t\t\t\t\tpara1name=para1name,\n\t\t\t\t\t\tpara1value=para1value,\n\t\t\t\t\t\tpara2name=para2name,\n\t\t\t\t\t\tpara2value=para2value,\n\t\t\t\t\t\tstatus=status,\n\t\t\t\t\t\tcolor=color).save()", "def record_task_update(job_name, task_id, version, task_info, cjrdb_conn, print_progress=False):\n if print_progress:\n print(\"Update Job...\")\n db_ses_obj = cjrdb_conn.get_db_session()\n\n qury_rslt = db_ses_obj.query(CJRTaskInfo).filter(CJRTaskInfo.JobName == job_name). \\\n filter(CJRTaskInfo.TaskID == task_id). \\\n filter(CJRTaskInfo.Version == version).one_or_none()\n\n if qury_rslt is not None:\n if qury_rslt.TaskCompleted:\n db_ses_obj.close()\n raise Exception(\"The task '{} - {} v{}' has already been finished - check inputs.\". 
\\\n format(job_name, task_id, version))\n\n update_time = datetime.datetime.now()\n task_updates_info = qury_rslt.TaskUpdates\n if task_updates_info is None:\n lcl_task_updates_info = dict()\n else:\n lcl_task_updates_info = copy.deepcopy(task_updates_info)\n lcl_task_updates_info[update_time.isoformat()] = task_info\n qury_rslt.TaskUpdates = lcl_task_updates_info\n else:\n db_ses_obj.close()\n raise Exception(\"The task '{} - {} v{}' could not be found - check inputs.\". \\\n format(job_name, task_id, version))\n\n db_ses_obj.commit()\n db_ses_obj.close()", "def modify_job(self, id, jobstore=None, **changes):\n\n fix_job_def(changes)\n\n if 'trigger' in changes:\n trigger, trigger_args = pop_trigger(changes)\n self._scheduler.reschedule_job(id, jobstore, trigger, **trigger_args)\n\n return self._scheduler.modify_job(id, jobstore, **changes)", "def updateJobData(self, jobName):\n self.jobRow.setText(jobName)\n self.updateSelectedLayer()", "def update_job_progress(self, job, token, status, prog, est_complete, context=None):\n return self._client.call_method(\n 'UserAndJobState.update_job_progress',\n [job, token, status, prog, est_complete], self._service_ver, context)", "def put(self, job):\n self.queue.put(job)", "def put(self, job):\n self.queue.put(job)", "def finish(ch, method, properties, body) -> Union[Job, None]:\n del ch, method, properties\n # todo: add error handling\n found_job = db.Jobs().get_by_id(body)\n if not found_job:\n return\n found_job.status = \"done\"\n return db.Jobs().update(found_job)", "def edit_job(request, job_id):\n job = get_object_or_404(Jobs, pk=job_id)\n\n if request.user.id != job.author.id:\n messages.error(request, 'You can only edit your own job profiles')\n return redirect(reverse('view_home'))\n\n if request.method == 'POST':\n form = JobsForm(request.POST, instance=job)\n if form.is_valid():\n form.save()\n messages.success(\n request, 'You have successfully updated the job profile!')\n return redirect(reverse('job_profile', args=[job.id]))\n else:\n messages.error(request,\n ('Could not update job profile. 
'\n 'Make sure you entered valid data.'))\n else:\n form = JobsForm(instance=job)\n messages.info(request, f'You are editing {job.title}')\n\n template = 'jobs/edit_job.html'\n context = {\n 'title': 'Edit job profile',\n 'form': form,\n 'job': job,\n }\n\n return render(request, template, context)", "def run(params):\n jobs_config_file = os.path.join(CONFIG_PATH, 'jobs.yaml')\n\n jenkins_config_file = os.path.join(RESOURCE_PATH, 'jobs', 'config')\n\n jobs_path = os.path.join(RESOURCE_PATH, 'jobs')\n\n jobs = list(JobGenerator(jobs_config_file).jobs())\n\n if params.jobs:\n jobs = [job for job in jobs if fnmatch.fnmatch(job.name, params.jobs)]\n yaml_obj = [job.get_object() for job in jobs]\n if params.config:\n yaml_file = open(params.config, 'w')\n yaml_path = params.config\n else:\n yaml_file = tempfile.NamedTemporaryFile(\n prefix='libvirt_ci-jobs-', suffix='.yaml',\n dir=jobs_path, delete=False)\n yaml_path = yaml_file.name\n try:\n yaml.dump(yaml_obj, stream=yaml_file, indent=4,\n default_flow_style=False)\n yaml_file.close()\n\n if params.only_config:\n return\n\n cmd = \"jenkins-jobs\"\n cmd += \" --conf %s\" % jenkins_config_file\n if params.test:\n cmd += \" test\"\n else:\n cmd += \" update\"\n\n cmd += \" -r %s\" % jobs_path\n if params.jobs:\n cmd += \" %s\" % params.jobs\n # Ignore standard output of jenkins-job-builder\n cmd += \" > /dev/null\"\n\n utils.run(cmd, debug=True, ignore_fail=False, timeout=3600)\n finally:\n if params.only_config:\n LOGGER.info('Keep job file %s', yaml_path)\n else:\n try:\n LOGGER.info('Removing job file %s', yaml_path)\n os.remove(yaml_path)\n except (OSError, IOError) as details:\n LOGGER.warning('Failed to remove job file %s: %s',\n yaml_file.name, details)", "def test_as_agent(self, client, job, agent_token):\n job_url = job_url_for(job)\n response = client.patch(\n job_url,\n headers={'x_dockci_api_key': agent_token},\n data={'commit': 'updated'},\n )\n\n assert response.status_code == 200\n\n response_data = json.loads(response.data.decode())\n assert response_data.pop('commit') == 'updated'\n\n response = client.get(job_url)\n response_data = json.loads(response.data.decode())\n assert response_data.pop('commit') == 'updated'", "def put_job_success(job, message):\n print('Putting job success')\n print(message)\n code_pipeline.put_job_success_result(jobId=job)", "def __set_job_status(self, job: Job):\n\n self.redis_client.set(f'jobstatus:{job.id}:{str(job.status)}', f'job:{job.id}')", "def do_job(self, job_id, job_param):\n raise NotImplementedError(\"should be implemented in inherited class\")", "def updateSauceName(self, name):\n if self.useSauce:\n sauce_client.jobs.update_job(self.driver.session_id, name=name)", "def Update(self, name, source_ref, destination_ref, args=None):\n self._ValidateArgs(args)\n\n current_mj = self._GetExistingMigrationJob(name)\n\n migration_job, update_fields = self._GetUpdatedMigrationJob(\n current_mj, source_ref, destination_ref, args)\n\n request_id = api_util.GenerateRequestId()\n update_req_type = (\n self.messages.DatamigrationProjectsLocationsMigrationJobsPatchRequest\n )\n update_req = update_req_type(\n migrationJob=migration_job,\n name=name,\n requestId=request_id,\n updateMask=','.join(update_fields)\n )\n\n return self._service.Patch(update_req)", "def assignJob(self, jobname):\n c = \"/cli:python /app:matrix /sys:1 /cmd:assignjob /job:\"+jobname.lower() # convert jobname to lowercase as workaround\n print \"Assigning \", jobname\n self.sendCMDstring(c)\n time.sleep(0.5)", "def submit(self, 
job_parameters):\n # FIX: Don't pass through the real job name. Bilby outputs the job files by whatever this parameter is, that\n # means that names containing special characters will break. Uniqueness is guaranteed by the folder structure\n job_parameters = json.loads(job_parameters)\n job_parameters['name'] = 'bilby'\n\n # Write the job parameters to a file\n json.dump(job_parameters, open(self.job_parameter_file, 'w'))\n\n # Run the job\n return super().submit(job_parameters)", "def put(self):\n data = IocManager.api.payload\n code = data.get('Code') #\n cron = data.get('Cron') #\n start_date = data.get('StartDate') #\n end_date = data.get('EndDate') #\n start_operation_result = self.job_operation_service.modify_job(code=code, cron=cron, start_date=start_date,\n end_date=end_date)\n if isinstance(start_operation_result, DataOperationJob):\n result = JobSchedulerModels.get_data_operation_job_model(start_operation_result)\n return CommonModels.get_response(result=result)\n else:\n message = start_operation_result\n return CommonModels.get_error_response(message=message)", "def job(username, root_wf_id, wf_id, job_id, job_instance_id):\n dashboard = Dashboard(g.master_db_url, root_wf_id, wf_id)\n job = dashboard.get_job_information(wf_id, job_id, job_instance_id)\n job_states = dashboard.get_job_states(wf_id, job_id, job_instance_id)\n job_instances = dashboard.get_job_instances(wf_id, job_id)\n\n previous = None\n\n for state in job_states:\n timestamp = state.timestamp\n state.timestamp = datetime.fromtimestamp(state.timestamp).strftime('%a %b %d, %Y %I:%M:%S %p')\n\n if previous is None:\n state.interval = 0.0\n else:\n state.interval = timestamp - previous\n\n previous = timestamp\n\n if not job:\n return 'Bad Request', 400\n\n return render_template('workflow/job/job_details.html', root_wf_id=root_wf_id, wf_id=wf_id, job_id=job_id, job=job,\n job_instances=job_instances, job_states=job_states)", "def setJobId(self, jobid):\n self._ShREEKConfig.setJobId(jobid)", "def update_job_status(self, job_id, status, msg):\n try:\n self._session.query(JobEntity).\\\n filter(JobEntity.id == job_id).\\\n update(\n {\n 'status': status,\n 'msg': case(\n [(JobEntity.msg == '', msg)],\n else_=JobEntity.msg+'|'+msg\n )\n },\n synchronize_session=False\n )\n except SQLAlchemyError as err:\n Log.an().error('sql exception [%s]', str(err))\n return False\n\n return True", "def on_job_update(_job):\n nonlocal job, job_update_counter\n job = _job\n job_update_counter += 1\n # Assert that simple job is reported as cancelable only when it\n # is in the `PENDING` or `CANCELING` state.\n if job.state in ['PENDING', 'CANCELING']:\n assert job.is_cancelable, ( # pylint: disable=no-member\n 'Job is not cancelable when it must be cancelable!')\n else:\n assert not job.is_cancelable, ( # pylint: disable=no-member\n 'Job is cancelable when it must not be cancelable!')", "def on_job_update(_job):\n nonlocal job, job_update_counter\n job = _job\n job_update_counter += 1\n # Assert that the job is reported as cancelable only when it is\n # in the `PENDING` or `CANCELING` state.\n if job.state in ['PENDING', 'CANCELING']:\n assert job.is_cancelable, ('Job is not cancelable when it '\n 'must be cancelable!')\n else:\n assert not job.is_cancelable, ('Job is cancelable when it '\n 'must not be cancelable!')", "def lookup(job_id: str) -> JobState:\n job = JobState(job_id)\n job.update()\n return job", "def updateRcloneJobStatus():\n global jobIds, jobStatusGauge\n\n # Check if the jobs are running, update the variables\n for 
jobName, jobId in jobIds.items():\n jobIsRunning = getRcloneJobRunning(jobId)\n jobIds[jobName] = jobId if jobIsRunning else None\n jobStatusGauge.labels(rclone_job=jobName).set(1 if jobIsRunning else 0)", "def update_put():\n try:\n update.launcher.start_async()\n except update.launcher.AlreadyInProgressError:\n # If an update is already in progress, treat it as success.\n pass\n except update.launcher.Error as e:\n return json_response.error(str(e)), 200\n return json_response.success()", "def delete_job(self, job):\n subprocess.call(self.cli + [PlatformJenkinsJavaCLI.DELETE_JOB, job.name])", "def set_wrapper(bot, update, args, job_queue, chat_data):\n chat_id = update.message.chat_id\n try:\n # args[0] should contain the time for the timer in seconds\n due = int(args[1])\n if due < 0:\n update.message.reply_text('Sorry we can not go back to future!')\n return\n\n job_name = args[0]\n if job_name not in settings.JOBS:\n update.message.reply_text(\n 'Sorry {0} is not a valid job'.format(job_name))\n return\n\n # Add job to queue\n job_queue.run_repeating(logparser_job, due, name=job_name, context=chat_id)\n\n update.message.reply_text('{0} job set!'.format(job_name))\n\n except (IndexError, ValueError):\n update.message.reply_text('Usage: /set <job_name> <seconds>')", "def job_exists(self, job):\n\n with open(os.devnull, 'w') as devnull:\n result = subprocess.call(self.cli + [PlatformJenkinsJavaCLI.GET_JOB, job.name], stdout=devnull)\n\n return result == 0", "def _addjob(self, job: Job):\n self._job = copy.deepcopy(job)\n self._job.restart = True", "def version_update(component_version, job_type):\n version_update_list = {\n \"validation_parameter.sample\": \"validation_parameter.yaml\",\n \"machine_detail.sample\": \"machine_detail.yaml\",\n }\n sample_file_data = Common.validation_param_detail(\n \"validation_parameter.sample\", \"jenkins_job_details\"\n )\n # The purpose of this line is to check the subversion of the job and update\n # their supported subversion in the validation_parameter.yaml\n try:\n if sample_file_data[f\"{job_type}\"][\n \"sub_version_count\"\n ] != component_version.count(\".\"):\n component_version = component_version[\n 0 : int(sample_file_data[f\"{job_type}\"][\"sub_version_count\"]) + 2\n ]\n Common.logger.info(f\"Supported Component version: {component_version}\")\n command = f\"sed -i 's/<component-version>/{component_version}/'\"\n except Exception as ex:\n Common.logger.error(\n f\"Component Version: {component_version} and Job Type \"\n f\"{job_type} is not supported and dump error {ex}\"\n )\n\n for tmp_file in version_update_list:\n if os.path.isfile(f\"{Common.config_path}/{version_update_list[tmp_file]}\"):\n os.remove(f\"{Common.config_path}/{version_update_list[tmp_file]}\")\n status = os.popen(\n f\"cp {Common.config_path}/{tmp_file} {Common.config_path}/\"\n f\"{version_update_list[tmp_file]}\"\n )\n status.close()\n file_name = f\"{Common.config_path}/{version_update_list[tmp_file]}\"\n status = os.popen(f\"{command} {file_name}\")\n status.close()\n Common.logger.info(\n \"Version updated successfully in runtime generated validation yaml file\"\n )", "def update_info(self, *args, **kwargs):\n # Create the layout with the information\n self.info_widgets_list = [\n urwid.Text('ID: {}'.format(self.job.id)),\n urwid.Divider('='),\n urwid.Text('Command: {}'.format(self.job.command)),\n urwid.Text('Status: {}'.format(self.job.status))\n ]\n\n if self.job.status == JobStatus.FAILED: # If job has failed add error reason (if available)\n if 'Error reason' 
in self.job.metadata:\n self.info_widgets_list.append(urwid.Text('Possible error reason: {}'.format(self.job.metadata['Error reason'])))\n\n # Add button with the option available depending on the job status\n if self.job.status in [JobStatus.DONE, JobStatus.FAILED]:\n self.info_widgets_list.append(urwid.Padding(JobResubmitWidget(self.job, callback=self.resubmit), align='center', left=4, right=2))\n self.info_widgets_list.append(urwid.Divider('-'))\n elif self.job.status != JobStatus.UNSUBMITTED:\n self.info_widgets_list.append(create_button('Kill', self.terminate))\n self.info_widgets_list.append(urwid.Divider('-'))\n\n self.metadata_widgets_list = []\n self.metadata_widgets_list.append(urwid.Text('Retries: {}'.format(self.job.retries)))\n self.metadata_widgets_list.append(urwid.Divider())\n # Add resources requested by the job\n requested_resources = 'Specific requested resources:\\n'\n requested_resources += ' '+str(self.job.params).replace('\\n', '\\n ')\n self.metadata_widgets_list.append(urwid.Text(requested_resources))\n\n # If usage information is available, display it\n if 'usage' in self.job.metadata:\n self.metadata_widgets_list.append(urwid.Divider())\n used_resources = 'Used resources:\\n'\n used_resources += \"\\n\".join([\" {} = {}\".format(k, v) for k, v in self.job.metadata['usage'].items()])\n self.metadata_widgets_list.append(urwid.Text(used_resources))\n\n self.file_widgets_list = [] # Reset files widget\n # Create widget with the files if the job has failed\n if self.job.status == JobStatus.FAILED:\n # Generate widgets with stdout and stderr if available. Done here because Failed state is \"absolute\"\n stdout_widget = self._load_file_as_widget(self.job.f_stdout, 'stdout')\n if stdout_widget is not None:\n self.file_widgets_list.append(stdout_widget)\n self.file_widgets_list.append(urwid.Divider('*'))\n stderr_widget = self._load_file_as_widget(self.job.f_stderr, 'stderr')\n if stderr_widget is not None:\n self.file_widgets_list.append(stderr_widget)\n self.file_widgets_list.append(urwid.Divider('*'))", "def test_normal_user_trying_to_update_job(self):\n normal_user = self.user_login.become_normal_user()\n job = Job.objects.create(name='somejob',\n workflow_version=self.workflow_version,\n job_order={},\n user=normal_user,\n share_group=self.share_group,\n job_settings=self.vm_job_settings,\n job_flavor=self.job_flavor,\n )\n url = reverse('v2-admin_job-list') + '{}/'.format(job.id)\n response = self.client.put(url, format='json',\n data={\n 'state': Job.JOB_STATE_FINISHED,\n 'step': Job.JOB_STEP_RUNNING,\n })\n self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)", "def edit_jobs(self, modified_jobs: dict, notify=True) -> requests.Response:\n\n data = {\n \"jobs\": modified_jobs,\n \"notify\": notify\n }\n response = self._api_call(method=\"POST\", endpoint=EDIT_JOBS_ENDPOINT, data=data)\n return response", "def _update_state(self, job_id):\n self.logger.info(\"updating 'timestamp' in profile state\")\n # get current state ...\n with open(self.state_file, \"r\") as json_current:\n state = json.load(json_current)\n json_current.close()\n # ... 
and write new timestamp\n with open(self.state_file, \"w\") as json_new:\n state[\"timestamp\"] = job_id\n json.dump(state, json_new, indent=4)\n json_new.close()", "def update_scheduler_job(self, body):\n try:\n self.logger.info('update_scheduler_job called.')\n\n # Validate required parameters\n self.logger.info(\n 'Validating required parameters for update_scheduler_job.')\n self.validate_parameters(body=body)\n\n # Prepare query URL\n self.logger.info('Preparing query URL for update_scheduler_job.')\n _url_path = '/public/scheduler'\n _query_builder = self.config.get_base_uri()\n _query_builder += _url_path\n _query_url = APIHelper.clean_url(_query_builder)\n\n # Prepare headers\n self.logger.info('Preparing headers for update_scheduler_job.')\n _headers = {\n 'accept': 'application/json',\n 'content-type': 'application/json; charset=utf-8'\n }\n\n # Prepare and execute request\n self.logger.info(\n 'Preparing and executing request for update_scheduler_job.')\n _request = self.http_client.put(\n _query_url,\n headers=_headers,\n parameters=APIHelper.json_serialize(body))\n AuthManager.apply(_request, self.config)\n _context = self.execute_request(_request, name='update_scheduler_job')\n\n # Endpoint and global error handling using HTTP status codes.\n self.logger.info('Validating response for update_scheduler_job.')\n if _context.response.status_code == 0:\n raise RequestErrorErrorException('Error', _context)\n self.validate_response(_context)\n\n # Return appropriate type\n return APIHelper.json_deserialize(_context.response.raw_body,\n SchedulerProto_SchedulerJob.from_dictionary)\n\n except Exception as e:\n self.logger.error(e, exc_info=True)\n raise", "def job_status(job_id):\n job_db = JobDb()\n job = job_db.get_job_by_id(job_id)\n job_db.close()\n\n if job is None:\n raise ApiError(\n \"job_not_found\",\n f\"Job '{job_id}' not found\",\n 404)\n\n job['duration'] = str(datetime.timedelta(\n seconds=int((job['updated'] - job['created']).total_seconds())))\n return jsonify(job)", "def update(self, customerguid, name=\"\", login=\"\", password=\"\", email=\"\", address=\"\", vat=\"\", jobguid=\"\", executionparams=None):", "def put_job(self, job):\n\n # Handle the job, depending on its command\n if job.command == \"UPLD_INIT\":\n self.handle_upld_init(job)\n elif job.command == \"UPLD_DATA\":\n self.handle_upld_data(job)\n elif job.command == \"DWLD\":\n self.handle_dwld(job)\n elif job.command == \"DELF_INIT\":\n self.handle_delf_init(job)\n elif job.command == \"DELF_CONF\":\n self.handle_delf_conf(job)\n elif job.command == \"LIST\":\n self.handle_list(job)\n else:\n print(\"Unrecognised command: {}\".format(job.command))\n return", "def unset_wrapper(bot, update, args, job_queue, chat_data):\n if len(args) == 0:\n update.message.reply_text('No parameter provided')\n return\n\n job_name = args[0]\n if len(args) == 0 or job_name not in settings.JOBS:\n update.message.reply_text(\n 'Sorry {0} is not a valid job'.format(job_name))\n return\n\n job = find_job(job_name, job_queue)\n\n if not job:\n update.message.reply_text('You have no active job')\n return\n\n job.schedule_removal()\n\n update.message.reply_text('{0} job successfully unset!'.format(job_name))", "def update(repository, args, **_):\n _log(repository, 'INFO', \"Going to build commit %s\" % args[2][:7])", "def handle_wps_update(self, data):\n\n self.jobs = data", "def _update_status(self, status: dict):\n with generate_retry_session() as session:\n session.headers.update({\n 'Authorization': 'Bearer 
{}'.format(self.platform_auth_token)\n })\n url = '{}/training/definitions/{}/jobs/{}/status'.format(\n ORGANIZATION_ENDPOINT, self.job_definition_name, self.training_job_id)\n res = session.put(url, json=status)\n res.raise_for_status()", "def enable_job(self, job):\n if subprocess.call(self.cli + [PlatformJenkinsJavaCLI.ENABLE_JOB, job.name]) != 0:\n raise PlatformJenkinsException(\"Enabling job failed: \" + job.name)", "def WaitForJobChange(self, job_id, fields, prev_job_info, prev_log_serial):\n body = {\n \"fields\": fields,\n \"previous_job_info\": prev_job_info,\n \"previous_log_serial\": prev_log_serial,\n }\n\n return self._SendRequest(HTTP_GET,\n \"/%s/jobs/%s/wait\" % (GANETI_RAPI_VERSION, job_id),\n None, body)", "def statusJob(self, job):\n with self.thread_lock:\n name = job.name\n job_container = self.shared_dags[job]\n job_dag = job_container.getDAG()\n\n # If there is no timing, then the job is not finished\n if job_container.getTime():\n job_container.addCaveat('time: ' + job_container.getTime())\n if job.getResult() == False:\n self.active.remove(job)\n self.killJobs()\n return\n else:\n self.job_queue_count -= 1\n job_dag.delete_node(job)\n self.active.remove(job)\n if self.args.download_only:\n result = ' -Downloaded | '\n else:\n result = ' --Finished | '\n\n else:\n result = ' Launching | '\n\n # Format job name length field\n name_cnt = (self.term_width - len(job.name)) + 2 # 2 character buffer\n result = strftime(\"%H:%M\") + result + job.name + ' '*name_cnt\n\n # Format caveat length\n caveats = job_container.getCaveats()\n caveat_cnt = self.max_caveat_length - len(caveats)\n\n if caveats:\n result = result + caveats + ' '*caveat_cnt\n else:\n result = result + ' '*caveat_cnt\n\n remaining = job_dag.size()\n print(result, \"remaining: %-3d active: %-2d\" % (remaining, len(self.active)), [x.name for x in self.active])" ]
[ "0.7519566", "0.7305608", "0.72748035", "0.67824453", "0.67824453", "0.67824453", "0.67824453", "0.65911585", "0.6502356", "0.64618725", "0.6382063", "0.6357814", "0.6337168", "0.6310358", "0.63080525", "0.63080525", "0.63080525", "0.62912005", "0.6251998", "0.61053956", "0.60982627", "0.6090971", "0.60133094", "0.59893095", "0.5970912", "0.59670943", "0.59670943", "0.5961652", "0.5913073", "0.58726716", "0.5823741", "0.58226806", "0.5806307", "0.580494", "0.5801942", "0.5771702", "0.57708997", "0.57669145", "0.5755634", "0.57287174", "0.5727152", "0.57182544", "0.5688315", "0.5675842", "0.5675842", "0.56533724", "0.5630738", "0.55824524", "0.5551395", "0.55383724", "0.55307746", "0.5512827", "0.5508813", "0.5505984", "0.5474247", "0.54738176", "0.54328", "0.54287976", "0.5424651", "0.5424651", "0.5395153", "0.5377329", "0.5376543", "0.5356064", "0.5356033", "0.5354332", "0.5340026", "0.53389126", "0.53324294", "0.5317509", "0.5314302", "0.530992", "0.5280797", "0.5263436", "0.52470416", "0.5241911", "0.52184236", "0.5206861", "0.5199536", "0.519446", "0.519307", "0.51857656", "0.5184755", "0.5184012", "0.51676315", "0.51578367", "0.51551396", "0.5144303", "0.5131591", "0.5127142", "0.5124444", "0.51123023", "0.50989366", "0.5095796", "0.5094917", "0.5090301", "0.50891083", "0.5082784", "0.50760657", "0.5073132" ]
0.8326795
0
Updates a job build description.
def set_build_description(self, job_name, build, description): try: subprocess.check_call(self.cli + [PlatformJenkinsJavaCLI.SET_DESCRIPTION, job_name, build, description]) except subprocess.CalledProcessError: message = "Setting build description failed (job={0}, build={1}, description='{2}')".format(job_name, build, description) raise PlatformJenkinsException(message)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def set_current_build_description(self, description):\n job_name = os.environ.get(\"JOB_NAME\", None)\n build_id = os.environ.get(\"BUILD_NUMBER\", None)\n if job_name is not None and build_id is not None:\n self.set_build_description(job_name, build_id, description)", "def update_experiment_description(self, experiment_id, description):\n return self.dbclient.update_by_id(Tables.EXPERIMENTS, experiment_id, {\n ExperimentAttr.DESC: description\n })", "def update(self, job_name, param_name, value, description=None):\n if job_name in self._jobs:\n getattr(self._jobs[job_name], param_name).update(value, description)\n else:\n self.log.error(\"Invalid job name: %s\", job_name)", "def update_info(self, *args, **kwargs):\n # Create the layout with the information\n self.info_widgets_list = [\n urwid.Text('ID: {}'.format(self.job.id)),\n urwid.Divider('='),\n urwid.Text('Command: {}'.format(self.job.command)),\n urwid.Text('Status: {}'.format(self.job.status))\n ]\n\n if self.job.status == JobStatus.FAILED: # If job has failed add error reason (if available)\n if 'Error reason' in self.job.metadata:\n self.info_widgets_list.append(urwid.Text('Possible error reason: {}'.format(self.job.metadata['Error reason'])))\n\n # Add button with the option available depending on the job status\n if self.job.status in [JobStatus.DONE, JobStatus.FAILED]:\n self.info_widgets_list.append(urwid.Padding(JobResubmitWidget(self.job, callback=self.resubmit), align='center', left=4, right=2))\n self.info_widgets_list.append(urwid.Divider('-'))\n elif self.job.status != JobStatus.UNSUBMITTED:\n self.info_widgets_list.append(create_button('Kill', self.terminate))\n self.info_widgets_list.append(urwid.Divider('-'))\n\n self.metadata_widgets_list = []\n self.metadata_widgets_list.append(urwid.Text('Retries: {}'.format(self.job.retries)))\n self.metadata_widgets_list.append(urwid.Divider())\n # Add resources requested by the job\n requested_resources = 'Specific requested resources:\\n'\n requested_resources += ' '+str(self.job.params).replace('\\n', '\\n ')\n self.metadata_widgets_list.append(urwid.Text(requested_resources))\n\n # If usage information is available, display it\n if 'usage' in self.job.metadata:\n self.metadata_widgets_list.append(urwid.Divider())\n used_resources = 'Used resources:\\n'\n used_resources += \"\\n\".join([\" {} = {}\".format(k, v) for k, v in self.job.metadata['usage'].items()])\n self.metadata_widgets_list.append(urwid.Text(used_resources))\n\n self.file_widgets_list = [] # Reset files widget\n # Create widget with the files if the job has failed\n if self.job.status == JobStatus.FAILED:\n # Generate wigets with stdout and stderr if available. 
Done here because Failed state is \"absolute\"\n stdout_widget = self._load_file_as_widget(self.job.f_stdout, 'stdout')\n if stdout_widget is not None:\n self.file_widgets_list.append(stdout_widget)\n self.file_widgets_list.append(urwid.Divider('*'))\n stderr_widget = self._load_file_as_widget(self.job.f_stderr, 'stderr')\n if stderr_widget is not None:\n self.file_widgets_list.append(stderr_widget)\n self.file_widgets_list.append(urwid.Divider('*'))", "def edit_description(self, task, new_description):\n raise ValueError(\"cannot edit description in 'In Progress' status\")", "def request_description_update():\n global should_update_description\n should_update_description = True", "def update_job(self, job):\n call = subprocess.Popen(self.cli + [PlatformJenkinsJavaCLI.UPDATE_JOB, job.name], stdin=subprocess.PIPE)\n call.communicate(input=platform_ci.jjb.get_job_as_xml(job, self.template_dir))\n call.wait()\n if call.returncode != 0:\n raise PlatformJenkinsException(\"Updating job failed: \" + job.name)", "async def slashtag_edit_description(\n self, ctx: commands.Context, tag: GuildTagConverter, *, description: str\n ):\n await ctx.send(await tag.edit_description(description))", "def description(self, new_description):\r\n self.set({\"description\": new_description})", "def setDescription(self, description):\n url = G.api + self.testId + '/snapshots/' + self.hash\n self.info = requests.put(url, auth=(G.username, G.authkey), data={'description':description})", "def updateJobData(self, jobName):\n self.jobRow.setText(jobName)\n self.updateSelectedLayer()", "def submission_update_description(request, submission_pk):\n try:\n submission = models.CompetitionSubmission.objects.get(pk=submission_pk)\n if submission.participant.user != request.user:\n raise Http404()\n submission.description = request.POST.get('updated_description')\n submission.save()\n return HttpResponse()\n except models.CompetitionSubmission.DoesNotExist:\n raise Http404()", "def update(self):\n self._log.debug(\"About to update job {0}\".format(self.id))\n resp = self._api.get_job(self.id)\n\n if resp.success:\n self.submission = self._format_submission(resp.result)\n return True\n\n else:\n raise resp.result", "def describe_text_translation_job(JobId=None):\n pass", "def set_longdescription(self, longdesc):\n self.longdescription(longdesc)", "def update_description_debounced(self, value):\n self.update_description(value)", "def update(cfg, jobs):\n server = jenkins_utils.server_factory(cfg)\n libjobs.updateJobs(server, jobs)", "def EventContentMissionExcelAddDescription(builder, Description):\n return AddDescription(builder, Description)", "def update_description(self, option, desc):\n _, command = self.__options[option]\n self.__options[option] = (desc, command)", "def long_description(self, long_description):\n self._long_description = long_description", "def get_job_description(self, job, context=None):\n return self._client.call_method(\n 'UserAndJobState.get_job_description',\n [job], self._service_ver, context)", "def update_description(self, host, baseUrl, description):\n self._host = host\n self._urlBase = baseUrl\n self._description = description\n return", "def update_model_description(\n self, content: Dict[str, Any], model_name: str, is_already_documented: bool = False\n ) -> Dict[str, Any]:\n message = f\"Do you want to write a description for {model_name}\"\n if is_already_documented:\n message = f\"Do you want to change the model description of {model_name}\"\n model_doc_payload: List[Mapping[str, Any]] = 
[\n {\n \"type\": \"confirm\",\n \"name\": \"wants_to_document_model\",\n \"message\": message,\n \"default\": True,\n },\n {\n \"type\": \"text\",\n \"name\": \"model_description\",\n \"message\": \"Please write down your description:\",\n },\n ]\n user_input = UserInputCollector(\"model\", model_doc_payload).collect()\n if user_input.get(\"model_description\", None):\n for model in content.get(\"models\", []):\n if model[\"name\"] == model_name:\n model[\"description\"] = user_input[\"model_description\"]\n return content", "def put(self, job_id):\n form = request.get_json()\n try:\n the_job = jobs.find_job_by_id(job_id)\n if the_job is None:\n return get_message_json('任务不存在'), HTTPStatus.NOT_FOUND\n\n if the_job.account_id != current_user.account_id:\n return get_message_json('用户无法修改他人任务'), HTTPStatus.FORBIDDEN\n\n # The job state must be valid and can not go back\n form_job_state = form.get('job_state')\n if not(validate_job_state_code(form_job_state) and form_job_state >= the_job.job_state):\n return get_message_json('任务状态不合法'), HTTPStatus.BAD_REQUEST\n\n # Client can edit label id if and only if the job is 'unlabeled'\n form_label_id = form.get('label_id')\n if the_job.job_state == ConstantCodes.Unlabeled:\n if not form_label_id:\n return get_message_json('必须为该任务提供对应的标注'), HTTPStatus.BAD_REQUEST\n elif the_job.job_state == ConstantCodes.Labeling:\n # Can NOT change the label id\n if form_label_id is not None and form_label_id != the_job.label_id:\n return get_message_json('用户无法替换任务的标注'), HTTPStatus.FORBIDDEN\n elif the_job.job_state == ConstantCodes.Finished:\n return get_message_json('用户无法修改已完成的任务'), HTTPStatus.FORBIDDEN\n\n # Update finished date automatically when the job is updated to be finished\n finished_date = None\n if form_job_state == ConstantCodes.Finished:\n finished_date = datetime.date.today()\n\n if not form_label_id:\n form_label_id = the_job.label_id\n\n result = jobs.update_job_by_id(\n job_id,\n form_label_id,\n finished_date,\n form_job_state,\n the_job.image_id,\n the_job.account_id\n )\n if result == 1:\n json_res = form.copy()\n json_res['message'] = '成功编辑任务'\n\n return json_res, HTTPStatus.OK\n else:\n return get_message_json('未知的任务更新失败'), HTTPStatus.BAD_REQUEST\n\n except IntegrityError as err:\n if err.orig.args[0] == DBErrorCodes.FOREIGN_KEY_FAILURE:\n return get_message_json('指定的用户或标注不存在'), HTTPStatus.BAD_REQUEST\n else:\n return handle_internal_error(err.orig.args[1])\n except Exception as err:\n return handle_internal_error(str(err))", "def update(repository, args, **_):\n _log(repository, 'INFO', \"Going to build commit %s\" % args[2][:7])", "def _append_descriptions(self, issue, dep_name, dep_latest_version):\n logging.info(\"Updating JIRA issue {0} to track {1} upgrade process\".format(\n issue.key,\n dep_name))\n description = issue.fields.description + \"\"\"\\n\\n{0}\\n\n Please review and upgrade the {1} to the latest version {2} \\n \n cc: \"\"\".format(\n datetime.today(),\n dep_name,\n dep_latest_version\n )\n _, owners = self._find_owners(dep_name)\n for owner in owners:\n description += \"[~{0}], \".format(owner)\n try:\n self.jira.update_issue(issue, description=description)\n except Exception as e:\n traceback.print_exc()\n logging.error(\"Failed updating issue: \"+ str(e))", "def update_job_state(self, job):", "def build_description(self):\n return self._build_description", "def description(self, newDescription=None):\n pass", "def update(self) -> None:\n self.previous_status = self.status\n\n jobs = self._client.describe_jobs(jobs = [ 
self.id ])[\"jobs\"]\n\n try:\n self.state = jobs[0]\n except IndexError:\n raise ValueError(\"Invalid or unknown job id %s\" % self.id) from None", "def update_progress(job_title, progress):\n \n length = 20 # modify this to change the length\n block = int(round(length*progress))\n msg = \"\\r{0}: [{1}] {2}%\".format(job_title, \"#\"*block + \"-\"*(length-block), round(progress*100, 2))\n if progress >= 1: msg += \" DONE\\r\\n\"\n sys.stdout.write(msg)\n sys.stdout.flush()", "def set_desc(self, item_desc):\r\n self.description = item_desc", "def set_description(self, room_description):\n self.description = room_description", "def description(request):\n if request.method != 'POST':\n description = request.issue.description or \"\"\n return HttpTextResponse(description)\n if not request.issue.edit_allowed:\n if not IS_DEV:\n return HttpTextResponse('Login required', status=401)\n issue = request.issue\n issue.description = request.POST.get('description')\n issue.put()\n return HttpTextResponse('')", "def SetDescription(self, description):\n self.description = str(description)", "def test_update_task_description(\n self,\n mock_config_load,\n mock_custom_objects_api,\n mock_core_v1_api,\n ):\n task_id = util.MOCK_UUID_5\n rv = TEST_CLIENT.patch(\n f\"/tasks/{task_id}\",\n json={\n \"description\": \"valid description\",\n },\n )\n\n self.assertEqual(rv.status_code, 200)", "def statusJob(self, job):\n with self.thread_lock:\n name = job.name\n job_container = self.shared_dags[job]\n job_dag = job_container.getDAG()\n\n # If there is no timing, then the job is not finished\n if job_container.getTime():\n job_container.addCaveat('time: ' + job_container.getTime())\n if job.getResult() == False:\n self.active.remove(job)\n self.killJobs()\n return\n else:\n self.job_queue_count -= 1\n job_dag.delete_node(job)\n self.active.remove(job)\n if self.args.download_only:\n result = ' -Downloaded | '\n else:\n result = ' --Finished | '\n\n else:\n result = ' Launching | '\n\n # Format job name length field\n name_cnt = (self.term_width - len(job.name)) + 2 # 2 character buffer\n result = strftime(\"%H:%M\") + result + job.name + ' '*name_cnt\n\n # Format caveat length\n caveats = job_container.getCaveats()\n caveat_cnt = self.max_caveat_length - len(caveats)\n\n if caveats:\n result = result + caveats + ' '*caveat_cnt\n else:\n result = result + ' '*caveat_cnt\n\n remaining = job_dag.size()\n print(result, \"remaining: %-3d active: %-2d\" % (remaining, len(self.active)), [x.name for x in self.active])", "def ModifyDescriptionOfCost(idOfCost, description):\n\n logs.logger.debug(\"Start to modify description of Cost based on the ID.\")\n try:\n modifiedCost = session.query(Cost.Cost).filter(\n Cost.Cost.id == idOfCost).one()\n modifiedCost.description = description\n session.commit()\n logs.logger.info(\"Modify description of Cost based on the ID.\")\n except Exception as e:\n logs.logger.error(e, exc_info=True)", "def long_description(self, long_description: str):\n\n self._long_description = long_description", "def description(self, description):\n self._description = description", "def description(self, description):\n self._description = description", "def description(self, description):\n self._description = description", "def description(self, description):\n self._description = description", "def set_description(self, description):\n self.description = description\n if not self.record:\n return\n self.mdb.results.update({'_id':self.result_id}, \n {'$set':{'test_case':description}})", "def 
description(self, value):\n self._update_values('description', value)", "def record_task_update(job_name, task_id, version, task_info, cjrdb_conn, print_progress=False):\n if print_progress:\n print(\"Update Job...\")\n db_ses_obj = cjrdb_conn.get_db_session()\n\n qury_rslt = db_ses_obj.query(CJRTaskInfo).filter(CJRTaskInfo.JobName == job_name). \\\n filter(CJRTaskInfo.TaskID == task_id). \\\n filter(CJRTaskInfo.Version == version).one_or_none()\n\n if qury_rslt is not None:\n if qury_rslt.TaskCompleted:\n db_ses_obj.close()\n raise Exception(\"The task '{} - {} v{}' has already been finished - check inputs.\". \\\n format(job_name, task_id, version))\n\n update_time = datetime.datetime.now()\n task_updates_info = qury_rslt.TaskUpdates\n if task_updates_info is None:\n lcl_task_updates_info = dict()\n else:\n lcl_task_updates_info = copy.deepcopy(task_updates_info)\n lcl_task_updates_info[update_time.isoformat()] = task_info\n qury_rslt.TaskUpdates = lcl_task_updates_info\n else:\n db_ses_obj.close()\n raise Exception(\"The task '{} - {} v{}' could not be found - check inputs.\". \\\n format(job_name, task_id, version))\n\n db_ses_obj.commit()\n db_ses_obj.close()", "def set_description(self, desc: str) -> None:\n self.metadata.data[\"description\"] = desc", "def description(self, description) :\n\t\ttry :\n\t\t\tself._description = description\n\t\texcept Exception as e:\n\t\t\traise e", "def on_job_update(_job):\n nonlocal job\n job = _job", "def on_job_update(_job):\n nonlocal job\n job = _job", "def on_job_update(_job):\n nonlocal job\n job = _job", "def on_job_update(_job):\n nonlocal job\n job = _job", "def description(self, description):\n\n self._set_field(\"description\", description)", "def format_description(self, project_name, description):\n description = description if description else ''\n return \"%s %s\" % (project_name, '- ' + description)", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def 
description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def on_job_update(_job):\n nonlocal job\n job = _job\n # Asserts that job is either pending or canceled.\n assert job.state in ['PENDING', 'CANCELED'], (\n 'job that canceled immediately after submission has wrong '\n 'state `%s`' % job.state\n )", "def on_job_update(_job):\n nonlocal job\n job = _job\n # Asserts that job is either pending or canceled.\n assert job.state in ['PENDING', 'CANCELED'], (\n 'Job that canceled immediately after submission has wrong '\n f'state `{job.state}`!')", "def on_job_update(_job):\n nonlocal job\n job = _job\n # Asserts that job is either pending or canceled.\n assert job.state in ['PENDING', 'CANCELED'], (\n 'Job that canceled immediately after submission has wrong '\n f'state `{job.state}`!')", "def on_job_update(_job):\n nonlocal job\n job = _job\n # Asserts that job is either pending or canceled.\n assert job.state in ['PENDING', 'CANCELED'], (\n 'Job that canceled immediately after submission has wrong '\n f'state `{job.state}`!')", "def add_description(self, desc):\n self.description = desc", "def set_description(desc):\n global last_description\n last_description = desc" ]
[ "0.72318727", "0.60756296", "0.60743475", "0.6020576", "0.5999859", "0.59925103", "0.59728664", "0.5708703", "0.5699203", "0.56206816", "0.55843204", "0.55488455", "0.55029446", "0.548009", "0.5469594", "0.5459312", "0.54585123", "0.5456848", "0.5452424", "0.54208547", "0.5387264", "0.53862894", "0.5376971", "0.5344236", "0.5311184", "0.53074163", "0.5272898", "0.5266738", "0.5258574", "0.52426124", "0.5228408", "0.52200145", "0.52162826", "0.5213882", "0.520929", "0.5199732", "0.51992756", "0.51985145", "0.5197823", "0.51970696", "0.51970696", "0.51970696", "0.51970696", "0.5196171", "0.5187595", "0.5163801", "0.51601183", "0.51529735", "0.5152791", "0.5152791", "0.5152791", "0.5152791", "0.51497936", "0.51467294", "0.5144789", "0.5144789", "0.5144789", "0.5144789", "0.5144789", "0.5144789", "0.5144789", "0.5144789", "0.5144789", "0.5144789", "0.5144789", "0.5144789", "0.5144789", "0.5144789", "0.5144789", "0.5144789", "0.5144789", "0.5144789", "0.5144789", "0.5144789", "0.5144789", "0.5144789", "0.5144789", "0.5144789", "0.5144789", "0.5144789", "0.5144789", "0.5144789", "0.5144789", "0.5144789", "0.5144789", "0.5144789", "0.5144789", "0.5144789", "0.5144789", "0.5144789", "0.5144789", "0.5144789", "0.5144789", "0.5144789", "0.5137229", "0.51294523", "0.51294523", "0.51294523", "0.5127644", "0.51251036" ]
0.7354544
0
Updates a job build description for the current build. This method is intended to be run in an environment where JOB_NAME and BUILD_NUMBER are set in the environment, such as from within the job build itself. If either of the environment variables is not set, setting the description is not attempted at all.
def set_current_build_description(self, description): job_name = os.environ.get("JOB_NAME", None) build_id = os.environ.get("BUILD_NUMBER", None) if job_name is not None and build_id is not None: self.set_build_description(job_name, build_id, description)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def set_build_description(self, job_name, build, description):\n try:\n subprocess.check_call(self.cli + [PlatformJenkinsJavaCLI.SET_DESCRIPTION, job_name, build, description])\n except subprocess.CalledProcessError:\n message = \"Setting build description failed (job={0}, build={1}, description='{2}')\".format(job_name,\n build,\n description)\n raise PlatformJenkinsException(message)", "def update_experiment_description(self, experiment_id, description):\n return self.dbclient.update_by_id(Tables.EXPERIMENTS, experiment_id, {\n ExperimentAttr.DESC: description\n })", "def request_description_update():\n global should_update_description\n should_update_description = True", "def update_description(self, host, baseUrl, description):\n self._host = host\n self._urlBase = baseUrl\n self._description = description\n return", "def description(request):\n if request.method != 'POST':\n description = request.issue.description or \"\"\n return HttpTextResponse(description)\n if not request.issue.edit_allowed:\n if not IS_DEV:\n return HttpTextResponse('Login required', status=401)\n issue = request.issue\n issue.description = request.POST.get('description')\n issue.put()\n return HttpTextResponse('')", "def edit_description(self, task, new_description):\n raise ValueError(\"cannot edit description in 'In Progress' status\")", "def set_variable_value():\n\n mp_rp_conf_file = 'entitlement-tests/CCI/ReportPortal/mp_rp_conf.json'\n \n # 1. Set project name which is just the test product name with upper case letter\n cmd = \"sed -i -e 's/PROJECT_NAME/{0}/g' {1}\".format(test_product.upper(), mp_rp_conf_file)\n (ret, output) = commands.getstatusoutput(cmd)\n \n # 2. Set launch name\n # Launch name examples - Errata-49798_RHEL7_Server_x86_64_Full_ProdCDN; Errata-53717_RHEL8_x86_64_Full_ProdCDN\n cmd = \"sed -i -e 's/LAUNCH_NAME/{0}/g' {1}\".format(get_launch_name(), mp_rp_conf_file)\n (ret, output) = commands.getstatusoutput(cmd)\n \n # 3. Set variables value in description of launch\n # a) Set Errata url in description of launch\n errata_url = \"[{0}](https:\\/\\/errata.devel.redhat.com\\/advisory\\/{1})\".format(errata_id, errata_id)\n cmd = \"sed -i -e 's/ERRATA_URL/{0}/g' {1}\".format(errata_url, mp_rp_conf_file)\n (ret, output) = commands.getstatusoutput(cmd)\n \n # b) Set jenkins job url in description of launch\n build_id = build_url.strip('/').split('/')[-1]\n build_url_str = \"[{0}]({1})\".format(build_id, build_url.replace(\"/\",\"\\/\"))\n \n cmd = \"sed -i -e 's/BUILD_URL/{0}/g' {1}\".format(build_url_str, mp_rp_conf_file)\n (ret, output) = commands.getstatusoutput(cmd)\n \n # 4. 
Set launch tag\n # Tag examples - OpenStack16; Ceph3; CNV2\n cmd = \"cat product_version.txt\"\n (ret, output) = commands.getstatusoutput(cmd)\n \n cmd = \"sed -i -e 's/LAUNCH_TAG/{0}{1}/g' {2}\".format(test_product, output, mp_rp_conf_file)\n (ret, output) = commands.getstatusoutput(cmd)", "def format_description(self, project_name, description):\n description = description if description else ''\n return \"%s %s\" % (project_name, '- ' + description)", "def set_description(desc):\n global last_description\n last_description = desc", "def update(self, job_name, param_name, value, description=None):\n if job_name in self._jobs:\n getattr(self._jobs[job_name], param_name).update(value, description)\n else:\n self.log.error(\"Invalid job name: %s\", job_name)", "def set_description(self, room_description):\n self.description = room_description", "def set_description(self, description):\n self.description = description\n if not self.record:\n return\n self.mdb.results.update({'_id':self.result_id}, \n {'$set':{'test_case':description}})", "def get_job_description(self, job, context=None):\n return self._client.call_method(\n 'UserAndJobState.get_job_description',\n [job], self._service_ver, context)", "def set_description(self, desc: str) -> None:\n self.metadata.data[\"description\"] = desc", "def EventContentMissionExcelAddDescription(builder, Description):\n return AddDescription(builder, Description)", "def update_model_description(\n self, content: Dict[str, Any], model_name: str, is_already_documented: bool = False\n ) -> Dict[str, Any]:\n message = f\"Do you want to write a description for {model_name}\"\n if is_already_documented:\n message = f\"Do you want to change the model description of {model_name}\"\n model_doc_payload: List[Mapping[str, Any]] = [\n {\n \"type\": \"confirm\",\n \"name\": \"wants_to_document_model\",\n \"message\": message,\n \"default\": True,\n },\n {\n \"type\": \"text\",\n \"name\": \"model_description\",\n \"message\": \"Please write down your description:\",\n },\n ]\n user_input = UserInputCollector(\"model\", model_doc_payload).collect()\n if user_input.get(\"model_description\", None):\n for model in content.get(\"models\", []):\n if model[\"name\"] == model_name:\n model[\"description\"] = user_input[\"model_description\"]\n return content", "def buildname(self, env_prop=None):\n if self._buildname is not None:\n return self._buildname\n try:\n platform, build = env_prop['chipName'], env_prop['switchppVersion']\n except (KeyError, TypeError):\n message = 'Cannot determine build name'\n self.class_logger.warning(message)\n self._buildname = self.UNDEFINED_BUILD\n else:\n self.platform = platform\n self.build = build\n name_iter = (MODULES[_var].ReportingServerConfig._get_build_name(self._opts) for _var in # pylint: disable=protected-access\n MODULES if 'reports_conf.' 
in _var)\n with suppress(StopIteration): # retain build name from env_prop\n build = next(name for name in name_iter if name is not None)\n self._buildname = '{0}-{1}'.format(build, platform)\n\n # WORKAROUND to add 'sanity' suffix to buildname\n if 'sanity' in self._opts.markexpr and self._buildname is not None:\n self._buildname += \"-sanity\"\n # WORKAROUND END\n return self._buildname", "def set_description(urn: str, description: str, md_file: Path) -> None:\n\n if not urn.startswith(\"urn:li:dataProduct:\"):\n urn = f\"urn:li:dataProduct:{urn}\"\n\n if description is None and not md_file:\n click.secho(\n \"Need one of --description or --md-file provided to populate description field\",\n fg=\"red\",\n )\n raise click.Abort()\n\n if description and md_file:\n click.secho(\n \"Need only one of --description or --md-file provided to populate description field. You provided both.\",\n fg=\"red\",\n )\n raise click.Abort()\n\n if md_file:\n logger.info(f\"Opening file {md_file} for populating description\")\n with open(md_file) as fp:\n description = fp.read()\n\n dataproduct_patcher: DataProductPatchBuilder = DataProduct.get_patch_builder(urn)\n dataproduct_patcher.set_description(description)\n with get_default_graph() as graph:\n _abort_if_non_existent_urn(graph, urn, \"set description\")\n for mcp in dataproduct_patcher.build():\n graph.emit(mcp)", "def set_longdescription(self, longdesc):\n self.longdescription(longdesc)", "def long_description(self, long_description):\n self._long_description = long_description", "def SetDescription(self, description):\n self.description = str(description)", "def description(self, description) :\n\t\ttry :\n\t\t\tself._description = description\n\t\texcept Exception as e:\n\t\t\traise e", "def generate_description(jira_key: str, comment: str) -> str:\n if jira_key in comment:\n return comment\n else:\n return '{}: {}'.format(jira_key, comment)", "def _append_descriptions(self, issue, dep_name, dep_latest_version):\n logging.info(\"Updating JIRA issue {0} to track {1} upgrade process\".format(\n issue.key,\n dep_name))\n description = issue.fields.description + \"\"\"\\n\\n{0}\\n\n Please review and upgrade the {1} to the latest version {2} \\n \n cc: \"\"\".format(\n datetime.today(),\n dep_name,\n dep_latest_version\n )\n _, owners = self._find_owners(dep_name)\n for owner in owners:\n description += \"[~{0}], \".format(owner)\n try:\n self.jira.update_issue(issue, description=description)\n except Exception as e:\n traceback.print_exc()\n logging.error(\"Failed updating issue: \"+ str(e))", "def set_description(self, description):\r\n self.__description = description", "def set_description(self, description):\n self.description = description", "def set_desc(self, item_desc):\r\n self.description = item_desc", "def setDescription(self, description):\n url = G.api + self.testId + '/snapshots/' + self.hash\n self.info = requests.put(url, auth=(G.username, G.authkey), data={'description':description})", "async def slashtag_edit_description(\n self, ctx: commands.Context, tag: GuildTagConverter, *, description: str\n ):\n await ctx.send(await tag.edit_description(description))", "def description(self, new_description):\r\n self.set({\"description\": new_description})", "def long_description(self, long_description: str):\n\n self._long_description = long_description", "def UpdateReport(report_dict, builder, test, report_date, build_link,\n test_summary, board, color):\n\n if 'date' not in report_dict:\n report_dict['date'] = [report_date]\n elif 
report_date not in report_dict['date']:\n # It is possible that some of the builders started/finished on different\n # days, so we allow for multiple dates in the reports.\n report_dict['date'].append(report_date)\n\n build_key = ''\n if builder == GCC_ROTATING_BUILDER:\n build_key = '%s-gcc-toolchain' % board\n elif builder == LLVM_ROTATING_BUILDER:\n build_key = '%s-llvm-next-toolchain' % board\n else:\n build_key = builder\n\n if build_key not in report_dict.keys():\n build_dict = dict()\n else:\n build_dict = report_dict[build_key]\n\n if 'build_link' not in build_dict:\n build_dict['build_link'] = build_link\n\n if 'date' not in build_dict:\n build_dict['date'] = report_date\n\n if 'board' in build_dict and build_dict['board'] != board:\n raise RuntimeError('Error: Two different boards (%s,%s) in one build (%s)!'\n % (board, build_dict['board'], build_link))\n build_dict['board'] = board\n\n color_key = '%s-color' % test\n build_dict[color_key] = color\n\n # Check to see if we already have a build status for this build_key\n status = ''\n if 'build_status' in build_dict.keys():\n # Use current build_status, unless current test failed (see below).\n status = build_dict['build_status']\n\n if not test_summary:\n # Current test data was not available, so something was bad with build.\n build_dict['build_status'] = 'bad'\n build_dict[test] = '[ no data ]'\n else:\n build_dict[test] = test_summary\n if not status:\n # Current test ok; no other data, so assume build was ok.\n build_dict['build_status'] = 'ok'\n\n report_dict[build_key] = build_dict", "def set_description(self, description):\n self.__description = description", "def set_description(self, sNewDescription):\n\t\tcall_sdk_function('PrlVmDev_SetDescription', self.handle, sNewDescription)", "def update_job_name(self, job_name):\n if strtobool(os.getenv(\"TP_UPDATE_JOB_NAME\")):\n logging.info(f\"Updating job name to: {job_name}\")\n try:\n response = self.send_request(\n \"PUT\",\n urljoin(self._remote_address, Endpoint.DevelopmentSession.value),\n {\"jobName\": job_name},\n )\n if not response.passed:\n logging.error(\"Failed to update job name\")\n except requests.exceptions.RequestException:\n logging.error(\"Failed to update job name\")", "def build_description(self):\n return self._build_description", "def description(self, description):\n if self.local_vars_configuration.client_side_validation and description is None: # noqa: E501\n raise ValueError(\"Invalid value for `description`, must not be `None`\") # noqa: E501\n\n self._description = description", "def description(self, description):\n if self.local_vars_configuration.client_side_validation and description is None: # noqa: E501\n raise ValueError(\"Invalid value for `description`, must not be `None`\") # noqa: E501\n\n self._description = description", "async def _forcedescription(self, ctx, *args):\n if len(args) < 2:\n await ctx.send(\"Include both a name and a description!\")\n return\n\n god = database.getGodName(args[0], ctx.guild.id)\n\n if god:\n desc = \"\"\n i = 1\n for arg in args:\n if i > 1:\n desc = desc + \" \" + arg\n i += 1\n desc.strip()\n\n if len(desc) > 100:\n await ctx.send(\"Keep the description under 100 chars, please.\")\n return\n\n database.setDesc(god.ID, desc)\n await ctx.send(\"Description set successfully!\")\n else:\n await ctx.send(\"No God found by that name!\")", "def test_update_task_description(\n self,\n mock_config_load,\n mock_custom_objects_api,\n mock_core_v1_api,\n ):\n task_id = util.MOCK_UUID_5\n rv = TEST_CLIENT.patch(\n 
f\"/tasks/{task_id}\",\n json={\n \"description\": \"valid description\",\n },\n )\n\n self.assertEqual(rv.status_code, 200)", "def update_job(self, job):\n call = subprocess.Popen(self.cli + [PlatformJenkinsJavaCLI.UPDATE_JOB, job.name], stdin=subprocess.PIPE)\n call.communicate(input=platform_ci.jjb.get_job_as_xml(job, self.template_dir))\n call.wait()\n if call.returncode != 0:\n raise PlatformJenkinsException(\"Updating job failed: \" + job.name)", "def description(self, description):\n self._description = description", "def description(self, description):\n self._description = description", "def description(self, description):\n self._description = description", "def description(self, description):\n self._description = description", "def description(self, newDescription=None):\n pass", "def update_info(self, *args, **kwargs):\n # Create the layout with the information\n self.info_widgets_list = [\n urwid.Text('ID: {}'.format(self.job.id)),\n urwid.Divider('='),\n urwid.Text('Command: {}'.format(self.job.command)),\n urwid.Text('Status: {}'.format(self.job.status))\n ]\n\n if self.job.status == JobStatus.FAILED: # If job has failed add error reason (if available)\n if 'Error reason' in self.job.metadata:\n self.info_widgets_list.append(urwid.Text('Possible error reason: {}'.format(self.job.metadata['Error reason'])))\n\n # Add button with the option available depending on the job status\n if self.job.status in [JobStatus.DONE, JobStatus.FAILED]:\n self.info_widgets_list.append(urwid.Padding(JobResubmitWidget(self.job, callback=self.resubmit), align='center', left=4, right=2))\n self.info_widgets_list.append(urwid.Divider('-'))\n elif self.job.status != JobStatus.UNSUBMITTED:\n self.info_widgets_list.append(create_button('Kill', self.terminate))\n self.info_widgets_list.append(urwid.Divider('-'))\n\n self.metadata_widgets_list = []\n self.metadata_widgets_list.append(urwid.Text('Retries: {}'.format(self.job.retries)))\n self.metadata_widgets_list.append(urwid.Divider())\n # Add resources requested by the job\n requested_resources = 'Specific requested resources:\\n'\n requested_resources += ' '+str(self.job.params).replace('\\n', '\\n ')\n self.metadata_widgets_list.append(urwid.Text(requested_resources))\n\n # If usage information is available, display it\n if 'usage' in self.job.metadata:\n self.metadata_widgets_list.append(urwid.Divider())\n used_resources = 'Used resources:\\n'\n used_resources += \"\\n\".join([\" {} = {}\".format(k, v) for k, v in self.job.metadata['usage'].items()])\n self.metadata_widgets_list.append(urwid.Text(used_resources))\n\n self.file_widgets_list = [] # Reset files widget\n # Create widget with the files if the job has failed\n if self.job.status == JobStatus.FAILED:\n # Generate wigets with stdout and stderr if available. 
Done here because Failed state is \"absolute\"=\n stdout_widget = self._load_file_as_widget(self.job.f_stdout, 'stdout')\n if stdout_widget is not None:\n self.file_widgets_list.append(stdout_widget)\n self.file_widgets_list.append(urwid.Divider('*'))\n stderr_widget = self._load_file_as_widget(self.job.f_stderr, 'stderr')\n if stderr_widget is not None:\n self.file_widgets_list.append(stderr_widget)\n self.file_widgets_list.append(urwid.Divider('*'))", "def set_description(self, descr):\n self._current_test_descr = descr", "def description(self, newDescription=None):\n if newDescription != None:\n self._setValue('description', newDescription)\n return self._getValue('description')", "def testDescription(self):\n project = self.session.create_project()\n\n self.util.stringTypeTest(self, project, \"description\")\n\n self.util.stringPropertyTest(self, project, \"description\")", "def set_description(self, description):\n self._description = description", "def description(self, description):\n if (self.local_vars_configuration.client_side_validation and\n description is not None and len(description) > 200):\n raise ValueError(\"Invalid value for `description`, length must be less than or equal to `200`\") # noqa: E501\n\n self._description = description", "def set_description(self):\n if 'description' not in self.data:\n if self.verbose:\n click.echo('Adding empty descriptions to root')\n self.data['description'] = ''", "def _set_description(self):\n if self._report_key == ReportTypes.MHR_REGISTRATION:\n description = self._report_data.get('description')\n if description and description.get('rebuiltRemarks'):\n description['rebuiltRemarks'] = markupsafe.Markup(description['rebuiltRemarks'])\n if description and description.get('otherRemarks'):\n description['otherRemarks'] = markupsafe.Markup(description['otherRemarks'])", "def _build_create_job_definition_request(\n self,\n monitoring_schedule_name,\n job_definition_name,\n image_uri,\n latest_baselining_job_name=None,\n latest_baselining_job_config=None,\n existing_job_desc=None,\n endpoint_input=None,\n ground_truth_input=None,\n analysis_config=None,\n output_s3_uri=None,\n constraints=None,\n enable_cloudwatch_metrics=None,\n role=None,\n instance_count=None,\n instance_type=None,\n volume_size_in_gb=None,\n volume_kms_key=None,\n output_kms_key=None,\n max_runtime_in_seconds=None,\n env=None,\n tags=None,\n network_config=None,\n batch_transform_input=None,\n ):\n if existing_job_desc is not None:\n app_specification = existing_job_desc[\n \"{}AppSpecification\".format(self.monitoring_type())\n ]\n baseline_config = existing_job_desc.get(\n \"{}BaselineConfig\".format(self.monitoring_type()), {}\n )\n job_input = existing_job_desc[\"{}JobInput\".format(self.monitoring_type())]\n job_output = existing_job_desc[\"{}JobOutputConfig\".format(self.monitoring_type())]\n cluster_config = existing_job_desc[\"JobResources\"][\"ClusterConfig\"]\n if role is None:\n role = existing_job_desc[\"RoleArn\"]\n existing_network_config = existing_job_desc.get(\"NetworkConfig\")\n stop_condition = existing_job_desc.get(\"StoppingCondition\", {})\n else:\n app_specification = {}\n baseline_config = {}\n job_input = {}\n job_output = {}\n cluster_config = {}\n existing_network_config = None\n stop_condition = {}\n\n # job output\n if output_s3_uri is not None:\n normalized_monitoring_output = self._normalize_monitoring_output(\n monitoring_schedule_name, output_s3_uri\n )\n job_output[\"MonitoringOutputs\"] = 
[normalized_monitoring_output._to_request_dict()]\n if output_kms_key is not None:\n job_output[\"KmsKeyId\"] = output_kms_key\n\n # app specification\n if analysis_config is None:\n if latest_baselining_job_config is not None:\n analysis_config = latest_baselining_job_config.analysis_config\n elif app_specification:\n analysis_config = app_specification[\"ConfigUri\"]\n else:\n raise ValueError(\"analysis_config is mandatory.\")\n # backfill analysis_config\n if isinstance(analysis_config, str):\n analysis_config_uri = analysis_config\n else:\n analysis_config_uri = self._upload_analysis_config(\n analysis_config._to_dict(), output_s3_uri, job_definition_name, output_kms_key\n )\n app_specification[\"ConfigUri\"] = analysis_config_uri\n app_specification[\"ImageUri\"] = image_uri\n normalized_env = self._generate_env_map(\n env=env, enable_cloudwatch_metrics=enable_cloudwatch_metrics\n )\n if normalized_env:\n app_specification[\"Environment\"] = normalized_env\n\n # baseline config\n if constraints:\n # noinspection PyTypeChecker\n _, constraints_object = self._get_baseline_files(\n statistics=None, constraints=constraints, sagemaker_session=self.sagemaker_session\n )\n constraints_s3_uri = None\n if constraints_object is not None:\n constraints_s3_uri = constraints_object.file_s3_uri\n baseline_config[\"ConstraintsResource\"] = dict(S3Uri=constraints_s3_uri)\n elif latest_baselining_job_name:\n baseline_config[\"BaseliningJobName\"] = latest_baselining_job_name\n\n # job input\n if endpoint_input is not None:\n normalized_endpoint_input = self._normalize_endpoint_input(\n endpoint_input=endpoint_input\n )\n # backfill attributes to endpoint input\n if latest_baselining_job_config is not None:\n if normalized_endpoint_input.features_attribute is None:\n normalized_endpoint_input.features_attribute = (\n latest_baselining_job_config.features_attribute\n )\n if normalized_endpoint_input.inference_attribute is None:\n normalized_endpoint_input.inference_attribute = (\n latest_baselining_job_config.inference_attribute\n )\n if normalized_endpoint_input.probability_attribute is None:\n normalized_endpoint_input.probability_attribute = (\n latest_baselining_job_config.probability_attribute\n )\n if normalized_endpoint_input.probability_threshold_attribute is None:\n normalized_endpoint_input.probability_threshold_attribute = (\n latest_baselining_job_config.probability_threshold_attribute\n )\n job_input = normalized_endpoint_input._to_request_dict()\n elif batch_transform_input is not None:\n # backfill attributes to batch transform input\n if latest_baselining_job_config is not None:\n if batch_transform_input.features_attribute is None:\n batch_transform_input.features_attribute = (\n latest_baselining_job_config.features_attribute\n )\n if batch_transform_input.inference_attribute is None:\n batch_transform_input.inference_attribute = (\n latest_baselining_job_config.inference_attribute\n )\n if batch_transform_input.probability_attribute is None:\n batch_transform_input.probability_attribute = (\n latest_baselining_job_config.probability_attribute\n )\n if batch_transform_input.probability_threshold_attribute is None:\n batch_transform_input.probability_threshold_attribute = (\n latest_baselining_job_config.probability_threshold_attribute\n )\n job_input = batch_transform_input._to_request_dict()\n\n if ground_truth_input is not None:\n job_input[\"GroundTruthS3Input\"] = dict(S3Uri=ground_truth_input)\n\n # cluster config\n if instance_count is not None:\n 
cluster_config[\"InstanceCount\"] = instance_count\n if instance_type is not None:\n cluster_config[\"InstanceType\"] = instance_type\n if volume_size_in_gb is not None:\n cluster_config[\"VolumeSizeInGB\"] = volume_size_in_gb\n if volume_kms_key is not None:\n cluster_config[\"VolumeKmsKeyId\"] = volume_kms_key\n\n # stop condition\n if max_runtime_in_seconds is not None:\n stop_condition[\"MaxRuntimeInSeconds\"] = max_runtime_in_seconds\n\n request_dict = {\n \"JobDefinitionName\": job_definition_name,\n \"{}AppSpecification\".format(self.monitoring_type()): app_specification,\n \"{}JobInput\".format(self.monitoring_type()): job_input,\n \"{}JobOutputConfig\".format(self.monitoring_type()): job_output,\n \"JobResources\": dict(ClusterConfig=cluster_config),\n \"RoleArn\": self.sagemaker_session.expand_role(role),\n }\n\n if baseline_config:\n request_dict[\"{}BaselineConfig\".format(self.monitoring_type())] = baseline_config\n\n if network_config is not None:\n network_config_dict = network_config._to_request_dict()\n request_dict[\"NetworkConfig\"] = network_config_dict\n elif existing_network_config is not None:\n request_dict[\"NetworkConfig\"] = existing_network_config\n\n if stop_condition:\n request_dict[\"StoppingCondition\"] = stop_condition\n\n if tags is not None:\n request_dict[\"Tags\"] = tags\n\n return request_dict", "def long_description(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"long_description\")", "def description(self, description):\n\n self._set_field(\"description\", description)", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n 
self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, description):\n\n self._description = description", "def description(self, value):\n self._update_values('description', value)", "def form_payload(build_number, job_name, build_url, status):\n message = \"Build #{} {} for {}\".format(build_number, status, job_name)\n description = \"Build #{} {} for {}. \\nPlease check detailed logs here: {}console\".format(build_number, status, job_name, build_url)\n \n branch_name = \"\"\n # Check optional env variable\n if \"BRANCH_NAME\" in os.environ:\n branch_name = os.environ['BRANCH_NAME']\n\n payload_rep = {\"message\" : message , \"description\" : description, \"branch_name\" : branch_name,\n \"build_url\": build_url, \"job_name\": job_name, \"build_number\": build_number, \"node_name\": os.environ['NODE_NAME'],\n \"status\" : status, \"event_id\" : job_name}\n return payload_rep", "def submission_update_description(request, submission_pk):\n try:\n submission = models.CompetitionSubmission.objects.get(pk=submission_pk)\n if submission.participant.user != request.user:\n raise Http404()\n submission.description = request.POST.get('updated_description')\n submission.save()\n return HttpResponse()\n except models.CompetitionSubmission.DoesNotExist:\n raise Http404()" ]
[ "0.77581465", "0.58388937", "0.55399406", "0.5432845", "0.5427102", "0.5406418", "0.5297104", "0.524192", "0.5240581", "0.5238138", "0.519109", "0.51833254", "0.5152861", "0.5104744", "0.5102506", "0.50946575", "0.50842863", "0.50842136", "0.5079061", "0.50598854", "0.5059168", "0.50519663", "0.5014469", "0.50041556", "0.49860138", "0.4979509", "0.49547186", "0.49539742", "0.49522904", "0.49368942", "0.4920609", "0.49203166", "0.49094254", "0.48865017", "0.4883258", "0.48652998", "0.4858247", "0.4858247", "0.48553568", "0.4849947", "0.4841815", "0.4839052", "0.4839052", "0.4839052", "0.4839052", "0.4831719", "0.4831149", "0.48194075", "0.4815051", "0.48117727", "0.48082379", "0.47922057", "0.47883278", "0.47678974", "0.47654557", "0.4765013", "0.47602275", "0.4729335", "0.4729335", "0.4729335", "0.4729335", "0.4729335", "0.4729335", "0.4729335", "0.4729335", "0.4729335", "0.4729335", "0.4729335", "0.4729335", "0.4729335", "0.4729335", "0.4729335", "0.4729335", "0.4729335", "0.4729335", "0.4729335", "0.4729335", "0.4729335", "0.4729335", "0.4729335", "0.4729335", "0.4729335", "0.4729335", "0.4729335", "0.4729335", "0.4729335", "0.4729335", "0.4729335", "0.4729335", "0.4729335", "0.4729335", "0.4729335", "0.4729335", "0.4729335", "0.4729335", "0.4729335", "0.4729335", "0.47238496", "0.4713004", "0.47127607" ]
0.8271981
0
DZ (homework): Scrape company descriptions. Sync version.
def scrape_descriptions_sync():
    # read Symbols; for each symbol in tqdm(symbols),
    # use urllib GET requests to Yahoo and write the response to a file;
    # add tqdm(symbols) for progress reporting
    myheader = {
        'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/75.0.3770.142 Safari/537.36'
    }
    symbols = read_symbols()
    YAHOO_HTMLS.mkdir(parents=True, exist_ok=True)
    for symbol in tqdm(symbols):
        # Example: myurl = "https://finance.yahoo.com/quote/AAPL/profile?p=AAPL"
        myurl = f'https://finance.yahoo.com/quote/{symbol}/profile?p={symbol}'
        try:
            req = request.Request(myurl, headers=myheader)
            response = request.urlopen(req)
            text = response.read()
            response.close()
        except Exception:
            print("Error occurred during web request!!")
            print(sys.exc_info()[1])
            continue  # skip the file write if the request failed (text would be undefined)
        f = open(YAHOO_HTMLS / f'{symbol}.html', 'wb')
        f.write(text)
        f.close()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def scrap_data_companies(self):\n list_job_offers = self.driver.find_elements_by_class_name(\n \"jobContainer\")\n jobs = []\n if len(list_job_offers) == 0:\n print(\"There is nothing to scrap for \", conf.URL_TO_SCRAPE,\n \"that was requested\")\n return\n\n for i, elt in enumerate(list_job_offers):\n\n self.remove_sign_up_prompt()\n self.remove_recommended_jobs()\n html_job_container = elt.get_attribute('innerHTML')\n time.sleep(2)\n name_company = get_name_company(elt.text)\n city_job = get_city_job(html_job_container)\n job_id = get_job_id(html_job_container)\n position_job = get_position(html_job_container)\n job_description = get_summary_job(position_job)\n\n if job_id is not None and name_company is not None:\n company = Company.Company(name_company)\n company_and_id_job = name_company + \"-\" + job_id\n self.current_path = os.path.join(self.date_path,\n company_and_id_job)\n os.mkdir(self.current_path)\n\n if i != 0:\n click_on_job_offer(\n elt) # link since we are already seeing it\n\n self.scrape_data_company(elt, company)\n company_id = company.insert_to_db(self.db_connection)\n job = JobOffer.JobOffer(job_id, company=company, city=city_job,\n position=position_job,\n description=job_description)\n job.insert_to_db(company_id, self.db_connection)\n jobs.append(job)\n print(job)\n else:\n logger.error(\"Job Id not found\")\n JobOffer.print_jobs(jobs)", "def __getCompaniesData(self, schema):\n try:\n self.cursor.execute(\"\"\"SELECT id, twitter, proven_score, slug FROM {schema}.vendors_vendor WHERE\n twitter <> ''\"\"\".format(schema=schema))\n data = self.cursor.fetchall()\n\n companies = []\n for entry in data:\n self.cursor.execute('SELECT location_id FROM {schema}.vendors_vendorlocation WHERE vendor_id = {vendor}'.format(schema=schema, vendor=entry[0]))\n cities = self.cursor.fetchall()\n\n if cities is None:\n continue\n\n city = ''\n\n for cityId in cities:\n self.cursor.execute('SELECT city FROM {schema}.locations_location WHERE id = {city}'.format(schema=schema, city=cityId[0]))\n cityName = self.cursor.fetchone()\n\n if cityName is not None:\n city += cityName[0]\n\n self.cursor.execute('SELECT category_id, rank FROM {schema}.vendors_vendorcustomkind WHERE vendor_id = {vendor} AND \"primary\" is true'.format(schema=schema, vendor=entry[0]))\n customKind = self.cursor.fetchone()\n\n if customKind is None:\n catId = rank = None\n else:\n catId, rank = customKind\n\n if catId is not None:\n self.cursor.execute('SELECT name, slug FROM {schema}.categories_category WHERE id = {cat}'.format(schema=schema, cat=catId))\n catData = self.cursor.fetchone()\n else:\n catData = None\n\n companies.append(DBItemCompany(\n _id = entry[0],\n tweeter = entry[1],\n category = catData[0] if catData is not None else None,\n categoryUrl = self.__buildCategoryUrl(catId, schema) if catId is not None else None,\n provenScore = entry[2],\n ranking = rank,\n location = city,\n url = self.__buildProfileUrl(catData[1], entry[3], schema) if catData is not None else self.__buildProfileUrlWOCategory(entry[3], schema),\n categoryId = catId\n ))\n\n self.__companies[schema] = companies\n\n except psycopg2.DatabaseError as err:\n raise DBException(err.args[0])", "def scrape_BI(url):\n response = requests.get(url)\n soup = BeautifulSoup(response.text)\n companies = soup.find_all('h3', class_='slide-title')\n #names = []\n driver = init_driver()\n for company in companies[:]:\n name = company.getText().strip()\n # if \" \" in name:\n # name.replace(' ','+')\n html_code = load_google(driver, name)\n #name, 
address = scrape_google(html_code)\n url = scrape_google(html_code)\n print(name,url)\n #names.append(name)\n driver.quit()\n #print(names)", "def test_website_companies_get_details(self):\n pass", "def _scrape(self):", "def company_v1(request):\n domain = request.GET.get(\"domain\")\n output_data = {}\n feed_company_from_db1_is_ok = 0\n\n if domain:\n\n domain = domain.lower()\n if domain.startswith(\"www.\"):\n domain = domain.replace(\"www.\", \"\")\n\n try:\n\n output_data = feed_company_from_db1(output_data, domain)\n feed_company_from_db1_is_ok = 1\n\n try:\n\n output_data = company_addition_from_db2(output_data, domain)\n\n except IndexError:\n\n if not feed_company_from_db1_is_ok:\n\n try:\n\n output_data = feed_company_from_clearbit(\n output_data=output_data,\n domain=domain\n )\n\n except IndexError:\n\n try:\n\n clearbit_company = clearbit_api.get_company(domain)\n\n except:\n # Not sure which exceptions I could get from\n # Clearbit's Python lib.\n # I know I could get a KeyError if I'm trying\n # to access a json field that Clearbit put in\n # his docs but forgets to put in the response\n # (actually not anymore because I'm retrieving\n # the dict values with .get() now).\n # But I don't know which error it would give me\n # if api call gives me an error like a http 500\n # error.\n # Sometimes if Clearbit does not find a company\n # it raises a 422 http error (validation error\n # which should only happend for malformed domain\n # names) instead of just returning none...\n\n api_access_logging(\n request,\n \"company\",\n domain,\n \"404\",\n \"2\",\n \"1\"\n )\n return Response(\n {\n \"error_code\": \"2\",\n \"detail\": errors_for_customers[\"2\"]\n },\n status=status.HTTP_404_NOT_FOUND\n )\n\n if clearbit_company:\n\n output_data = feed_company_from_clearbit(\n output_data=output_data,\n cbcompany=clearbit_company\n )\n\n else:\n\n api_access_logging(\n request,\n \"company\",\n domain,\n \"404\",\n \"2\",\n \"1\"\n )\n return Response(\n {\n \"error_code\": \"2\",\n \"detail\": errors_for_customers[\"2\"]\n },\n status=status.HTTP_404_NOT_FOUND\n )\n\n except IndexError:\n\n try:\n\n output_data = feed_company_from_clearbit(\n output_data=output_data,\n domain=domain\n )\n\n except IndexError:\n\n try:\n\n clearbit_company = clearbit_api.get_company(domain)\n\n except:\n\n api_access_logging(\n request,\n \"company\",\n domain,\n \"404\",\n \"2\",\n \"1\"\n )\n return Response(\n {\n \"error_code\": \"2\",\n \"detail\": errors_for_customers[\"2\"]\n },\n status=status.HTTP_404_NOT_FOUND\n )\n\n if clearbit_company:\n\n output_data = feed_company_from_clearbit(\n output_data=output_data,\n cbcompany=clearbit_company\n )\n\n else:\n\n api_access_logging(\n request,\n \"company\",\n domain,\n \"404\",\n \"2\",\n \"1\"\n )\n return Response(\n {\n \"error_code\": \"2\",\n \"detail\": errors_for_customers[\"2\"]\n },\n status=status.HTTP_404_NOT_FOUND\n )\n\n else:\n\n api_access_logging(\n request,\n \"company\",\n domain,\n \"400\",\n \"3\",\n \"1\"\n )\n return Response(\n {\n \"error_code\": \"3\",\n \"detail\": errors_for_customers[\"3\"]\n },\n status=status.HTTP_400_BAD_REQUEST\n )\n\n # Check that user plan allows this request.\n if not customer_throttling_checked(request, domain):\n\n api_access_logging(\n request,\n \"company\",\n domain,\n \"402\",\n \"5\",\n \"1\"\n )\n return Response(\n {\n \"error_code\": \"5\",\n \"detail\": errors_for_customers[\"5\"]\n },\n status=status.HTTP_402_PAYMENT_REQUIRED\n )\n\n api_access_logging(\n request,\n \"company\",\n 
domain,\n \"200\",\n None,\n \"1\"\n )\n return Response(output_data)", "def companies():\n res = requests.get('http://0.0.0.0:5002/companies')\n return jsonify(res.json())", "def fill(self):\n if self.source == 'citations':\n url = self._scholarly.URLS(\"CITATIONPUB\").format(self.id_citations)\n soup = self._scholarly._get_soup(\n self._scholarly.URLS('HOST').format(url))\n self.bib['title'] = soup.find('div', id='gsc_vcd_title').text\n\n if soup.find('a', class_='gsc_vcd_title_link'):\n self.bib['url'] = soup.find(\n 'a', class_='gsc_vcd_title_link')['href']\n\n for item in soup.find_all('div', class_='gs_scl'):\n key = item.find(class_='gsc_vcd_field').text\n val = item.find(class_='gsc_vcd_value')\n if key == 'Authors':\n self.bib['author'] = ' and '.join(self.get_authorlist(val))\n elif key == 'Journal':\n self.bib['journal'] = val.text\n elif key == 'Volume':\n self.bib['volume'] = val.text\n elif key == 'Issue':\n self.bib['number'] = val.text\n elif key == 'Pages':\n self.bib['pages'] = val.text\n elif key == 'Publisher':\n self.bib['publisher'] = val.text\n elif key == 'Publication date':\n self.bib['year'] = arrow.get(val.text).year\n elif key == 'Description':\n if val.text[0:8].lower() == 'abstract':\n val = val.text[9:].strip()\n self.bib['abstract'] = val\n elif key == 'Total citations':\n self.id_scholarcitedby = re.findall(\n self._scholarly.URLS('SCHOLARPUBRE'), val.a['href'])[0]\n\n # number of citation per year\n years = [int(y.text) for y in soup.find_all(class_='gsc_vcd_g_t')]\n cites = [int(c.text) for c in soup.find_all(class_='gsc_vcd_g_al')]\n self.cites_per_year = dict(zip(years, cites))\n\n if soup.find('div', class_='gsc_vcd_title_ggi'):\n self.bib['eprint'] = soup.find(\n 'div', class_='gsc_vcd_title_ggi').a['href']\n self._filled = True\n\n elif self.source == 'scholar':\n self.bib['add_to_lib'] = self.url_add_sclib\n\n try:\n bibtex = self._scholarly._get_soup(self.url_scholarbib)\n bibtex = bibtex.find('pre').string\n self.bib.update(bibtexparser.loads(bibtex).entries[0])\n self.bib['author_count'] = str(\n len(self.bib['author'].split('and')))\n self.bib['age'] = str(\n int(date.today().year) - int(self.bib['year']))\n except:\n # did not find year\n pass\n\n self._filled = True\n return self", "def search_descriptions(companies, main_company, top_count=5):\n stemmer = SnowballStemmer(\"english\")\n main_company_desc = strip_clean_stem_description(main_company.desc, stemmer)\n distances = {\"name\":[], \"distance\":[]}\n\n # If description is basically nothing\n if (\"undisclosed\" in main_company.desc) or (\"stealth\" in main_company.desc):\n # No keywords either? No search can be done\n if pd.isnull(main_company.keywords):\n return None\n\n # No description but had keywords? 
I can use that\n else:\n return list(companies.sort_values(by=\"match_fraction\", \n ascending=0)[:top_count][\"name\"])\n\n else:\n for ii in range(len(companies)):\n company = companies.iloc[ii]\n company_desc = strip_clean_stem_description(company.desc, stemmer)\n # Getting cosine distance between two descriptions\n dist = calculate_cosine_dist(main_company_desc, company_desc)\n distances[\"name\"].append(company[\"name\"])\n # Synonyms shouldn't be as important as straight keywords.\n weight = company[\"match_fraction\"] + 0.5 * company[\"syn_match_frac\"]\n distances[\"distance\"].append(dist * weight)\n\n # the higher the distance, the better the match\n distances = pd.DataFrame(distances).sort_values(by=\"distance\", \n ascending=0)\n if len(distances) > top_count:\n return list(distances[:top_count][\"name\"])\n\n else:\n return list(distances[\"name\"])", "def scrape(self):\n pass", "def scrape_data_company(self, elt, company):\n\n try:\n self.wait_job_loading(elt)\n self.remove_sign_up_prompt()\n self.remove_recommended_jobs()\n tabs_category = self.driver.find_element_by_class_name(\n \"scrollableTabs\")\n tabs_category = tabs_category.find_elements_by_class_name(\"tab\")\n self.get_data_from_tabs(company, tabs_category)\n\n except TimeoutError:\n logger.error(\"Timeout was reached and data was not loaded\")\n except StaleElementReferenceException as err:\n logger.error(\"Trying to click on a stale element \", err)", "def get_data_from_individual_company_pages(soup):\n individual_company_data = []\n usd_roe = get_usd_roe()\n company_code = (\n soup.find(\"meta\", {\"name\": \"description\"}).get(\"content\").split(\":\")[0]\n )\n current_price_usd = float(\n soup.find(\"span\", {\"class\": \"price-section__current-value\"}).text.replace(\n \",\", \"\"\n )\n )\n current_price = round(current_price_usd * usd_roe)\n try:\n p_e_ratio = float(\n soup.find(\n \"div\", {\"class\": \"snapshot__header\"}, string=\"P/E Ratio\"\n ).previous_sibling.replace(\",\", \"\")\n )\n except AttributeError:\n p_e_ratio = 0\n\n try:\n week_52_low = float(\n soup.find(\"div\", {\"class\": \"snapshot__header\"}, string=\"52 Week Low\")\n .previous_sibling.strip()\n .replace(\",\", \"\")\n )\n except AttributeError:\n week_52_low = 1\n\n try:\n week_52_high = float(\n soup.find(\"div\", {\"class\": \"snapshot__header\"}, string=\"52 Week High\")\n .previous_sibling.strip()\n .replace(\",\", \"\")\n )\n except AttributeError:\n week_52_high = 0\n\n unreal_profit_per_year_percent = round((week_52_high / week_52_low - 1) * 100, 2)\n\n individual_company_data.append(\n [company_code, current_price, p_e_ratio, unreal_profit_per_year_percent]\n )\n\n company_df = pd.DataFrame(\n columns=[\"company_code\", \"current_price\", \"P_E\", \"potential_profit_percent\"]\n )\n company_df = company_df.append(\n {\n \"company_code\": company_code,\n \"current_price\": current_price,\n \"P_E\": p_e_ratio,\n \"potential_profit_percent\": unreal_profit_per_year_percent,\n },\n ignore_index=True,\n )\n\n return company_df", "def collect_web_sites(self):\n min_distance = None\n max_sequence_match = None\n index_string_match = index_distance = None\n self.collection = collections.OrderedDict()\n for i_web, web_row in self.company_urls_df.iterrows():\n # get the url first from the websites table which list all the urls belonging to\n # one kvk search\n url = web_row[URL_KEY]\n\n # skip all none uls and also the filtered urls\n if url is None or url == \"\":\n logger.debug(\"Skipping url because it is None or empty\")\n 
continue\n if self.filter_urls and url not in self.filter_urls:\n logger.debug(f\"filter urls is given so skip {url}\")\n continue\n\n # store a list of UrlInfo object with a minimum info the url which was tested\n url_info = UrlInfo(index=i_web, url=url)\n self.collection[url] = url_info\n\n print_banner(f\"Processing {url}\")\n\n # quick check if we can processes this url based on the country code\n suffix = url_info.url_extract.suffix\n if suffix in self.exclude_extensions.index:\n url_info.outside_nl = True\n logger.info(f\"Web site {url} has suffix '.{suffix}' Continue \")\n\n # get the processing time of the last time you did this url from the table\n try:\n processing_time = self.urls_df.loc[url, DATETIME_KEY]\n except KeyError:\n processing_time = None\n\n if self.force_process or self.rescan_missing_urls:\n url_info.needs_update = True\n else:\n url_info.needs_update = check_if_url_needs_update(processing_time=processing_time,\n current_time=self.current_time,\n older_time=self.older_time)\n if url_info.needs_update:\n # if the url needs update, store the current time\n url_info.processing_time = self.current_time\n else:\n url_info.processing_time = processing_time\n\n url_analyse = self.scrape_url_and_store_in_dataframes(url, url_info)\n\n url_info.url_analyse = url_analyse\n\n if url_analyse and not url_analyse.exists:\n self.logger.debug(f\"url '{url}'' does not exist\")\n continue\n\n # based on the company postcodes and kvknummer and web contents, make a ranking how\n # good the web sides matches the company\n match = UrlCompanyRanking(url, self.company_name_small,\n url_extract=url_info.url_extract,\n url_analyse=url_analyse,\n company_kvk_nummer=self.kvk_nr,\n company_postcodes=self.postcodes,\n threshold_string_match=self.threshold_string_match,\n threshold_distance=self.threshold_distance,\n logger=self.logger)\n\n url_info.match = match\n\n # update the min max\n if min_distance is None or match.distance < min_distance:\n index_distance = i_web\n min_distance = match.distance\n\n if max_sequence_match is None or match.string_match > max_sequence_match:\n index_string_match = i_web\n max_sequence_match = match.string_match\n\n self.logger.debug(\" * {} - {} - {}\".format(url, match.ext.domain,\n match.distance))\n\n if min_distance is None:\n self.company_urls_df = None\n elif index_string_match != index_distance:\n self.logger.warning(\n \"Found minimal distance for {}: {}\\nwhich differs from \"\n \"best string match {}: {}\".format(index_distance,\n self.collection[url].url,\n index_string_match,\n self.collection[url].url))", "def retrieve_company_data(self):\n self.set_stock_sym_append_str('')\n self.set_stock_retrieval_type('all') #'all', watcher\n self.load_stock_symbol_fr_file()", "def feed_company_from_db1(output_data, domain):\n companyfl = CompanyFromdb1.objects.using('db1').filter(\n company_domain__iexact=domain,\n active=1\n )[0]\n\n if companyfl.company_name:\n output_data['name'] = companyfl.company_name\n\n if companyfl.company_phone:\n output_data['phone'] = companyfl.company_phone\n\n if companyfl.company_website:\n analyzed_url = urlparse(companyfl.company_website)\n if analyzed_url.netloc and analyzed_url.scheme:\n website_url = \"%s://%s\" % (\n analyzed_url.scheme,\n analyzed_url.netloc\n )\n elif analyzed_url.netloc and not analyzed_url.scheme:\n website_url = analyzed_url.netloc\n else:\n website_url = analyzed_url.path\n output_data['website_url'] = website_url\n\n if (companyfl.company_size and\n 
company_size_mapping_dict.get(companyfl.company_size)):\n output_data['size'] = company_size_mapping_dict.get(\n companyfl.company_size\n )\n\n if companyfl.company_remarks:\n output_data['description'] = (\n companyfl.\n company_remarks.\n replace('\\n', ' ').\n replace('\\r', '')\n )\n\n if companyfl.company_social:\n output_data['linkedin_url'] = companyfl.company_social\n\n if companyfl.sectors:\n output_data['industries'] = companyfl.sectors.split(u'§')\n\n if companyfl.profiles:\n output_data['types'] = companyfl.profiles.split(u'§')\n\n if companyfl.updated_on:\n output_data['last_updated'] = companyfl.updated_on\n\n # only retrieving email if email_status=VAL and row was updated less than\n # 365days ago\n if companyfl.company_email_status == \"VAL\" and companyfl.updated_on:\n duration_in_days = (timezone.now() - companyfl.updated_on).days\n if duration_in_days <= 365:\n output_data['email'] = companyfl.company_email\n\n if companyfl.street_name and companyfl.city and companyfl.country:\n # TODO: if street_number or postcode are None, we do not add it but it\n # leaves 2 spaces...find a way to solve it intelligently\n output_data['formatted_address'] = \"%s %s, %s %s, %s\" % (\n companyfl.street_number if companyfl.street_number else '',\n companyfl.street_name,\n companyfl.postcode if companyfl.postcode else '',\n companyfl.city,\n companyfl.country.country_name\n )\n\n return output_data", "def webScraper(self):\n try:\n self.covid_df = pd.read_csv(self.COVID_URL)\n except:\n sys.exit('COVID data is unavailable at source.')\n \n latest_date = self.covid_df['date'].max()\n earliest_date = self.covid_df['date'].min()\n self.covid_df = self.covid_df[self.covid_df['date'] == self.date.strftime('%Y-%m-%d')]\n \n if self.covid_df.empty:\n exit_string = 'Requested date not available. 
Latest date available is ' + latest_date + ' while earliest is ' + earliest_date\n sys.exit(exit_string)\n else:\n self.covid_df = self.covid_df[self.covid_df['location'] != 'World']\n \n try:\n self.countries_centroids = pd.read_html(self.CENTROIDS_URL, header=0, index_col='country')[0]\n except:\n sys.exit('Central coordinates data for countries unavailable from Google developers.')\n \n try:\n self.geo_data = requests.get(self.GEOJSON_URL).json()\n except:\n sys.exit('GeoJSON data unavailable to draw country polygons.')", "def get_all_companies_and_people():", "def test_all_companies(self, setup_data):\n url = reverse('api-v3:search:basic')\n response = self.api_client.get(\n url,\n data={\n 'term': '',\n 'entity': 'company',\n },\n )\n\n assert response.status_code == status.HTTP_200_OK\n assert response.data['count'] > 0", "async def async_get_all_descriptions(hass):\n if SERVICE_DESCRIPTION_CACHE not in hass.data:\n hass.data[SERVICE_DESCRIPTION_CACHE] = {}\n description_cache = hass.data[SERVICE_DESCRIPTION_CACHE]\n\n format_cache_key = '{}.{}'.format\n\n def domain_yaml_file(domain):\n \"\"\"Return the services.yaml location for a domain.\"\"\"\n if domain == ha.DOMAIN:\n from homeassistant import components\n component_path = path.dirname(components.__file__)\n else:\n component_path = path.dirname(get_component(hass, domain).__file__)\n return path.join(component_path, 'services.yaml')\n\n def load_services_files(yaml_files):\n \"\"\"Load and parse services.yaml files.\"\"\"\n loaded = {}\n for yaml_file in yaml_files:\n try:\n loaded[yaml_file] = load_yaml(yaml_file)\n except FileNotFoundError:\n loaded[yaml_file] = {}\n\n return loaded\n\n services = hass.services.async_services()\n\n # Load missing files\n missing = set()\n for domain in services:\n for service in services[domain]:\n if format_cache_key(domain, service) not in description_cache:\n missing.add(domain_yaml_file(domain))\n break\n\n if missing:\n loaded = await hass.async_add_job(load_services_files, missing)\n\n # Build response\n catch_all_yaml_file = domain_yaml_file(ha.DOMAIN)\n descriptions = {}\n for domain in services:\n descriptions[domain] = {}\n yaml_file = domain_yaml_file(domain)\n\n for service in services[domain]:\n cache_key = format_cache_key(domain, service)\n description = description_cache.get(cache_key)\n\n # Cache missing descriptions\n if description is None:\n if yaml_file == catch_all_yaml_file:\n yaml_services = loaded[yaml_file].get(domain, {})\n else:\n yaml_services = loaded[yaml_file]\n yaml_description = yaml_services.get(service, {})\n\n description = description_cache[cache_key] = {\n 'description': yaml_description.get('description', ''),\n 'fields': yaml_description.get('fields', {})\n }\n\n descriptions[domain][service] = description\n\n return descriptions", "def company_data(driver):\n try:\n stats_selector = \"ul.company-growth-stats.stats-list li\"\n company_stats = driver.find_elements_by_css_selector(stats_selector)\n company_info = [stat.text for stat in company_stats]\n except Exception as e:\n print(\"error acquiring company info\")\n print(e)\n else:\n try:\n employees = list(filter(lambda text: 'employees' in text, company_info))\n num_employees = ''.join(list(filter(lambda c: c.isdigit(), employees[0])))\n except Exception as e:\n num_employees = \"\"\n pass\n try:\n tenure = list(filter(lambda text: 'tenure' in text, company_info))\n avg_tenure = ''.join(list(filter(lambda c: c in '0123456789.', tenure[0])))\n except Exception as e:\n avg_tenure = \"\"\n pass\n 
company_info = {\n \"avg_tenure\" : avg_tenure, \n \"num_employees\" : num_employees\n }\n return {\"avg_tenure\" : avg_tenure, \"num_employees\" : num_employees}", "def test_companies(self, setup_data):\n term = 'abc defg'\n\n url = reverse('api-v3:search:basic')\n response = self.api_client.get(\n url,\n data={\n 'term': term,\n 'entity': 'company',\n },\n )\n\n assert response.status_code == status.HTTP_200_OK\n assert response.data['count'] == 2\n assert response.data['results'][0]['name'].startswith(term)\n assert [{'count': 2, 'entity': 'company'}] == response.data['aggregations']", "def get_coauthors(self):\n # Get number of authors to search for\n res = download(url=self.coauthor_link, accept='json')\n data = loads(res.text)['search-results']\n N = int(data.get('opensearch:totalResults', 0))\n # Store information in namedtuples\n fields = 'surname given_name id areas affiliation_id name city country'\n coauth = namedtuple('Coauthor', fields)\n coauthors = []\n # Iterate over search results in chunks of 25 results\n count = 0\n while count < N:\n params = {'start': count, 'count': 25}\n res = download(url=self.coauthor_link, params=params, accept='json')\n data = loads(res.text)['search-results'].get('entry', [])\n # Extract information for each coauthor\n for entry in data:\n aff = entry.get('affiliation-current', {})\n try:\n areas = [a['$'] for a in entry.get('subject-area', [])]\n except TypeError: # Only one subject area given\n areas = [entry['subject-area']['$']]\n new = coauth(surname=entry['preferred-name']['surname'],\n given_name=entry['preferred-name'].get('given-name'),\n id=entry['dc:identifier'].split(':')[-1],\n areas='; '.join(areas),\n affiliation_id=aff.get('affiliation-id'),\n name=aff.get('affiliation-name'),\n city=aff.get('affiliation-city'),\n country=aff.get('affiliation-country'))\n coauthors.append(new)\n count += 25\n return coauthors", "def get_available_companies(team):", "def generate_company_library():\n\n companyids = generate_company_list()\n counter = 0\n companyids = companyids[counter:]\n\n for id in companyids:\n print(counter,\"generating\", id + \".csv\")\n try:\n update_data(id)\n except:\n pass\n counter += 1", "def test_website_companies_update(self):\n pass", "def company(self):\n\n x = 0\n my_company = self.data[\"Company Name\"]\n my_account = self.data[\"Account\"]\n result = []\n for i in my_company:\n my_string = i + \" -- \" + my_account[x]\n x += 1\n result.append(my_string)\n\n return result", "def run_whoxy_company_search(self,company):\n if self.whoxy_api_key:\n try:\n results = requests.get(self.reverse_whoxy_api_endpoint.format(self.whoxy_api_key,company),timeout=self.requests_timeout).json()\n if results['status'] == 1 and results['total_results'] > 0:\n whois_results = {}\n total_results = results['total_results']\n for domain in results['search_result']:\n domain_name = domain['domain_name']\n temp = self.parse_whoxy_results(domain,True)\n whois_results[domain_name] = temp\n return whois_results,total_results\n else:\n click.secho(\"[*] WhoXY returned status code 0, error/no results, for reverse company search.\",fg=\"yellow\")\n except requests.exceptions.Timeout:\n click.secho(\"\\n[!] The connection to WhoXY timed out!\",fg=\"red\")\n except requests.exceptions.TooManyRedirects:\n click.secho(\"\\n[!] The connection to WhoXY encountered too many redirects!\",fg=\"red\")\n except requests.exceptions.RequestException as error:\n click.secho(\"[!] 
Error connecting to WhoXY for reverse company search!\",fg=\"yellow\")\n click.secho(\"L.. Details: {}\".format(error),fg=\"yellow\")", "def __getCompanyMetadata(parsed: BeautifulSoup) -> dict:\n\n # Company metadata container\n metadata_container = parsed.find('p', class_='identInfo')\n # String representation of HTML (used in RegEx)\n metadata_str = str(metadata_container)\n\n # Dictionary for company metadata\n company_metadata = dict()\n\n # RegEx for extracting SIC and SIC type\n sic_re = re.compile(r'SIC.+?:.+?(\\d+?)<\\/a> -(.+?)<br')\n # Getting SIC and SIC type match\n sic_matches = sic_re.findall(metadata_str)\n # Saving SIC and stripped, HTML-parsed SIC type\n company_metadata['sic'] = sic_matches[0][0]\n company_metadata['sic_type'] = html.unescape(sic_matches[0][1]).strip()\n\n # RegEx for extracting company location (state)\n location_re = re.compile(r'State location:.+?>(\\w+?)<\\/a>')\n # Getting company location\n location_matches = location_re.findall(metadata_str)\n # Saving company location\n company_metadata['location'] = location_matches[0].strip()\n\n # RegEx for extracting state of incorporation\n incorp_state_re = re.compile(r'State of Inc\\.:.+?>(\\w+?)<\\/strong>')\n # Getting state of incorporation\n incorp_match = incorp_state_re.findall(metadata_str)[0]\n # Saving state of incorporation\n company_metadata['incorporation_state'] = incorp_match.strip()\n\n # RegEx for extracting end of fiscal year\n fiscal_year_re = re.compile(r'Fiscal Year End:.+?(\\d{4})')\n # Getting end of fiscal year\n fiscal_year_match = fiscal_year_re.findall(metadata_str)[0]\n # Saving end of fiscal year (in mm-dd format)\n fy_formatted = fiscal_year_match[0:2] + '-' + fiscal_year_match[2:]\n company_metadata['fiscal_year_end'] = fy_formatted\n\n return company_metadata", "async def create_customers_from_summary(client, account_id, customer_id, timeout, logger):\n logger.info(\"Fetching summary page\")\n\n customers = []\n\n await client.select_customer(account_id, customer_id)\n\n res = await client.http_request(CONTRACT_URL_3, \"get\")\n content = await res.text()\n soup = BeautifulSoup(content, 'html.parser')\n\n #Add a boolean variable to set correctly the state of the initial page for later\n more_than_10_contracts_holder = False\n\n #1st check if we have a more than 10 contracts on that client\n if (not soup.find('h2', {'class': 'entete-multi-compte'})\n and not soup.find('p', {'class': 'solde'})):\n #load the alternative page for listing all contracts\n res = await client.http_request(CONTRACT_URL_5, \"get\")\n content = await res.text()\n soup = BeautifulSoup(content, 'html.parser')\n more_than_10_contracts_holder = True\n\n #Then determine if it's a multi contract holder\n if soup.find('h2', {'class': 'entete-multi-compte'}):\n #It's a multi account so we need to create multiple customer objects\n accounts = soup.find_all('article', {'class': 'compte'})\n for account in accounts:\n try:\n account_ncc = account.get('id')[7:]\n raw_balance = account.find('p', {'class': 'solde'}).text\n balance = float(raw_balance[:-2].replace(\",\", \".\").\n replace(\"\\xa0\", \"\"))\n #time to get the contract id from the special ajax request\n params = {'ncc':account_ncc}\n res2 = await client.http_request(CONTRACT_URL_4, \"get\",\n params=params)\n content2 = await res2.text()\n soup2 = BeautifulSoup(content2, 'html.parser')\n raw_contract_id = soup2.find('div', {'class': 'contrat'}).text\n contract_id = (raw_contract_id\n .split(\"Contrat\", 1)[-1]\n .replace(\"\\t\", \"\")\n 
.replace(\"\\n\", \"\"))\n #Time to create the customer object\n customer = Customer(client, account_id, customer_id, timeout, logger)\n customer.contract_id = contract_id\n customer._balance = balance\n customers.append(customer)\n except AttributeError:\n logger.info(\"Customer has no contract\")\n else:\n try:\n raw_balance = soup.find('p', {'class': 'solde'}).text\n balance = float(raw_balance[:-2].replace(\",\", \".\").\n replace(\"\\xa0\", \"\"))\n\n raw_contract_id = soup.find('div', {'class': 'contrat'}).text\n contract_id = (raw_contract_id\n .split(\"Contrat\", 1)[-1]\n .replace(\"\\t\", \"\")\n .replace(\"\\n\", \"\"))\n customer = Customer(client, account_id, customer_id, timeout, logger)\n customer.contract_id = contract_id\n customer._balance = balance\n customers.append(customer)\n except AttributeError:\n logger.info(\"Customer has no contract\")\n \n # Needs to load the consumption profile page to not break\n # the next loading of the other pages\n await client.http_request(CONTRACT_CURRENT_URL_1, \"get\")\n if more_than_10_contracts_holder:\n await client.http_request(CONTRACT_CURRENT_URL_4, \"get\")\n\n return customers", "def find_data(company: str, multiplicators: list):\n data = request_page(URL, company)\n companys_multi = {}\n try:\n print (data.title.text)\n for param in multiplicators:\n result = data.find(text = param)\n multi = result.next_element.text\n companys_multi[result] = multi\n print (companys_multi)\n return companys_multi\n except Exception as e:\n print (e)", "def main(self, name):\n\t\tapi_results = [] \n\t\tparams = self.get_search_parameters(name)\n\t\tapi_results.append(self.api_connect(params))\n\t\ttime.sleep(1.0)\n\t\tkey = api_results[0]['businesses'][0]\n\t\tbusiness_information = [key['name'], self.phone_number_organizer(key), key['rating'],\\\n\t\tkey['review_count']]\n\t\treturn business_information", "def get_sp_list():\n bs = get_soup('https://en.wikipedia.org/wiki/List_of_S%26P_500_companies')\n sp_companies = bs.find_all('a', class_=\"external text\")\n return sp_companies", "def scrap_public_codes(driver):\n\n # Get the public local coupons\n try:\n for i in range(1, 20):\n # Grab the public coupons and parse the data and put them in a dict\n code_text = driver.find_element_by_xpath(\n \"//*[@id='js-pageSplit']/section/div[2]/div/div[3]/div[\" + str(i) + \"]/a/div[4]/p\").text\n\n code_number = int(code_text[-5:-1])\n description = code_text[:-13]\n coupons_dict[code_number] = description\n except:\n pass", "async def org_info_above_14(orgs_urls14):\n org_info_14 = []\n project_urls_from14 = []\n for url in orgs_urls14:\n try:\n soup = await get_page(url)\n org_name = basename(url)\n org_info = soup.find_all('p')\n web_page = org_info[1].text.splitlines()[-1].strip()\n mailing_list = org_info[2].text.split(\":\")[-1].strip()\n description = soup.find('div', {'class': 'main mdl-cell mdl-cell--8-col\\\n mdl-card mdl-shadow--4dp'})\n detail = description.find_all('p')[2].nextSibling\n org_info_14.append({'name': org_name, 'page': web_page,\n 'about': detail, 'mail': mailing_list,\n 'link': url})\n project_urls_from14.extend(grab_project_links(soup))\n except IndexError:\n print(url)\n\n return org_info_14, get_project_info(project_urls_from14)", "def description_mega(self, html): # pylint: disable=too-many-statements,too-many-branches\n description_list = []\n with suppress(Exception):\n '''\n Tested on\n * 
https://economictimes.indiatimes.com/news/economy/policy/government-mops-up-rs-8660-cr-from-disinvestment-in-02/articleshow/33105933.cms\n <meta content=\"The total disinvestment realisation of the government during 2002 topped Rs 8,660 crore. The cabinet committee on disinvestment (CCD) had cleared transactions worth Rs 6,168 crore during the year.\" name=\"description\">\n * https://timesofindia.indiatimes.com/city/bengaluru/ISROs-second-launch-pad-to-be-sent-by-March-end/articleshow/3801270.cms\n <meta name=\"description\" content=\"BANGALORE: The second launch pad for the Indian Space Research Organisation will be dispatched to Sriharikota by the end of March. The Mobile Launch P\">\n '''\n meta_name_description = html.find('meta', {'name': 'description'})\n description_list.append(\n self.text_cleaning(meta_name_description['content']))\n\n with suppress(Exception):\n '''\n Tested on\n * https://www.deccanherald.com/content/1368/agriculture-department-urged-regulate-fertilisers.html\n <meta property=\"og:description\" content=\"Farmers will be happy only if they get good rains and sufficient fertilisers. They were is deep trouble due to the improper supply of fertilisers.\">\n * https://sports.ndtv.com/cricket/we-cant-influence-indian-high-commission-for-visas-pcb-1594242\n <meta property=\"og:description\" content=\"Pakistan Cricket Board made it clear that it had done everything under its power to get the visas for its cricketers to play in the IPL next year.\">\n '''\n meta_property_og_description = html.find(\n 'meta', {'property': 'og:description'})\n description_list.append(\n self.text_cleaning(meta_property_og_description['content']))\n\n with suppress(Exception):\n '''\n Tested on\n * https://www.independent.co.uk/news/world/americas/elijah-mcclain-death-colorado-police-black-lives-matter-george-floyd-police-a9584366.html\n <meta name=\"twitter:description\" content=\"'Demand these officers are taken off duty, and that a more in-depth investigation is held', page reads\">\n * https://nypost.com/2010/09/27/brooklyn-tea-party-rallies-against-ground-zero-mosque-multiculturalism/\n <meta name=\"twitter:description\" content=\"About 125 people gathered at a recent Bay Ridge rally of the Brooklyn Tea Party to protest a variety of hot subjects — especially the planned Ground Zero mosque, according to a Brooklyn Ink\">\n '''\n meta_name_twitter_description = html.find(\n 'meta', {'name': 'twitter:description'})\n description_list.append(\n self.text_cleaning(meta_name_twitter_description['content']))\n\n with suppress(Exception):\n '''\n Tested on\n * https://www.standard.co.uk/news/uk/boris-johnson-u-turn-free-school-meals-marcus-rashford-a4470506.html\n <meta property=\"twitter:description\" content=\"'THIS is England in 2020'\">\n * https://www.express.co.uk/news/politics/1369685/brexit-news-uk-eu-trade-deal-france-fishing-emmanuel-macron-no-deal-latest\n <meta property=\"twitter:description\" content=\"FRENCH fishermen have lashed out at Emmanuel Macron, warning he is playing a &quot;dangerous game&quot; and has &quot;overstepped the mark&quot; by threatening to veto a post-Brexit trade deal with the UK.\">\n '''\n meta_property_twitter_desc = html.find(\n 'meta', {'property': 'twitter:description'})\n description_list.append(\n self.text_cleaning(meta_property_twitter_desc['content']))\n\n with suppress(Exception):\n '''\n Tested on\n * 
https://www.indiatoday.in/india/story/pm-modi-launch-covid-vaccination-drive-jan-16-cowin-app-coronavirus-covaxin-covishield-1758628-2021-01-13\n <meta itemprop=\"description\" content=\"Prime Minister Narendra Modi will kickstart the Covid-19 vaccination programme in India with a virtual launch on January 16, sources have told India Today.\">\n * https://indianexpress.com/article/world/print/four-killed-as-armed-militants-storm-5-star-hotel-in-pakistans-gwadar-port-city-police-5723193/\n <meta itemprop=\"description\" content=\"A shootout between the militants and the security forces broke out at the hotel as the anti-terrorism force, the Army and the Frontier Corps were called in, Gwadar Station House Officer (SHO) Aslam Bangulzai said.\">\n '''\n meta_itemprop_description = html.find('meta',\n {'itemprop': 'description'})\n description_list.append(\n self.text_cleaning(meta_itemprop_description['content']))\n\n with suppress(Exception):\n '''\n Tested on\n * https://www.cnbc.com/2020/12/25/the-plant-based-meat-industry-is-on-the-rise-but-challenges-remain.html\n <meta itemprop=\"description\" name=\"description\" content=\"Demand for meat alternatives has grown and will continue to rise, but the industry still has hurdles to overcome in different parts of the world, analysts said.\">\n * https://www.oneindia.com/india/congress-leader-dk-shivakumar-to-appear-before-cbi-in-disproportionate-assets-case-today-3180984.html\n <meta name=\"description\" itemprop=\"description\" content=\"On October 5, the CBI conducted raids at 14 locations, including in Karnataka, Delhi and Mumbai at the premises belonging to Shivakumar and others, and recovered Rs 57 lakh cash and several documents, including property documents, bank related information, computer hard disk. \">\n '''\n meta_name_itemprop_description = html.find(\n 'meta', {\n 'name': 'description',\n 'itemprop': 'description'\n })\n description_list.append(\n self.text_cleaning(meta_name_itemprop_description['content']))\n\n with suppress(Exception):\n '''\n Tested on\n * https://scroll.in/field/979390/they-can-beat-australia-in-their-own-den-shastri-backs-india-s-fabulous-five-quicks-to-shine\n <meta name=\"dcterms.description\" content=\"The India coach said his team’s pace unit was the best in the world, despite being likely to be without the injured Ishant Sharma.\">\n * https://scroll.in/field/979393/champions-league-last-gasp-wins-take-juventus-chelsea-and-sevilla-into-last-16-barcelona-cruise\n <meta name=\"dcterms.description\" content=\"They are the first teams to make it out of the group stage, doing so with two games to spare.\">\n '''\n meta_name_dcterms_description = html.find(\n 'meta', {'name': 'dcterms.description'})\n description_list.append(\n self.text_cleaning(meta_name_dcterms_description['content']))\n\n with suppress(Exception):\n '''\n Tested on\n * https://www.express.co.uk/news/weather/1370081/BBC-Weather-Europe-snow-forecast-cold-December-update-video-vn\n <div class=\"text-description\"><p><span>BBC Weather meteorologist Stav Danaos forecast unsettled weather across the&nbsp;</span><span>Mediterranean for the rest of the week. 
He added a blocking area of high pressure across Russia was contributing to the unsettling weather.</span></p></div>\n * https://www.express.co.uk/news/politics/1383306/Brexit-live-latest-brexit-deal-Northern-Ireland-customs-boris-johnson-john-redwood\n <div class='text-description'><p>Earlier today, Boris Johnson suggested some fishing businesses in Scotland would receive compensation as he defended...</p></div>\n '''\n div_class_text_description = html.find(\n 'div', {'class': 'text-description'})\n description_list.append(\n self.text_cleaning(div_class_text_description.text))\n\n with suppress(Exception):\n '''\n Tested on\n * https://www.usatoday.com/story/news/nation/2020/12/07/north-atlantic-right-whale-endangered-species-newborns/6484190002/\n <div...data-ss-d=\"Two North Atlantic right whale newborns have been spotted in the last week at the start of calving season, providing hope for an endangered species.\"...>\n * https://www.usatoday.com/story/sports/mls/2020/12/07/mls-cup-2020-seattle-sounders-advance-play-columbus-crew-title/6487291002/\n <div...data-ss-d=\"The Seattle Sounders scored two late goals to complete a dramatic rally over Minnesota United and advance to MLS Cup to play the Columbus Crew.\"...>\n '''\n div_data_ssd = html.find('div', {'data-ss-d': True})\n description_list.append(\n self.text_cleaning(div_data_ssd['data-ss-d']))\n\n with suppress(Exception):\n '''\n Tested on\n * https://www.indiatoday.in/technology/news/story/amazon-great-republic-day-sale-announced-from-january-20-deals-bank-offers-and-more-1758622-2021-01-13\n <div class=\"story-kicker\"><h2>Amazon's Great Republic Day Sale begins January 20 but Prime members will get 24 hours early access on deals.</h2></div>\n * https://www.indiatoday.in/sports/cricket/story/a-win-at-gabba-will-give-india-their-greatest-test-series-victory-ever-says-akhtar-1758619-2021-01-13\n <div class=\"story-kicker\"><h2>Former Pakistan fast bowler Shoaib Akhtar lauded India for the fight they have shown in the series so far and said that they should go on to win the final Test in Brisbane.</h2></div>\n '''\n div_class_story_kicker = html.find('div',\n {'class': 'story-kicker'})\n description_list.append(\n self.text_cleaning(div_class_story_kicker.text))\n\n with suppress(Exception):\n '''\n Tested on\n * https://www.espncricinfo.com/story/vitality-t20-blast-mitchell-claydon-misses-sussex-s-t20-blast-defeat-after-hand-sanitiser-ball-tampering-ban-1234150\n <p class=\"article-summary\">Seamer will miss first two games of 2021 as well after nine-match ban imposed by CDC</p>\n * https://www.espncricinfo.com/series/vitality-blast-2020-1207645/nottinghamshire-vs-leicestershire-1st-quarter-final-1207789/match-report\n <p class=\"article-summary\">Nottinghamshire progress on higher Powerplay score after securing dramatic tie off last ball</p>\n '''\n p_class_article_summary = html.find('p',\n {'class': 'article-summary'})\n description_list.append(\n self.text_cleaning(p_class_article_summary.text))\n\n with suppress(Exception):\n '''\n Tested on\n * https://www.nytimes.com/2020/01/31/learning/is-it-offensive-for-sports-teams-and-their-fans-to-use-native-american-names-imagery-and-gestures.html\n <p id=\"article-summary\" class=\"css-w6ymp8 e1wiw3jv0\">The Kansas City Chiefs will face the San Francisco 49ers for Super Bowl LIV. 
Chiefs fans regularly use a “tomahawk chop” to urge on their beloved team: Is it offensive?</p>\n * https://www.nytimes.com/2020/01/09/world/middleeast/iran-plane-crash-ukraine.html\n <p id=\"article-summary\" class=\"css-w6ymp8 e1wiw3jv0\">Western intelligence showed that Iran was responsible for the plane crash, suggesting that the deaths of those aboard were a consequence of the heightened tensions between Washington and Iran. </p>\n '''\n p_id_article_summary = html.find('p', {'id': 'article-summary'})\n description_list.append(\n self.text_cleaning(p_id_article_summary.text))\n\n with suppress(Exception):\n '''\n Tested on\n * https://economictimes.indiatimes.com/industry/services/education/indian-universities-look-abroad-for-success-at-home/articleshow/5957175.cms\n <h2 class=\"summary\">Foreign universities may soon be able to set up in India but some of their Indian counterparts are looking in the other direction — to better equip students for the demands of the global economy.</h2>\n * https://economictimes.indiatimes.com/industry/transportation/railways/conviction-rate-in-theft-cases-in-central-railways-mumbai-division-falls-steeply/articleshow/48554953.cms\n <h2 class=\"summary\">According to official data, the conviction rate in theft cases of railway properties has witnessed a steep fall in Mumbai Division of Central Railway.</h2>\n '''\n h2_class_summary_description = html.find('h2',\n {'class': 'summary'})\n description_list.append(\n self.text_cleaning(h2_class_summary_description.text))\n\n with suppress(Exception):\n '''\n Tested on\n * https://sports.ndtv.com/india-vs-england-2020-21/ind-vs-eng-virat-kohli-reflects-on-battling-depression-during-2014-england-tour-2373999\n <h2 class=\"sp-descp\">India vs England: Virat Kohli opened up about dealing with depression on India's 2014 tour of England where Kohli endured a horror run with the bat.</h2>\n * https://sports.ndtv.com/cricket/we-cant-influence-indian-high-commission-for-visas-pcb-1594242\n <h2 class=\"sp-descp\">Pakistan Cricket Board made it clear that it had done everything under its power to get the visas for its cricketers to play in the IPL next year.</h2>\n '''\n h2_class_sp_descp_description = html.find('h2',\n {'class': 'sp-descp'})\n description_list.append(\n self.text_cleaning(h2_class_sp_descp_description.text))\n\n with suppress(Exception):\n '''\n Tested on\n * https://indianexpress.com/article/news-archive/days-are-not-far-when-kashmiri-pandits-would-return-to-their-homes-with-dignity-jk-bjp-4842449/\n <h2 itemprop=\"description\" class=\"synopsis\">\"Those days are not far when the displaced people will return to their Kashmir with dignity and honour. The BJP will leave no stone unturned in solving the problems of the hapless people who were forced to leave the Valley,\" Jammu and Kashmir BJP unit chief Sat Sharma said. 
</h2>\n * https://indianexpress.com/article/india/web/bjp-mp-karandlaje-challenges-karnataka-cm-siddaramaiah-govt-to-arrest-her-4996043/\n <h2 itemprop=\"description\" class=\"synopsis\">An FIR was filed against BJP MP Shobha Karandlaje on charges of provoking people to cause riots, disturbing communal harmony and spreading rumours.</h2>\n '''\n h2_itemprop_description = html.find('h2',\n {'itemprop': 'description'})\n description_list.append(\n self.text_cleaning(h2_itemprop_description.text))\n\n with suppress(Exception):\n '''\n Tested on\n * https://www.business-standard.com/article/current-affairs/death-of-galaxy-galactic-collision-spews-gases-equal-to-10-000-suns-a-year-121011300543_1.html\n <h2 class=\"alternativeHeadline\">The merging galaxy formed 4.5 billion years ago is dubbed ID2299 and is ejecting gases equivalent to 10,000 Suns-worth of gas a year</h2>\n * https://www.business-standard.com/article/international/wb-economist-china-will-need-to-learn-to-restructure-emerging-market-debt-121011300034_1.html\n <h2 class=\"alternativeHeadline\">Increasing debt distress in emerging markets means that China will need to start restructuring debts in the same way that Paris Club lenders did in past crises, World Bank Chief Economist said</h2>\n '''\n h2_class_alternative_headline = html.find(\n 'h2', {'class': 'alternativeHeadline'})\n description_list.append(\n self.text_cleaning(h2_class_alternative_headline.text))\n\n with suppress(Exception):\n '''\n Tested on\n * https://www.express.co.uk/news/world/1369648/India-news-mystery-illness-coronavirus-covid-Andhra-Pradesh-eluru-disease-cause-ont\n <h3>OFFICIALS in India are reportedly seeking to manage panic in the Indian state of Andhra Pradesh due to a mysterious illness spreading in the district.</h3>\n * https://www.express.co.uk/news/politics/1383306/Brexit-live-latest-brexit-deal-Northern-Ireland-customs-boris-johnson-john-redwood\n <h3>A HUGE new fishing row has erupted between Scottish fishermen anf the UK Government, with BBC News Political Editor Laura Kuenssberg warning: \"This could get messy.\"</h3>\n '''\n h3_description = html.find('h3')\n description_list.append(self.text_cleaning(h3_description.text))\n\n with suppress(Exception):\n '''\n Tested on\n * https://www.independent.co.uk/arts-entertainment/tv/news/ratched-netflix-trigger-warning-child-abuse-suicide-violence-sarah-paulson-b571405.html\n <h2 class=\"sc-qYhdC bflsCm\"><p>Despite presence of warning over graphic content, fans have called for more</p></h2>\n * https://www.independent.co.uk/arts-entertainment/tv/news/bridgerton-violet-actor-ruth-gemmell-tracy-beaker-b1780757.html\n <h2 class=\"sc-oTcDH eZHAcN\"><p>Gemmell starred in the 2004 CBBC film Tracy Beaker: The Movie of Me</p></h2>\n '''\n header_id_articleheader = html.find('header',\n {'id': 'articleHeader'})\n header_two = header_id_articleheader.find('h2')\n description_list.append(self.text_cleaning(header_two.text))\n\n with suppress(Exception):\n '''\n Tested on\n * https://scroll.in/article/979318/what-is-the-extent-of-caste-segregation-in-indian-villages-today-new-data-gives-us-an-idea\n <h2>‘The extent of intra-village segregation in Karnataka is greater than the local black-white segregation in the American South.’</h2>\n * https://scroll.in/latest/979410/khichdification-ima-demands-withdrawal-of-move-allowing-ayurveda-doctors-to-perform-surgery\n <h2>The medical body said that the move should not be seen in isolation, referring to other government decisions ‘legitimising Mixopathy’.</h2>\n '''\n 
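# last-resort fallback: take the first <h2> that follows the page's <header> element\n            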
header = html.find('header')\n description_list.append(\n self.text_cleaning(header.find_next('h2').text))\n\n with suppress(Exception):\n '''\n Tested on\n * https://www.euronews.com/2020/12/08/charlie-hebdo-trial-prosecutors-request-30-year-sentence-for-fugitive-widow-of-attacker\n <script type=\"application/ld+json\"... '@graph': [\"description\": \"Prosecutors have asked for sentences ranging from 5 years to life imprisonment for the defendants in the Charlie Hebdo trial, including the fugitive widow of one of the attackers.\"...]...>\n * https://www.euronews.com/2020/12/08/france-s-next-aircraft-carrier-to-be-nuclear-powered-macron-confirms\n <script type=\"application/ld+json\"... '@graph': [\"description\": \"France's current flagship warship is to be retired in 2038. It will be replaced by a bigger, nuclear-powered model, Macron said on Tuesday.\"...]...>\n '''\n first_script = html.find('script', {'type': 'application/ld+json'})\n data = json.loads(first_script.string, strict=False)\n description_list.append(\n self.text_cleaning(data['@graph'][0]['description']))\n\n with suppress(Exception):\n scripts = html.find_all('script', {'type': 'application/ld+json'})\n scripts = [script for script in scripts if script is not None]\n for script in scripts:\n with suppress(Exception):\n '''\n Tested on\n * https://www.espncricinfo.com/story/ipl-2020-jofra-archer-thriving-in-different-type-of-pressure-at-ipl-says-rajasthan-royals-team-mate-jos-buttler-1234126\n <script type='application/ld+json'...\"description\":\"Fifty-over cricket must take a back seat in build-up to T20 World Cup, says senior batsman\"...>\n '''\n data = json.loads(script.string, strict=False)\n if isinstance(data, list):\n data = data[0]\n if data[\"@type\"] == \"NewsArticle\" or data[\n \"@type\"] == \"WebPage\":\n if data[\"description\"]:\n description_list.append(\n self.text_cleaning(data[\"description\"]))\n with suppress(Exception):\n data = json.loads(script.string, strict=False)\n if data[\"@type\"] == \"NewsArticle\":\n if isinstance(data[\"video\"], list):\n description_list.append(\n self.text_cleaning(\n data[\"video\"][0][\"description\"]))\n elif not isinstance(data[\"video\"], list):\n description_list.append(\n self.text_cleaning(\n data[\"video\"][\"description\"]))\n description_list = [\n description for description in description_list\n if description != ''\n ]\n if not description_list:\n return \" \"\n best_description = max(sorted(set(description_list)),\n key=description_list.count)\n return best_description", "def start_requests(self):\n try:\n # query = \"select website from company where status is null ORDER BY id asc limit 10000\"\n query = \"select website from company2 where web_addr = 'Web Address';\"\n self.cursor.execute(query)\n numrows = self.cursor.rowcount\n for x in xrange(0, numrows):\n row = self.cursor.fetchone()\n detailed_pag_url = str(row[0])\n yield scrapy.Request(url=detailed_pag_url, callback=self.parse)\n except MySQLdb.Error, e:\n print(\"Database connection Error\", e)", "def parse(self, response):\n content = response.body\n if not content:\n return\n sel = Selector(response)\n #print sel.xpath('//table[@class=\"board-list tiz\"]/tr').extract()\n for job in sel.xpath('//ul[@class=\"sojob-list\"]/li'):\n #print 'd',job\n info = job.xpath('div[@class=\"sojob-item-main clearfix\"]/div[@class=\"job-info\"]')\n com_info = job.xpath('div[@class=\"sojob-item-main clearfix\"]/div[@class=\"company-info nohover\"]')\n title = info.xpath('h3/a/text()').extract_first().lower()\n 
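# str.find returns -1 when the substring is absent, so only python-related postings pass\n            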
if title.find('python') != -1:\n url = info.xpath('h3/a/@href').extract_first()\n request = scrapy.Request(url=url,\n callback=self.parse_items,\n headers=self.spider.headers,\n cookies=self.cookies)\n company_item, job_item = CompanyItem(), JobItem()\n company_item['name'] = com_info.xpath('p[@class=\"company-name\"]/a/text()').extract_first()\n company_item['homepage'] = com_info.xpath('p[@class=\"company-name\"]/a/@href').extract_first()\n job_item['pub_time'] = info.xpath('p[@class=\"time-info clearfix\"]/time/text()').extract_first()\n year = str(date.today().year)\n if str(year) not in job_item['pub_time']:\n if job_item['pub_time'] == u'昨天':\n job_item['pub_time'] = (date.today()-timedelta(days=1)).strftime(\"%Y-%m-%d\")\n elif job_item['pub_time'] == u'前天':\n job_item['pub_time'] = (date.today() - timedelta(days=2)).strftime(\"%Y-%m-%d\")\n else:\n job_item['pub_time'] = date.today().strftime(\"%Y-%m-%d\")\n job_item['title'] = title\n job_item['welfare'] = ' '.join(com_info.xpath('p[@class=\"temptation clearfix\"]/span/text()').extract())\n job_item['salary'] = info.xpath('p[@class=\"condition clearfix\"]/span[@class=\"text-warning\"]/text()').extract_first()\n request.meta['company_item'] = company_item\n request.meta['job_item'] = job_item\n yield request", "def test_get_all_company_props_using_get(self):\n pass", "def pull_companies(cls, soup):\n companies = []\n parents = soup.findAll('div', 'row job-information')\n for parent in parents:\n try:\n temp = parent.find('div', 'columns large-2 medium-3 small-12').find('h4')\n except AttributeError:\n companies.append(None)\n else:\n if temp.a:\n # Company name is sometimes wrapped in anchor tag\n companies.append(temp.find('a').contents[0].strip())\n else:\n companies.append(temp.contents[0].strip())\n return companies", "def main():\n proxy = get_random_proxy()\n html = crawl(target_url)\n company_all_url = html.xpath('//*[@id=\"quotesearch\"]/ul/li/a/@href')\n code=['none']*len(company_all_url)\n for i in range(len(company_all_url)):\n s = str(str(company_all_url[i]))\n code[i]=s[(len(s) - 13):(len(s) - 5)]\n save_to_neo4j(code,0,len(code))", "def __load_company_data(self):\n\n for ticker_type, ticker_list in self.tickers.items():\n # yfinance only has sector, industry and country for stocks\n if ticker_type == \"STOCK\":\n for ticker in ticker_list:\n # Only gets fields for tickers with missing data\n # TODO: Should only get field missing for tickers with missing data\n # now it's taking the 4 of them\n if (\n self.__transactions.loc[\n self.__transactions[\"Ticker\"] == ticker,\n [\"Sector\", \"Industry\", \"Country\", \"Region\"],\n ]\n .isnull()\n .values.any()\n ):\n # Get ticker info in list [\"Sector\", \"Industry\", \"Country\", \"Region\"] from isin/ticker\n info_list = get_info_from_ticker(ticker)\n\n # Replace fields in transactions\n self.__transactions.loc[\n self.__transactions.Ticker == ticker,\n [\"Sector\", \"Industry\", \"Country\", \"Region\"],\n ] = info_list\n\n elif ticker_type == \"CRYPTO\":\n for ticker in ticker_list:\n if (\n self.__transactions.loc[\n self.__transactions[\"Ticker\"] == ticker,\n [\"Sector\", \"Industry\", \"Country\", \"Region\"],\n ]\n .isnull()\n .values.any()\n ):\n # Get ticker info in list [\"Sector\", \"Industry\", \"Country\", \"Region\"]\n info_list = [\"Crypto\", \"Crypto\", \"Crypto\", \"Crypto\"]\n\n # Replace fields in transactions\n self.__transactions.loc[\n self.__transactions.Ticker == ticker,\n [\"Sector\", \"Industry\", \"Country\", \"Region\"],\n ] = info_list\n\n 
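# all other asset types get dash placeholders so these columns are never left null\n                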
else:\n                for ticker in ticker_list:\n                    if (\n                        self.__transactions.loc[\n                            self.__transactions[\"Ticker\"] == ticker,\n                            [\"Sector\", \"Industry\", \"Country\", \"Region\"],\n                        ]\n                        .isnull()\n                        .values.any()\n                    ):\n                        # Get ticker info in list [\"Sector\", \"Industry\", \"Country\", \"Region\"]\n                        info_list = [\"-\", \"-\", \"-\", \"-\"]\n\n                        # Replace fields in transactions\n                        self.__transactions.loc[\n                            self.__transactions.Ticker == ticker,\n                            [\"Sector\", \"Industry\", \"Country\", \"Region\"],\n                        ] = info_list", "def crawl(url):\n    while True:\n        try:\n            proxy=get_random_proxy()\n            proxies = {'http': 'http://' + proxy}\n            logger.info(proxies)\n            resp = requests.get(url, proxies=proxies,timeout=3) # set the proxy and fetch each company's link\n            resp.encoding = resp.apparent_encoding # so the response body decodes correctly\n            if resp.status_code==200:\n                html = etree.HTML(resp.text)\n                logger.info(\"successfully fetched the company info url!!!\")\n                break\n            else:\n                continue\n        except:\n            logger.info(\"fetch failed\")\n            continue\n    return html", "def get_info(self):\r\n\r\n        self.driver.get(WEBSITE)\r\n        time.sleep(3)\r\n        self.driver.find_element_by_xpath(\"\"\"//*[@id=\"modalContent\"]/div/button/i\"\"\").click()\r\n        time.sleep(3)\r\n        #gets prices and appends to list\r\n        all_prices = self.driver.find_elements_by_class_name(\"firstPrice\")\r\n        for price in all_prices:\r\n            text = price.text\r\n            new_p = text.replace(\".\", \"\")\r\n            price_int = int(new_p.split(\" \")[1])\r\n            self.price_list.append(price_int)\r\n        #gets addresses\r\n        all_addresses = self.driver.find_elements_by_class_name(\"postingCardLocationTitle\")\r\n        for address in all_addresses:\r\n            self.address_list.append(address.text)\r\n        print(self.address_list)\r\n        # gets info\r\n        ad_info = self.driver.find_elements_by_css_selector(\"a.go-to-posting\")\r\n        for info in ad_info:\r\n            links = info.get_attribute('href') #gets href link inside the css\r\n            self.all_links.append(links)\r\n            self.all_info.append(info.text)\r\n\r\n        # Just for tests\r\n        print(self.price_list)\r\n        print(self.all_info)\r\n        print(self.all_links)", "def test_get_all_companies(self):\n        create_company()\n        res = self.client.get(ALL_COMPANIES_LIST)\n        self.assertEqual(res.status_code, status.HTTP_200_OK)", "def get_company_info(company_name):\n\n    # Fix formatting of name\n    co = company_name.replace(\".\", \"\").replace(\" \", \"%20\")\n\n    query = f\"http://d.yimg.com/autoc.finance.yahoo.com/autoc?query={co}\\\n        &region=1&lang=en&callback=YAHOO.Finance.SymbolSuggest.ssCallback\"\n\n    response = requests.get(query)\n\n    fdata = response.text.split(\"(\", 1)[1]\n    fdata = fdata.rsplit(\")\", 1)[0]\n    data = json.loads(fdata)\n    yahoo_json = data[\"ResultSet\"][\"Result\"]\n\n    return yahoo_json", "def get_headlines(outlet):\n    if outlet == \"BBC\":\n        parser = news_parser.BBC(\"https://www.bbc.co.uk\")\n    elif outlet == \"DailyMail\":\n        parser = news_parser.DailyMail(\"https://www.dailymail.co.uk\")\n    elif outlet == \"Guardian\":\n        parser = news_parser.Guardian(\"https://www.theguardian.com\")\n    elif outlet == \"Metro\":\n        parser = news_parser.Metro(\"https://www.metro.co.uk\")\n    elif outlet == \"Mirror\":\n        parser = news_parser.Mirror(\"https://www.mirror.co.uk/news/\")\n    elif outlet == \"Reuters\":\n        parser = news_parser.Reuters(\"https://uk.reuters.com\")\n    elif outlet == \"Sun\":\n        parser = news_parser.Sun(\"https://www.thesun.co.uk\")\n    elif outlet == \"Independent\":\n        parser = news_parser.Independent(\"https://www.independent.co.uk\")\n    else:\n        parser = news_parser.BBC(\"https://www.bbc.co.uk/news\")\n    \n    index = outlets.index(outlet)\n    url_list = []\n    while len(url_list) < 50:\n        opts = {\n            
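# assumed Aylien-style story-search options; ids is a module-level list of source ids parallel to outlets\n            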
'language': ['en'],\n 'source_id': [ids[index]],\n 'published_at_start':'NOW-1DAY',\n 'published_at_end':'NOW',\n 'sort_by': 'hotness',\n 'sort_direction': 'desc',\n 'cursor': '*',\n 'per_page': 100\n }\n\n try:\n api_response = api_instance.list_stories(**opts)\n for story in api_response.stories:\n url = story.links.permalink\n if url:\n url_list.append(url)\n except ApiException as e:\n print(\"Exception when calling DefaultApi->list_stories: %s\\n\" %e)\n \n opts['cursor'] = api_response.next_page_cursor\n \n url_list = url_list[:50]\n \n articles_list = []\n for url in url_list:\n raw_article = parser.get_article(url)\n if raw_article is not None:\n articles_list.append(raw_article)\n\n articles = []\n for article in articles_list:\n parsed_article = parser.parse(article)\n if parsed_article is not None:\n articles.append(parsed_article)\n \n if len(articles) > 30:\n articles = articles[:30]\n\n return articles", "def save_company_names(self,reload = False):\n #this is a security measure such that the companies can not be reloaded by fault.\n if not reload:\n return\n\n # Get the html of the Wikipedia site to extract the table\n website_url = requests.get(\"https://en.wikipedia.org/w/index.php?title=List_of_S%26P_500_companies&oldid=895655255\").text\n html_site = BeautifulSoup(website_url, 'lxml')\n\n # Extract the table\n SP_Table = html_site.find('table',{'class':'wikitable sortable'})\n \n # Extract the rows of the table\n rows = SP_Table.findAll('tr')\n \n # Extract for each row in rows the second value as this is the wanted symbol\n df = pd.DataFrame(columns=['Symbol', 'FullName', 'CSVName', 'Sector'])\n for row in rows[1:]:\n # Extract the company names\n companyFullName = row.findAll('td')[1].text\n # Extract the company csv names\n companyCSVName = companyFullName.replace('*', ' ')\n # Extract the company symbols\n companySymbol = row.findAll('td')[0].text\n companySymbol = ''.join(companySymbol.split())\n sector = row.findAll('td')[3].text\n df1 = pd.DataFrame([[companySymbol, companyFullName, companyCSVName, sector]], columns=df.columns)\n df = df.append(df1, ignore_index=True)\n \n df['Sector'] = df['Sector'].apply(lambda x: x.replace('\\n', ''))\n df.to_csv(self.PATH_TO_COMPANY_FILES + '/Companies.csv', index=False)\n\n return", "def feed_company_from_db2(output_data, domain):\n companyfc = CompanyFromdb2.objects.using('db2').filter(\n domain__iexact=domain\n )[0]\n\n if companyfc.founded:\n output_data['founded'] = companyfc.founded\n\n if companyfc.name:\n output_data['name'] = companyfc.name\n\n if companyfc.address:\n output_data['formatted_address'] = \"%s, %s\" % (\n companyfc.address,\n french_to_english_country_mapping_dict.get(\n companyfc.country,\n companyfc.country\n )\n )\n\n if companyfc.telephone:\n output_data['phone'] = companyfc.telephone\n\n if companyfc.website:\n analyzed_url = urlparse(companyfc.website)\n if analyzed_url.netloc and analyzed_url.scheme:\n website_url = \"%s://%s\" % (\n analyzed_url.scheme,\n analyzed_url.netloc\n )\n elif analyzed_url.netloc and not analyzed_url.scheme:\n website_url = analyzed_url.netloc\n else:\n website_url = analyzed_url.path\n output_data['website_url'] = website_url\n\n if companyfc.size and company_size_mapping_dict.get(companyfc.size):\n output_data['size'] = company_size_mapping_dict.get(companyfc.size)\n\n if companyfc.description:\n output_data['description'] = companyfc.description.replace(\n '\\n',\n ' '\n ).replace(\n '\\r',\n ''\n )\n\n if companyfc.updated_on:\n output_data['last_updated'] = 
companyfc.updated_on\n\n try:\n companysocialprofilefc = CompanySocialProfileFromdb2.objects.using(\n 'db2'\n ).filter(\n company__domain=domain\n )[0]\n output_data['linkedin_url'] = companysocialprofilefc.url\n except IndexError:\n pass\n\n return output_data", "def get_companies(self):\n response = self.do_request('/undertaking/list')\n if response:\n return response.json()", "def read_companies():\n list_of_companies = data_manager.get_data_from_file(filename=\"company/company_data.csv\")\n return list_of_companies", "def companies_list_reload_page(**kwargs):\n channel_layer = get_channel_layer()\n\n async_to_sync(channel_layer.group_send)(\n 'companies_list',\n {\n 'type': 'reload_page',\n 'reload_page': True,\n }\n )", "def download_all():\n task = load_jt('task.json')\n data = load_jt('data.json')\n spider = Crawler()\n \n for _, v in task.iteritems():\n disease_name = v['data']['disease_name']\n data.setdefault(disease_name, {})\n for url, v1 in ignore_iteritems(v, ignore = ['data']):\n print url\n html = spider.html(url)\n if html:\n soup = BS4(html)\n div = soup.find('div', id='main-content')\n data[disease_name].setdefault(v1['data']['category'], str(div))\n dump_jt(data, 'data.json', fastmode = True, replace = True)", "def create_company_df(companies):\n\n companies = list(set(companies)) # removes duplicates\n\n symbols = []\n exchanges = []\n ynames = []\n is_us = []\n\n for company in companies:\n sym, exch, yco, usa = check_usa_mkts(get_company_info(company))\n symbols.append(sym)\n exchanges.append(exch)\n ynames.append(yco)\n is_us.append(usa)\n\n marketcaps = []\n sizes = []\n urls = []\n urls_pr = []\n\n for sym, co in zip(symbols, companies):\n if sym == \"n/a\":\n print(f\"Skipping {co}\\n\")\n marketcaps.append(\"n/a\")\n sizes.append(\"n/a\")\n urls.append(\"n/a\")\n urls_pr.append(\"n/a\")\n continue\n\n print(f\"Checking {co} [{sym}]\")\n marketcap = get_market_cap(sym)\n size = id_company_size(marketcap)\n url = get_company_url(sym)\n url_pr = get_press_release_page(url)\n\n marketcaps.append(marketcap)\n sizes.append(size)\n urls.append(url)\n urls_pr.append(url_pr[0])\n\n print(\"Search complete\")\n\n df = pd.DataFrame(\n {\n \"Company\": companies,\n \"Yahoo Listed Co.\": ynames,\n \"Symbol\": symbols,\n \"Exchange\": exchanges,\n \"Market Cap\": marketcaps,\n \"Company Size\": sizes,\n \"Is American\": is_us,\n \"Home URL\": urls,\n \"Press Release URL\": urls_pr,\n }\n )\n\n return df", "def test_autocomplete_companies_urls(self):\n r = self.base_check_request(\"get\", \"autocomplete/companies/\")\n self.assertIsInstance(r, list)\n self.assertEqual(len(r), 10, \"Invalid default count\")\n\n ac_keys = ['id', 'name', 'type_name']\n for ac in r:\n # check response objects structure\n self.assertListEqual(sorted(list(ac.keys())), ac_keys)\n\n # check response types\n self.assertIsInstance(ac['name'], str)\n self.assertIsInstance(ac['type_name'], str)\n self.assertIsInstance(ac['id'], int)", "def populate_dataframes(self, only_the_company_df=False, only_found_urls=False):\n start = self.kvk_range_process.start\n stop = self.kvk_range_process.stop\n if self.kvk_range_process.selection is not None:\n kvk_selection = self.kvk_range_process.selection\n elif self.kvk_selection is not None:\n kvk_selection = self.kvk_selection\n else:\n kvk_selection = None\n\n if self.rescan_missing_urls:\n sql_command = f\"select {COMPANY_ID_KEY}, count(*)-count({BESTAAT_KEY}) as missing \"\n sql_command += \"from web_site\"\n sel, sql = read_sql_table(table_name=\"web_site\", 
connection=self.database,\n variable=COMPANY_ID_KEY, lower=start, upper=stop,\n sql_command=sql_command, group_by=COMPANY_ID_KEY)\n missing = sel[sel[\"missing\"] > 0]\n selection = list(missing[COMPANY_ID_KEY].values)\n else:\n selection = kvk_selection\n\n sql_table, sql_command = read_sql_table(table_name=\"company\", connection=self.database,\n variable=KVK_KEY, datetime_key=DATETIME_KEY,\n lower=start,\n upper=stop, max_query=self.maximum_entries,\n force_process=self.force_process,\n older_time=self.older_time,\n selection=selection)\n self.company_df = sql_table\n self.company_df.set_index(KVK_KEY, inplace=True, drop=True)\n self.company_df.sort_index(inplace=True)\n\n # convert the timezone of the date/time stamp (which is stored in utc in sql) to our time\n # note that you need to use the dt operator before converting the date/times\n try:\n self.company_df[DATETIME_KEY] = \\\n self.company_df[DATETIME_KEY].dt.tz_convert(self.timezone)\n except AttributeError:\n logger.debug(\"Could not convert the date times in the company table. Probably empty\")\n\n if not only_the_company_df:\n sql = None\n var = None\n sel = None\n if selection is None:\n sql = re.sub(\"from company\", \"from address\", sql_command)\n logger.debug(f\"External sql command: {sql}\")\n else:\n var = KVK_KEY\n sel = list(self.company_df.index.values)\n\n self.address_df, sc = read_sql_table(table_name=\"address\", connection=self.database,\n sql_command=sql, variable=var, selection=sel)\n if selection is None:\n sql = re.sub(\"from company\", \"from web_site\", sql_command)\n sql = re.sub(f\"where {KVK_KEY}\", f\"where {COMPANY_ID_KEY}\", sql)\n sql = re.sub(f\"order by {KVK_KEY}\", f\"order by {COMPANY_ID_KEY}\", sql)\n logger.debug(f\"External sql command: {sql}\")\n else:\n var = COMPANY_ID_KEY\n self.website_df, sc = read_sql_table(table_name=\"web_site\", connection=self.database,\n sql_command=sql, variable=var,\n lower=start, upper=stop, selection=sel)\n self.website_df.rename(columns={COMPANY_ID_KEY: KVK_KEY, URL_ID_KEY: URL_KEY},\n inplace=True)\n\n self.website_df.loc[:, DISTANCE_STRING_MATCH_KEY] = None\n\n if only_found_urls:\n url_selection = list(self.website_df[URL_KEY].values)\n else:\n url_selection = None\n\n self.url_df, sc = read_sql_table(table_name=\"url_nl\", connection=self.database,\n variable=URL_KEY, selection=url_selection)\n\n self.url_df.set_index(URL_KEY, inplace=True, drop=True)\n self.url_df.sort_index(inplace=True)\n try:\n self.url_df[DATETIME_KEY] = self.url_df[DATETIME_KEY].dt.tz_convert(self.timezone)\n except AttributeError:\n logger.debug(\"Could not convert the date times in the url table. 
Probably empty\")", "async def get_contents(self):\n if self._recon < 0:\n raise ValueError('Reconnection time needs to be positive!')\n urls = self._urls\n proxy_list = await self._pool.get_proxies(self._recon + 1)\n \n for count in range(self._recon + 1):\n proxy = proxy_list[count]\n if count > 0: # perform reconnection\n if not self._re_urls:\n print('No need to reconnect.')\n break\n else:\n if count == 1:\n print('Reconnecting...')\n print('\\n----------------------------------------------------------')\n print(ordinal[count].capitalize() + ' reconnection...\\n')\n urls = self._re_urls\n\n result_list = await self._connect(urls, proxy=proxy, which_site=True)\n\n self._re_urls.clear() # empty the reconnect urls list \n for result in result_list:\n url, soup, status, site = result\n if not self._error(url, soup, status, site, True):\n self._result += self._get_plain_text(url, soup, site)\n fail_num = len(self._re_urls)\n if count == self._recon:\n print('Failed to crawl ' + str(fail_num) + (' website.' if fail_num==1 else ' websites.'))\n\n self._result = re.sub(r'\\s+', '', self._result) # trim whitespaces\n self._result = self._rm_duplicate(self._result)", "def scrap_ticker(self, ticker: str, stock_list: None):\n logging.info(f'Scrap for Ticker {ticker}')\n print(ticker)\n r = requests.Response()\n\n self.main_dict['Ticker'] = ticker\n if stock_list is not None:\n self.main_dict['Description'] = stock_list[ticker]\n\n for u, site in self.urls.items():\n logging.info(f'Scrapping Site: {u}, ticker: {ticker}')\n # headers = {'accept': '*/*', 'User-Agent': ua.random}\n headers = random_headers()['headers']\n proxy = random_headers()['proxies']\n\n section = dict(self.config[u])\n # r = self.scraper.get(site.replace('{ticker}', ticker), headers=headers, proxies=proxy)\n try:\n r = requests.get(site.replace('{ticker}', ticker), headers=headers, proxies=proxy)\n count = 1\n while r.status_code != 200 and count <= 3:\n time.sleep(7)\n logging.error(f'Have status {r.status_code} from \"{u.upper()}\" try again')\n headers = random_headers()['headers']\n proxy = random_headers()['proxies']\n # r = self.scraper.get(site.replace('{ticker}', ticker), headers=headers, proxies=proxy)\n r = requests.get(site.replace('{ticker}', ticker), headers=headers, proxies=proxy)\n\n print(f'repeat request {count}')\n print(r.status_code)\n count += 1\n\n except Exception as e:\n logging.error(\n f'Exception while get acces to {u.upper()}, response: {r.status_code} proxy: {proxy}, {e}, ')\n\n finally:\n logging.info('Scraping page was is OK')\n\n for attr, path in section.items():\n mask = path\n atr = attr.title()\n\n try:\n if re.search(mask, r.text) is not None:\n print(atr, ':', re.search(mask, r.text).groups()[0])\n self.main_dict[atr] = re.search(mask, r.text).groups()[0]\n else:\n print(atr, ':', 'None')\n print(f'Warning!!! {ticker}, {u.upper()},')\n logging.warning(f'Can\\'t read \"{attr.upper()}\" from {u.upper()}. Request: {r.status_code}')\n self.main_dict[atr] = '-'\n\n except Exception as e:\n print(f'Exception!!!Ticker: {ticker}, Site: \"{site.upper}\", \"{attr.upper()}\" Message: {e}')\n logging.error(f'Exception!!! 
{ticker}, \"{site.upper()}\", \"{attr.upper()}\", message {e}')\n self.main_dict[attr.upper()] = '-'\n\n self.main_dict['Description'] = stock_list[ticker]\n logging.debug(f'All ROW: {self.main_dict}')\n return self.main_dict", "def get_company_info(company_no):\n in_ = 'curl -s -X GET -u yLwgnyHvwlYxkbOBAoLEwsaEfVQ_a7kAuCUTNtSt: https://api.companieshouse.gov.uk/company/{}/officers?q=Officers&items_per_page=100&start_index=0'.format(company_no).split()\n\n out = subprocess.check_output(in_)\n res = json.loads(out.decode('utf8'))\n ret = res['items']\n \n return ret", "def parse_items(self,response):\n sel = Selector(response)\n item = response.meta['job_item']\n company_item = response.meta['company_item']\n company_item['introduction'] = sel.xpath('//div[@class=\"job-item main-message noborder\"]/div[@class=\"content content-word\"]/text()').extract_first()\n company_item['address'] = sel.xpath('//div[@class=\"company-infor\"]/p/text()').extract_first()\n item['link'] = response.url\n item['requirement'] = sel.xpath('//div[@class=\"content content-word\"][1]/text()').extract_first()\n item['website_id'] = 7\n item['company'] = company_item\n print item\n yield item", "def main():\n category_list = []\n h = httplib2.Http('.cache')\n websites = [\"http://www.goodreads.com/genres/list?page=1\", \"http://www.goodreads.com/genres/list?page=2\",\n \"http://www.goodreads.com/genres/list?page=3\"]\n for website in websites:\n response, content = h.request(website)\n if response.status != 200:\n print(\"Status code \", response.status)\n return\n soup = BeautifulSoup(content, 'html.parser')\n data = soup.find_all(\"a\", class_=\"mediumText actionLinkLite\")\n for x in data:\n category_list.append(str(x.string))\n\n data = \"category_list = \" + str(category_list)\n\n with open(\"InitialDataExtraction/category_list.py\", mode='w', encoding=\"utf-8\") as a_file:\n a_file.write(data)\n print(len(category_list))", "def feed_company_from_clearbit(\n output_data,\n domain=None,\n cbcompany=None\n):\n if not cbcompany:\n cbcompany = ClearbitCompany.objects.filter(domain__iexact=domain)[0]\n\n if cbcompany.founded_year:\n # Need to cast to str because Clearbit formats it as\n # an integer.\n output_data['founded'] = str(cbcompany.founded_year)\n\n if cbcompany.name:\n output_data['name'] = cbcompany.name\n\n if cbcompany.location:\n output_data['formatted_address'] = cbcompany.location\n\n if cbcompany.phone:\n output_data['phone'] = cbcompany.phone\n\n # Always have a domain.\n output_data['website_url'] = \"http://%s\" % cbcompany.domain\n\n if cbcompany.metrics_employees_range:\n output_data['size'] = cbcompany.metrics_employees_range\n\n if cbcompany.description:\n output_data['description'] = cbcompany.description\n\n if cbcompany.indexed_at:\n output_data['last_updated'] = cbcompany.indexed_at\n\n if cbcompany.linkedin_handle:\n output_data['linkedin_url'] = \"https://www.linkedin.com/%s\" % (\n cbcompany.linkedin_handle\n )\n\n if cbcompany.category_sub_industry:\n # cbcompany.category_sub_industry always contains 1 item\n # but our output_data['industries'] should return a list\n # so we put it in a list.\n output_data['industries'] = [cbcompany.category_sub_industry]\n\n try:\n # We have a list of emails in db but output_data['email']\n # should only return 1 email so we take the first email in\n # the list.\n csea = ClearbitCompanySiteEmailAddress.objects.filter(\n clearbit_company=cbcompany\n )[0]\n output_data['email'] = csea.email\n except IndexError:\n pass\n\n return output_data", "def 
fetch_self(self):\r\n self.parsed_doc['names'] = self.fetch_candidate_name() \r\n self.parsed_doc['phones'] = self.fetch_phone_numbers() \r\n self.parsed_doc['emails'] = self.fetch_emails() \r\n self.parsed_doc['github'] = self.fetch_github() \r\n self.parsed_doc['linkedin'] = self.fetch_linkedin() \r\n self.parsed_doc['degrees'] = self.fetch_degrees() \r\n self.parsed_doc['skills'] = self.fetch_skills() \r\n self.parsed_doc['education'] = self.fetch_education() \r\n self.parsed_doc['languages'] = self.fetch_languages() \r\n self.parsed_doc['addresses'] = self.fetch_address() \r\n self.parsed_doc['raw_resume'] = self.stringtext", "def get_suggestions(db_company):\n if db_company.archived:\n return {}\n\n names = [\n db_company.name,\n *db_company.trading_names,\n ]\n\n data = [\n *itertools.chain(\n *[name.split(' ') for name in names],\n ),\n *names,\n ]\n\n countries = [\n db_company.registered_address_country_id,\n db_company.address_country_id,\n ]\n\n return {\n 'input': get_unique_values_and_exclude_nulls_from_list(data),\n 'contexts': {\n 'country': get_unique_values_and_exclude_nulls_from_list(countries),\n },\n }", "def get_available_companies_and_people(team):", "def parse_url(self, url: str):\n time.sleep(0.1)\n resp = requests.get(url, timeout=5).content.decode('windows-1250')\n selector = Selector(text=resp)\n name_addresses = []\n if not self.is_right_page(selector):\n return []\n\n company = self.parse_business_name(selector)\n name_addresses += self.parse_management_body(selector)\n name_addresses += self.parse_partners(selector)\n\n ret = []\n for name_address in name_addresses:\n name_address = [re.sub(r'[\",;]', '', n).strip() for n in name_address]\n print(\"Found name: \", name_address)\n is_russian = self.RUSSIA in name_address[1]\n ret.append([re.sub(r'[\",;]', '', company).strip()] + name_address + [is_russian])\n return ret", "def get_companies(request):\n try:\n companies = []\n for company in Company.objects.all():\n companies.append(company.dump_to_dict())\n\n return format_ajax_response(True, \"Companies list retrieved successfully.\", {'companies': companies})\n except Exception as ex:\n logging.error(\"failed to get_companies: %s\" % ex)\n return format_ajax_response(False, \"There was a problem retrieving the companies listing.\")", "def get_suggestions(db_company):\n if db_company.archived:\n return []\n\n names = [\n db_company.name,\n *db_company.trading_names,\n ]\n\n data = [\n *itertools.chain(\n *[name.split(' ') for name in names],\n ),\n *names,\n ]\n\n return list(filter(None, set(data)))", "def scrap_page(url, date):\n response = requests.get(url, headers=header.generate())\n print(url)\n if (response.status_code != 200):\n print(response.status_code)\n return\n \n soup = BeautifulSoup(response.text, 'html.parser')\n\n list_immo = []\n\n for a in soup.find_all('a', href=True):\n list_immo.append(a['href'])\n\n\n list_immo = [x for x in list_immo if \"ventes_immobilieres\" in x]\n list_immo = [x for x in list_immo if not \"offres\" in x]\n\n conn = create_conn()\n\n random.shuffle(list_immo)\n\n for elm in list_immo:\n time.sleep(random.randint(5, 15))\n\n # Store all info inside values_col and append to file\n values_col = []\n values_col.append(date)\n\n build_url = url_ref + elm\n print(build_url)\n response_immo = requests.get(build_url, headers=header.generate())\n if (response_immo.status_code != 200):\n print(response_immo.status_code)\n continue\n soup_immo = BeautifulSoup(response_immo.text, 'html.parser')\n\n # Description\n description = 
soup_immo.find(\"span\", class_=\"_1fFkI\").text\n        description = description.replace(',', ' ')\n        description = description.replace('\\n', ' ')\n        \n        # Price\n        try:\n            price = int(soup_immo.find(\"span\", class_=\"_3Ce01 _3gP8T _25LNb _35DXM\").text.replace(\"€\", \"\").replace(\" \", \"\"))\n        except:\n            print(build_url + \" => No price\")\n            continue\n\n        \n        # City & zip code location\n        localisation_all = soup_immo.find_all(\"h2\", class_=\"Roh2X _3c6yv _25dUs _21rqc _3QJkO _1hnil _1-TTU _35DXM\")[2]\n\n        city = localisation_all.text.split(\"(\")[0]\n        zip_code = localisation_all.text.split(\"(\")[-1].split(\")\")[0]\n\n        values_col.append(build_url)\n        values_col.append(description)\n        values_col.append(price)\n        values_col.append(city)\n        values_col.append(zip_code)\n\n        div_key_value = soup_immo.find_all(\"p\", class_=\"_2k43C _1pHkp _137P- P4PEa _3j0OU\")\n\n        for div in div_key_value:\n            if (div.text == \"Type de bien\"):\n                type_bien = div.findNext(\"p\").text\n                if type_bien == \"Maison\":\n                    type_bien = 1\n                elif type_bien == \"Appartement\":\n                    type_bien = 2\n                else:\n                    print(type_bien)\n                    break\n                values_col.append(type_bien)\n            if (div.text == \"Surface\"):\n                values_col.append(div.findNext(\"p\").text.split(\" \")[0])\n            if (div.text == \"Pièces\"):\n                values_col.append(div.findNext(\"p\").text)\n\n        # Write to csv\n        send_to_rds(values_col, conn)\n        append_list_as_row(\"data/{}.csv\".format(date), values_col)", "def companies(self):\n        from hubspot3.companies import CompaniesClient\n\n        return CompaniesClient(**self.auth, **self.options)", "def get_companies(self):\n        url = 'companies'\n        result = self.get(url)\n        return result['companies']", "def covid_fetch():\n    #Sets the structure of the data retrieved from the API\n    cases_and_deaths = {\n        \"date\": \"date\",\n        \"areaName\": \"areaName\",\n        \"areaCode\": \"areaCode\",\n        \"newCasesByPublishDate\": \"newCasesByPublishDate\",\n        \"cumCasesByPublishDate\": \"cumCasesByPublishDate\",\n        \"newDeathsByDeathDate\": \"newDeathsByDeathDate\",\n        \"cumDeathsByDeathDate\": \"cumDeathsByDeathDate\"\n    }\n    #Sets the filter for the API using config.json\n    covid_nation = ['areaType=nation']\n    nation = 'areaName=' + str(config_fetcher(\"covid_region\"))\n    covid_nation.append(nation)\n\n    #Gets API latest data\n    covid_api = Cov19API(\n        filters = covid_nation,\n        structure = cases_and_deaths,\n    )\n    #Gets data in form of dictionary\n    covid_json = covid_api.get_json()\n    #Gets timestamp for last update\n    covid_timestamp = covid_api.last_update\n    #Assign data to variables\n    covid_data = covid_json['data'] #This formats the data as a list, while I want a dictionary, hence the next line.\n    return covid_data", "def find_company(self, search = None, limit = 10):\n\n        # Connect to the FDA server\n        company = self.connect_open_fda(search, limit)\n\n        # Start of the HTML response document\n        message = ('<!DOCTYPE html>\\n'\n                   '<html lang=\"en\">\\n'\n                   '<head>\\n'\n                   '<meta charset=\"UTF-8\">\\n'\n                   '<title>Empresas FDA</title>\\n'\n                   '</head>\\n'\n                   '<body>\\n'\n                   '<ul>\\n'\n                   )\n\n        #META\n        meta = company['meta']\n        total = meta['results']['total'] # objects available in total\n        limit = meta['results']['limit'] # objects actually received\n\n        message += '<h1>Resultados </h1>'\n        message += '<h2>Se han recibido {} empresas de {}. 
</h2>'.format(limit, total)\n        message += ('<br/>')\n\n\n        #RESULTS\n        empresas = company['results']\n        for company in empresas:\n            if company['openfda']:\n                if company['openfda']['manufacturer_name']:\n                    company_name = company['openfda']['manufacturer_name'][0]\n                else:\n                    company_name = 'Desconocido'\n            else:\n                company_name = 'Desconocido'\n\n            # Add the information to the HTML response\n            message += '<li>Empresa: {}</li>'.format(company_name)\n            message += '<br/>'\n            message += '<a href=\"http://localhost:8000/\">Volver al Inicio</a>'\n\n        # Finish the HTML response\n        message +=('</ul>\\n'\n                   '</body>\\n'\n                   '</html>')\n\n        return message", "def get_companies_and_people(team):", "def make_companies():\n    logging.info(\"Making CH\")\n    companies_address = get_address()\n    companies_sector = get_sector()\n\n    companies = (\n        companies_address[[\"company_number\", \"postcode\"]]\n        .merge(\n            companies_sector.query(\"rank==1\")[[\"company_number\", \"SIC4_code\"]],\n            on=\"company_number\",\n        )\n        .assign(division=lambda x: [c[:2] for c in x[\"SIC4_code\"]])\n        .assign(division_name=lambda x: x[\"division\"].map(_DIV_NAME_LOOKUP))\n        .merge(nspl, left_on=\"postcode\", right_on=\"pcds\")\n    )\n\n    return companies", "def get_data(self):\n        def _clean_search_hit(search_hit):\n            \"\"\"\n            Takes in a search result hit as a BeautifulSoup tag and pulls out all the data to match the desired schema.\n\n            :param search_hit:\n            :return Dictionary: A dictionary with the cleaned data\n            \"\"\"\n\n            hit_name = search_hit.find(class_='hit-name')\n            hit_url = hit_name.get('href')\n            hit_id = hit_url.split('/')[-1]\n            name = hit_name.get_text().split(',')[0].title().split()\n\n            current_city = search_hit.find(class_='hit-location').get_text().upper()\n\n            # Find all Addresses for search result.\n            try:\n                address = search_hit.find(class_='hit-pastAddresses').find_all(class_='hit-values')\n                address = list({a.text.upper().replace('.', '') for a in address})\n            except AttributeError:\n                address = list()\n\n            # find the address that is most likely the current main address.\n            try:\n                address.insert(0, address.pop(address.index(current_city)))\n            except ValueError:\n                address.insert(0, current_city)\n\n            address = [\n                {\n                    '@type': 'PostalAddress',\n                    'addressLocality': locality.title(),\n                    'addressRegion': region\n                } for locality, region in [a.split(', ') for a in address]]\n\n            work_location = {'@type': 'Place'}\n            try:\n                work_location['name'] = search_hit\\\n                    .find(class_='hit-work')\\\n                    .find(class_='hit-values')\\\n                    .get_text()\\\n                    .title()\n            except AttributeError:\n                work_location['name'] = ''\n\n            alumni_of = {'@type': 'EducationalOrganization'}\n            try:\n                alumni_of['name'] = search_hit\\\n                    .find(class_='hit-high-school')\\\n                    .find(class_='hit-values')\\\n                    .get_text().title()\n            except AttributeError:\n                pass\n\n            return {\n                '@id': hit_id,\n                '@type': 'Person',\n                'name': ' '.join(name),\n                'givenName': name[0],\n                'middleName': ' '.join(name[1:-1]),\n                'familyName': name[-1],\n                'url': hit_url,\n                'address': address,\n                'workLocation': work_location,\n                'alumniOf': alumni_of,\n            }\n\n        def _refine_search(search_str, options):\n            \"\"\"\n            Takes a list of WebElements and a search string, looks for the string in the text of each WebElement, and\n            presses the option if found. 
Returns Boolean for found status\n\n :param search_str: str of the desired option.\n :param options: list of WebElements from Beautify Soup that represents all of the available options.\n :return:\n \"\"\"\n search_str = search_str.upper()\n logging.info(f'Looking for \\'{search_str}\\'')\n try:\n for option in options:\n option_text = option.text.upper()\n logging.info(f'Option Checked: {option_text}')\n if search_str in option_text:\n option.click()\n time.sleep(2)\n logging.info(f'Option Selected: {option_text}')\n return True\n else:\n return False\n except AttributeError:\n return True\n except StaleElementReferenceException as e:\n ChromeCrash(e)\n\n with self.driver(executable_path=self.DRIVER_DIR) as driver:\n driver.get(self.url)\n\n \"\"\"\n The CSS for the page doesn't show the State nor the City selector options if the page is too narrow,\n so we need to make sure the browser is open wide enough for the CSS to make those options visible. \n \"\"\"\n driver.fullscreen_window()\n\n # Refine the search by State\n address_region = self.person.get('addressRegion', '')\n address_region = STATES.get(address_region.upper(), address_region.upper())\n region_options = driver\\\n .find_element_by_class_name(\"STATE\")\\\n .find_elements_by_class_name(\"refinementList-text\")\n\n if not _refine_search(address_region, region_options):\n return False\n\n # Narrow the search by pressing a City option\n address_locality = self.person.get('addressLocality').title()\n locality_options = driver\\\n .find_element_by_class_name(\"CITY\")\\\n .find_elements_by_class_name(\"refinementList-text\")\n\n if not _refine_search(address_locality, locality_options):\n return False\n\n \"\"\"\n The Page Loads dynamically, so we need to scroll down the page to show all the search results. It needs to\n be done in steps with a pause between movements to allow for loading. 
\n Here it will first get the current location on the page, attempt to move down the page, and then check to\n see if the location changed.\n \"\"\"\n\n if self.auto_scroll and len(driver.find_elements_by_class_name(\"ais-InfiniteHits-item\")) > 15:\n current_height, new_height = 0, driver.execute_script(\"return document.body.scrollHeight\")\n\n while new_height != current_height:\n # Scroll down to the bottom of the page\n driver.execute_script(\"window.scrollTo(0, document.body.scrollHeight);\")\n\n # Wait to load page\n time.sleep(SCROLL_PAUSE_TIME)\n\n # Calculate new scroll height and compare with last scroll height\n current_height, new_height = new_height, driver.execute_script(\"return document.body.scrollHeight\")\n\n page_source = driver.page_source\n page_soup = bs(page_source, 'html.parser')\n search_results = list(page_soup.find_all(class_='ais-InfiniteHits-item'))\n for i, search_result in enumerate(search_results):\n search_results[i] = _clean_search_hit(search_result)\n\n self.data_from_website = pd.DataFrame(search_results)\n self.data_from_website.set_index('@id', inplace=True)\n return True", "def update_yields_CGB():\n url='http://yield.chinabond.com.cn/cbweb-mn/yc/ycDetail?ycDefIds=2c9081e50a2f9606010a3068cae70001&&zblx=txy&&workTime=%s&&dxbj=0&&qxlx=0,&&yqqxN=N&&yqqxK=K&&wrjxCBFlag=0&&locale=zh_CN'%date.today() \n while datetime.now().hour<23:\n try:\n response=requests.post(url,timeout=100)\n if 'tablelist' not in response.text:\n continue\n else:\n break\n except requests.exceptions.ConnectionError:\n time.sleep(180)\n if datetime.now().hour<23:\n soup=BeautifulSoup(response.text,'html.parser')\n table=soup.find_all(\"table\",{\"class\":\"tablelist\"})[0]\n g=(td.text for td in table.find_all('td'))\n CN1d=CN1M=CN2M=CN3M=CN6M=CN9M=None\n CN1Y=CN2Y=CN3Y=CN5Y=CN7Y=CN10Y=CN15Y=CN20Y=CN30Y=CN50Y=None \n while True:\n try:\n element=next(g)\n if element=='0.0y':\n CN1d=float(next(g))\n if element=='0.08y':\n CN1M=float(next(g))\n if element=='0.17y':\n CN2M=float(next(g)) \n if element=='0.25y':\n CN3M=float(next(g))\n if element=='0.5y':\n CN6M=float(next(g))\n if element=='0.75y':\n CN9M=float(next(g))\n if element=='1.0y':\n CN1Y=float(next(g))\n if element=='2.0y':\n CN2Y=float(next(g))\n if element=='3.0y':\n CN3Y=float(next(g))\n if element=='5.0y':\n CN5Y=float(next(g))\n if element=='7.0y':\n CN7Y=float(next(g))\n if element=='10.0y':\n CN10Y=float(next(g))\n if element=='15.0y':\n CN15Y=float(next(g))\n if element=='20.0y':\n CN20Y=float(next(g))\n if element=='30.0y':\n CN30Y=float(next(g))\n if element=='50.0y':\n CN50Y=float(next(g)) \n except StopIteration:\n break\n cgb_yield_curve=CGB(id_date=date.today(),CN1d=CN1d,CN1M=CN1M,CN2M=CN2M,\n CN3M=CN3M,CN6M=CN6M,CN9M=CN9M,CN1Y=CN1Y,CN2Y=CN2Y,\n CN3Y=CN3Y,CN5Y=CN5Y,CN7Y=CN7Y,CN10Y=CN10Y,\n CN15Y=CN15Y,CN20Y=CN20Y,CN30Y=CN30Y,CN50Y=CN50Y)\n DBSession.add(cgb_yield_curve)\n transaction.commit()\n return", "def get_compo_list(self):\n super(self.__class__, self).get_compo_list()\n link = 'https://en.wikipedia.org/wiki/Dow_Jones_Industrial_Average'\n params={'Symbol':2, 'Name':0, 'Sector':3, 'Industry':3}\n self.components = get_index_components_from_wiki(link, params)\n # insert CIK\n ciks = self.update_ciks(updateall=True)\n self.components = self.components.join(ciks)\n return self.components", "def scraper_data(self):\n self.lock.acquire()\n for item in s.item:\n item_name = item.get(\"item\")\n item_url = item.get(\"url\")\n item_stock, item_cost = self.scraper.ChooseScraper(item_url)\n 
s.updateStatus(item_name, item_url, item_stock, item_cost)\n time.sleep(1)\n\n self.lock.release()", "def _fetch_data(self):\n pass", "def parse(self, response):\n ad_previews = Selector(response).xpath(\n '//div[@class=\"offers list\"]/article'\n )\n for ad in ad_previews:\n item = CarItem()\n item['source'] = self.source_name\n\n gas_type = ad.xpath(\n ('''div[@class=\"offer-item__content\"]/\n ul[@class=\"offer-item__params\"]/\n li[@data-code=\"fuel_type\"]/\n span/\n text()''')\n ).extract_first()\n if gas_type:\n item['gas_type'] = gas_type\n\n power = ad.xpath(\n ('''div[@class=\"offer-item__content\"]/\n ul[@class=\"offer-item__params\"]/\n li[@data-code=\"power\"]/\n span/\n text()''')\n ).extract_first().replace(' cv', \"\")\n if power:\n item['power'] = int(power)\n\n year = ad.xpath(\n ('''div[@class=\"offer-item__content\"]/\n ul[@class=\"offer-item__params\"]/\n li[@data-code=\"first_registration_year\"]/\n span/\n text()''')\n ).extract_first()\n if year:\n item['year'] = int(year.strip().replace(\" \", \"\"))\n\n link = ad.xpath(\n ('''div[@class=\"offer-item__content\"]/\n div[@class=\"offer-item__title\"]/\n h2[@class=\"offer-title\"]/\n a[@class=\"offer-title__link\"]/\n @href''')\n ).extract_first()\n if link:\n item['link'] = link\n\n title = ad.xpath(\n ('''div[@class=\"offer-item__content\"]/\n div[@class=\"offer-item__title\"]/\n h2[@class=\"offer-title\"]/\n a[@class=\"offer-title__link\"]/\n @title''')\n ).extract_first()\n if title:\n item['title'] = title\n\n price = ad.xpath(\n ('''div[@class=\"offer-item__content\"]/\n div[@class=\"offer-item__price\"]/\n div[@class=\"offer-price\"]/\n span[@class=\"offer-price__number\"]/\n text()''')\n ).extract_first()\n if price:\n item['price'] = int(price.strip().replace(\" \", \"\"))\n\n picture = ad.xpath(\n ('''div[@class=\"offer-item__photo \"]/\n a[@class=\"offer-item__photo-link\"]/\n @style''')\n ).extract_first()\n if picture:\n match = RegexHandler.extract_beetwen_quotes(picture)\n if match:\n item['picture'] = match\n\n location = ad.xpath(\n ('''div[@class=\"offer-item__content\"]/\n div[@class=\"offer-item__bottom-row \"]/\n span[@class=\"offer-item__location\"]/\n h4/\n em/\n text()''')\n ).extract_first()\n if location:\n item['location'] = location\n\n yield scrapy.Request(\n link,\n self.parse_content,\n meta={'item': item}\n )", "def fill_cites(self):\n response = requests.get(\"https://restcountries.eu/rest/v2/all\")\n json_content = json.loads(response.text)\n i = 0\n for t in json_content:\n currency = t[\"currencies\"][0][\"code\"]\n pop = t[\"population\"]\n state_name = t[\"name\"]\n self.cities_from_api[t[\"capital\"].lower()] = [str(state_name), str(currency), str(pop)]", "def parse_listing(keyword,place):\n\turl = \"https://www.yellowpages.com/search?search_terms={0}&geo_location_terms={1}\".format(keyword,place)\n\tprint(\"retrieving \",url)\n\n\theaders = {'Accept':'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8',\n\t\t\t\t'Accept-Encoding':'gzip, deflate, br',\n\t\t\t\t'Accept-Language':'en-GB,en;q=0.9,en-US;q=0.8,ml;q=0.7',\n\t\t\t\t'Cache-Control':'max-age=0',\n\t\t\t\t'Connection':'keep-alive',\n\t\t\t\t'Host':'www.yellowpages.com',\n\t\t\t\t'Upgrade-Insecure-Requests':'1',\n\t\t\t\t'User-Agent':'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/64.0.3282.140 Safari/537.36'\n\t\t\t}\n\t# Adding retries\n\tfor retry in range(10):\n\t\ttry:\n\t\t\tresponse = requests.get(url,verify=False, headers = headers 
)\n\t\t\tprint(\"parsing page\")\n\t\t\tif response.status_code==200:\n\t\t\t\tparser = html.fromstring(response.text)\n\t\t\t\t#making links absolute\n\t\t\t\tbase_url = \"https://www.yellowpages.com\"\n\t\t\t\tparser.make_links_absolute(base_url)\n\n\t\t\t\tXPATH_LISTINGS = \"//div[@class='search-results organic']//div[@class='v-card']\"\n\t\t\t\tlistings = parser.xpath(XPATH_LISTINGS)\n\t\t\t\tscraped_results = []\n\n\t\t\t\tfor results in listings:\n\t\t\t\t\tXPATH_BUSINESS_NAME = \".//a[@class='business-name']//text()\"\n\n\t\t\t\t\tXPATH_WEBSITE = \".//div[@class='info']//div[contains(@class,'info-section')]//div[@class='links']//a[contains(@class,'website')]/@href\"\n\n\t\t\t\t\traw_business_name = results.xpath(XPATH_BUSINESS_NAME)\n\n\t\t\t\t\traw_website = results.xpath(XPATH_WEBSITE)\n\n\n\t\t\t\t\tbusiness_name = ''.join(raw_business_name).strip() if raw_business_name else None\n\n\t\t\t\t\twebsite = ''.join(raw_website).strip() if raw_website else None\n\n\n\n\n\n\t\t\t\t\tbusiness_details = {\n\t\t\t\t\t\t\t\t\t\t'business_name':business_name,\n\n\t\t\t\t\t\t\t\t\t\t'website':website\n\n\t\t\t\t\t}\n\t\t\t\t\tscraped_results.append(business_details)\n\t\t\t\t\tprint(scraped_results)\n\t\t\t\treturn scraped_results\n\n\t\t\telif response.status_code==404:\n\t\t\t\tprint(\"Could not find a location matching\",place)\n\t\t\t\t#no need to retry for non existing page\n\t\t\t\tbreak\n\t\t\telse:\n\t\t\t\tprint(\"Failed to process page\")\n\t\t\t\treturn []\n\n\t\texcept:\n\t\t\tprint(\"Failed to process page\")\n\t\t\treturn []", "def get_artist_disco(\n browser, soup: BeautifulSoup, complementary_infos: bool\n) -> List[dict]:\n\n # artist discography\n artist_disco = []\n artist = soup.find(\"h1\", {\"class\": \"artist_name_hdr\"}).text.strip()\n logger.debug(\"Extracting discography for %s\", artist)\n disco = soup.find(\"div\", {\"id\": \"discography\"})\n logger.debug(\"Sections find_all\")\n sections = disco.find_all(\"div\", {\"class\": \"disco_header_top\"})\n for section in sections:\n category = section.find(\"h3\").text.strip()\n logger.debug(\"Section %s\", category)\n discs = section.find_next_sibling(\n \"div\", {\"id\": re.compile(\"disco_type_*\")}\n ).find_all(\"div\", {\"class\": \"disco_release\"})\n for disc in tqdm(discs, dynamic_ncols=True):\n album = disc.find(\"a\", {\"class\", \"album\"})\n url_disc = \"https://rateyourmusic.com\" + album[\"href\"]\n date = disc.find(\"span\", {\"class\": re.compile(\"disco_year_*\")})\n logger.debug(\n \"Getting information for disc %s - %s - %s\",\n artist,\n album.text.strip(),\n date.text.strip(),\n )\n dict_disc = {\n \"Artist\": artist,\n \"Category\": category,\n \"Name\": album.text.strip(),\n \"URL\": url_disc,\n \"Date\": date[\"title\"].strip(),\n \"Year\": date.text.strip(),\n \"Average Rating\": disc.find(\"div\", {\"class\": \"disco_avg_rating\"}).text,\n \"Ratings\": disc.find(\"div\", {\"class\": \"disco_ratings\"}).text.replace(\n \",\", \"\"\n ),\n \"Reviews\": disc.find(\"div\", {\"class\": \"disco_reviews\"}).text.replace(\n \",\", \"\"\n ),\n }\n if complementary_infos:\n dict_disc = get_complementary_infos_disc(browser, dict_disc, url_disc)\n artist_disco.append(dict_disc)\n return artist_disco", "def scrap_results(self):\n # Find the table\n table = self.driver.find_element_by_xpath(results_table_path)\n\n found_links = []\n # For each row the table hase\n for row in table.find_elements_by_xpath(\".//tr\"):\n elements = row.find_elements_by_xpath(\".//td\")\n # If this row is not empty\n if 
len(elements) != 0:\n # Extract the link\n entity_link = elements[0].find_element_by_xpath(\".//a\").get_attribute(\"href\")\n found_links.append(entity_link)\n\n # Randomize the list of links so each time the order is different.\n shuffle(found_links)\n\n generic_data_found = []\n activity_data_found = []\n components_data_found = []\n components_alt_data_found = []\n historical_name_data_found = []\n historical_social_capital_data_found = []\n count = 0\n # For each link found\n for link in found_links:\n # Scrap the data from this entity\n gd, act, comp, hist_name, hist_c_s = self._scrap_single_entity(link)\n\n # Update the found data variables with the new data\n generic_data_found.append(gd)\n activity_data_found += act\n if len(comp) > 0 and \"total_miembros_patronado\" in comp[0]:\n components_alt_data_found += comp\n else:\n components_data_found += comp\n historical_name_data_found += hist_name\n historical_social_capital_data_found += hist_c_s\n\n # TODO: Remove this\n if count == 2:\n pass\n\n\n count += 1\n\n # Add data to the centralized search_result variable\n self.search_results.add_generic_data(generic_data_found)\n self.search_results.add_activity_data(activity_data_found)\n self.search_results.add_components_data(components_data_found)\n self.search_results.add_components_alt_data(components_alt_data_found)\n self.search_results.add_historical_names_data(historical_name_data_found)\n self.search_results.add_historical_social_capital_data(historical_social_capital_data_found)", "def fetch_website_list(self):\r\n # Clear list\r\n self.website_list = []\r\n\r\n # Open websites overview\r\n self.browser.open(self.config[\"base_url\"] + \"websites\")\r\n\r\n # Find table and iterate over rows\r\n for table_row in self.browser.get_current_page().select(\"table tr\"):\r\n\r\n # Fetch cells\r\n cells = table_row.findAll('td')\r\n\r\n # Iterate over cells\r\n if(len(cells) > 0):\r\n\r\n # Get website ID\r\n website_id = table_row['data-id']\r\n # Get website name\r\n name = cells[1].text.strip()\r\n # Get website domain name\r\n domain = cells[2].text.strip()\r\n\r\n # Build website object\r\n website = {'id': website_id,\r\n 'name': name, 'domain': domain}\r\n\r\n # Add website object to list\r\n self.website_list.append(website)", "def _load_companies(self):\n if os.path.exists(self.PATH_TO_COMPANY_FILES + '/Companies.csv'):\n df = pd.read_csv(self.PATH_TO_COMPANY_FILES + '/Companies.csv')\n self.Symbols = list(df['Symbol'])\n self.FullNames = list(df['FullName'])\n self.CSVNames = list(df['CSVName'])\n self.sectors = list(df['Sector'])\n self.companies = df\n \n return", "def GetAllDifferentDescriptionOfCost():\n\n logs.logger.debug(\n \"Start to get back all different description of \"\n \"Cost objects from database.\")\n try:\n ListOfAllDifferentDescriptionOfCost = []\n searchedCostsItems = GetAllDescriptionOfCost()\n for item in searchedCostsItems:\n if item not in ListOfAllDifferentDescriptionOfCost:\n ListOfAllDifferentDescriptionOfCost.append(item)\n logs.logger.info(\n \"Start to get back all different description of \"\n \"Cost objects from database.\")\n return ListOfAllDifferentDescriptionOfCost\n except Exception as e:\n logs.logger.error(e, exc_info=True)", "def get_all_headlines_from_chrome(site,URL_exclusions):\r\n headlines = []\r\n #Initial URL to pass to return search:\r\n URL = 
f'https://www.google.co.uk/search?hl=en&as_q=&as_epq=&as_oq=travellers&as_eq=quarantine+travel+train+flight+tourist+archive+airport+covid+coronavirus+hotel+holiday+honeymoon&as_nlo=&as_nhi=&lr=&cr=&as_qdr=all&as_sitesearch={site}&as_occt=title&safe=active&as_filetype=&tbs='\r\n n = 0\r\n while n < 10:\r\n n += 1\r\n driver = launch_chrome()\r\n try:\r\n return_search(URL,driver)\r\n except:\r\n continue\r\n sleep_time = np.random.random() * np.random.randint(1,6) \r\n time.sleep(sleep_time) #Slow down to avoid bot detection\r\n timeout = 0\r\n start = time.time()\r\n while timeout < 120:\r\n try:\r\n page_headlines = get_headlines_from_one_page(driver,site,URL_exclusions)\r\n break\r\n except:\r\n end = time.time()\r\n timeout = end - start\r\n for headline in page_headlines:\r\n headlines.append(headline)\r\n try:\r\n next_button = driver.find_element_by_id('pnnext')\r\n URL = next_button.get_attribute('href') #Pass new URL to return_search()\r\n except NoSuchElementException:\r\n driver.quit() #Quit driver if can't find next button \r\n break\r\n driver.quit() #Quit driver each iteration to avoid triggering recaptcha.\r\n return headlines", "def get_com_data(self):\n self.form_url_str()\n if self.__print_url: print self.com_data_full_url\n self.download_json()\n self.get_datalist_fr_json()", "def fetch_data(self):", "def produce_main(self) -> list:\n self.headers_random = {'user-agent': UserAgent().random,}\n links_google, links_qwant, links_bing, \\\n links_duckduckgo, links_yahoo = self.links_search\n value_google = [[rand, self.headers_random] for rand in links_google]\n value_qwant = [[rand, self.headers_random] for rand in links_qwant]\n value_bing = [[rand, self.headers_random] for rand in links_bing]\n value_duckduckgo = [[rand, self.headers_random] for rand in links_duckduckgo]\n value_yahoo = [[rand, self.headers_random] for rand in links_yahoo]\n \n with Pool(5) as pool:\n value_html_google = pool.map(self.produce_html_text, value_google)\n value_html_qwant = pool.map(self.produce_html_text, value_qwant)\n value_html_bing = pool.map(self.produce_html_text, value_bing)\n value_html_duckduckgo = pool.map(self.produce_html_text, value_duckduckgo)\n value_html_yahoo = pool.map(self.produce_html_text, value_yahoo)\n\n value_google = [[html, link] for html, link in zip(value_html_google, links_google)]\n value_qwant = [[html, link] for html, link in zip(value_html_qwant, links_qwant)]\n value_bing = [[html, link] for html, link in zip(value_html_bing, links_bing)]\n value_duckduckgo = [[html, link] for html, link in zip(value_html_duckduckgo, links_duckduckgo)]\n value_yahoo = [[html, link] for html, link in zip(value_html_yahoo, links_yahoo)]\n \n with Pool(5) as pool:\n value_dict_google = pool.map(self.produce_parse_google, value_google)\n value_dict_qwant = pool.map(self.produce_parse_qwant, value_qwant)\n value_dict_bing = pool.map(self.produce_parse_bing, value_bing)\n value_dict_duckduckgo = pool.map(self.produce_parse_duckduckgo, value_duckduckgo)\n value_dict_yahoo = pool.map(self.produce_parse_yahoo, value_yahoo)\n \n return value_dict_google, value_dict_qwant, value_dict_bing, \\\n value_dict_duckduckgo, value_dict_yahoo", "def _generate_entities(data):\n\n i = 0\n while i < len(data):\n release_date = datetime.strptime(data[i].text, '%m/%d/%Y')\n release_date = release_date.strftime('%Y-%m-%d')\n name = data[i+1].text\n url = data[i+1].find_element_by_tag_name('a').get_attribute(\"href\")\n\n href = data[i+2].find_element_by_tag_name('a').get_attribute(\"href\")\n 
related = []\n if href:\n doc = BeautifulSoup(helpers.fetch_string(href), \"html.parser\")\n tds = doc.find_all(\"td\", class_='ms-vb')\n for td in tds:\n try:\n related.append(td.find('a')['href'])\n except AttributeError:\n pass\n \n related_documents = ' '.join(related) \n fields = [{\"name\": \"Release date\", \"value\": release_date},\n {\"tag\": \"url\", \"value\": url},\n {\"name\": \"Related documents\", \"value\": related_documents}]\n i += 3\n\n my_id = helpers.make_id(name)\n if len(my_id) > 99:\n my_id = my_id[:99]\n\n entity = {\n \"_meta\": {\n \"id\": my_id,\n \"entity_type\": \"company\"\n },\n \"fields\": fields,\n \"name\": name,\n }\n\n helpers.emit(entity)", "async def org_info_below_13(org_urls13):\n org_info_till13 = []\n project_urls_till13 = []\n for url in org_urls13:\n # General information about the org\n try:\n soup = await get_page(url)\n org_name = basename(url)\n org_info = soup.find_all('p')\n web_page = org_info[0].text.splitlines()[-1].strip()\n mailing_list = org_info[1].text.split(\":\")[-1].strip()\n detail = org_info[2].text\n org_info_till13.append({'name': org_name, 'about': detail,\n 'page': web_page, 'mail': mailing_list,\n 'link': url})\n project_urls_till13.extend(grab_project_links(soup))\n\n except IndexError:\n print(url)\n\n return org_info_till13, get_project_info(project_urls_till13)", "def mor_prepare_data():\n prices, locations, areas, links = [], [], [], []\n for i in range(START_PAGE, SEARCHING_DEPTH+1):\n handler = requests.get(main_url, params={\"page\": str(i)})\n soup = bs4.BeautifulSoup(handler.text, 'lxml')\n heads = soup.find_all(\"header\")\n once = True\n for head in heads:\n if head.find(\"meta\", {\"itemprop\": \"category\"}) and once:\n\n raw_price = head.find(\"meta\", {\"itemprop\": \"price\"})\n price = int(float(raw_price[\"content\"]) if raw_price else \"\")\n\n raw_loc_list = head.find(\"h2\",\n {\"class\": \"single-result__title\"}).getText().strip().split(\n \", \")\n found = False\n for loc in raw_loc_list:\n if location_mapper[CITY].get(loc.lower(), 0):\n location = location_mapper[CITY][loc.lower()]\n\n found = True\n break\n if not found:\n location = \"\"\n if DEBUG_MODE:\n print(raw_loc_list)\n\n raw_area = head.find(\"p\", {\n \"class\": \"single-result__price single-result__price--currency\"}).getText().strip().split()\n if price and location:\n square_price = raw_area[0] if len(raw_area) == 2 else \"\".join(\n (raw_area[0], raw_area[1]))\n\n area = int(price / float(square_price.replace(\",\", \".\")))\n link_url = head.find('a')['href']\n\n if location and area and link_url:\n prices.append(price) if price < PRICE_UPPER_LIMIT else prices.append(\n PRICE_UPPER_LIMIT)\n locations.append(location)\n areas.append(area) if area < AREA_UPPER_LIMIT else areas.append(\n AREA_UPPER_LIMIT)\n links.append(link_url)\n\n return prices, locations, areas, links", "def __get_citedby(self):\n if not hasattr(self, 'id_scholarcitedby'):\n self.fill()\n if hasattr(self, 'id_scholarcitedby'):\n url = self._scholarly.URLS('SCHOLARPUB').format(\n requests.utils.quote(self.id_scholarcitedby))\n soup = self._scholarly._get_soup(\n self._scholarly.URLS('HOST').format(url))\n return self._scholarly._search_scholar_soup(soup)\n else:\n return []", "def test_company_feed(self):\n url, parsed = self.prepare_urls(\n 'v1:activity-company-feed', subdomain=self.company.subdomain)\n \n response = self.client.get(url, HTTP_HOST=parsed.netloc)\n self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)\n\n self.authenticate_user()\n 
response = self.client.get(url, HTTP_HOST=parsed.netloc)\n self.assertEqual(response.status_code, status.HTTP_200_OK)\n \n content = json.loads(response.content)\n self.assertTrue(content.has_key('count'))\n self.assertTrue(content.has_key('next'))\n self.assertTrue(content.has_key('previous'))\n self.assertTrue(content.has_key('results'))", "def get_results():\n # store info in a dictionary {name -> shortname}\n res = {}\n session = requests.Session()\n handle_url('http://www.creators.com/comics/cat-seeall.html', session, res)\n save_result(res, json_file)", "def __fetch_company_historical_data(self) -> list:\n processing_result = {}\n try:\n survey_data = self.__redis.retrieve(key=self.__company_id, field=\"score\")\n\n # If data for given period exists\n if survey_data:\n processing_result = survey_data\n except Exception:\n return {}\n\n if not processing_result:\n raise Exception(\"No baseline data found on Redis.\")\n\n return processing_result", "def get_my_contracts(self):\n self.my_contracts = []\n my_shares = self.browser.get('https://www.predictit.org/Profile/GetSharesAjax')\n for market in my_shares.soup.find_all('table', class_='table table-striped table-center'):\n market_title = market.previous_element.previous_element.find('div', class_='outcome-title').find('a').get(\n 'title')\n market_data = [i.text.strip().replace(\n \"\\n\", \"\").replace(\" \", \"\").replace('\\r', '') for i in market.find_all('td')]\n market_data_lists = [market_data[x:x + 10] for x in range(0, len(market_data), 10)]\n cid = None\n for list_ in market_data_lists:\n parsed_market_data = [market_title]\n for string in list_:\n try:\n cid = re.search(\n pattern='#\\w+\\-(\\d+)', string=string\n ).group(1)\n string = re.search(\n pattern='(.*)\\$\\(.*\\)\\;', string=string\n ).group(1)\n except AttributeError:\n pass\n parsed_market_data.append(string)\n for line in urlopen('https://www.predictit.org/Contract/'+ str(cid) + '/#data').read().splitlines():\n if 'ChartTicker' in str(line):\n ticker = re.search(pattern=\"= '(.*)';\", string=str(line)).group(1)\n break\n parsed_market_data.insert(1, cid)\n parsed_market_data.append(ticker)\n contract = Contract(*parsed_market_data)\n self.my_contracts.append(contract)", "def company_addition_from_db2(output_data, domain):\n companyfc = CompanyFromdb2.objects.using('db2').filter(\n domain__iexact=domain\n )[0]\n\n if companyfc.founded:\n output_data['founded'] = companyfc.founded\n\n # maybe those 2 were already found in db1 but we want company name\n # and description from db2 as a priority\n # it's not the cleanest way since data are overwritten, but the easiest\n if companyfc.name:\n output_data['name'] = companyfc.name\n\n if companyfc.description:\n output_data['description'] = (\n companyfc.\n description.\n replace('\\n', ' ').\n replace('\\r', '')\n )\n\n try:\n output_data['formatted_address']\n except KeyError:\n if companyfc.address:\n # mapping french country names to english names\n # in case the french name is not found we put the english name\n # by default\n output_data['formatted_address'] = \"%s, %s\" % (\n companyfc.address,\n french_to_english_country_mapping_dict.get(\n companyfc.country,\n companyfc.country\n )\n )\n\n try:\n output_data['phone']\n except KeyError:\n if companyfc.telephone:\n output_data['phone'] = companyfc.telephone\n\n try:\n output_data['website_url']\n except KeyError:\n if companyfc.website:\n analyzed_url = urlparse(companyfc.website)\n if analyzed_url.netloc and analyzed_url.scheme:\n website_url = \"%s://%s\" % 
(\n analyzed_url.scheme,\n analyzed_url.netloc\n )\n elif analyzed_url.netloc and not analyzed_url.scheme:\n website_url = analyzed_url.netloc\n else:\n website_url = analyzed_url.path\n output_data['website_url'] = website_url\n\n try:\n output_data['size']\n except KeyError:\n if companyfc.size and company_size_mapping_dict.get(companyfc.size):\n output_data['size'] = company_size_mapping_dict.get(companyfc.size)\n\n try:\n output_data['last_updated']\n except KeyError:\n if companyfc.updated_on:\n output_data['last_updated'] = companyfc.updated_on\n\n # try to add linkedin profile from db2 (not present in db1).\n # Needs to use a JOIN\n try:\n companysocialprofilefc = CompanySocialProfileFromdb2.objects.using(\n 'db2'\n ).filter(\n company__domain=domain\n )[0]\n output_data['linkedin_url'] = companysocialprofilefc.url\n except IndexError:\n pass\n\n return output_data" ]
[ "0.6335587", "0.62796295", "0.6159124", "0.6062749", "0.5804519", "0.579404", "0.57562304", "0.5744922", "0.570986", "0.5648496", "0.5623103", "0.5619176", "0.55752385", "0.5574863", "0.5566009", "0.55634576", "0.55546683", "0.55116194", "0.5485694", "0.5482928", "0.5479279", "0.5448199", "0.5444315", "0.54421175", "0.543298", "0.5406457", "0.5381257", "0.5356049", "0.53522944", "0.53497267", "0.5336233", "0.5331737", "0.5329468", "0.53100723", "0.5303571", "0.5302601", "0.5291364", "0.5282012", "0.5259614", "0.5253312", "0.5227363", "0.5225789", "0.52199835", "0.5215083", "0.5214541", "0.5200949", "0.5199637", "0.5181107", "0.51567584", "0.5143725", "0.5130783", "0.512889", "0.5126126", "0.5111673", "0.51097935", "0.51070863", "0.51069957", "0.5106186", "0.5105062", "0.5103526", "0.5102993", "0.50980073", "0.509269", "0.5075447", "0.506763", "0.5063635", "0.50621724", "0.5053448", "0.5040884", "0.5036698", "0.5029003", "0.50254226", "0.5018012", "0.5013631", "0.5011182", "0.5001428", "0.49886042", "0.49866924", "0.49755365", "0.4969435", "0.49560475", "0.49539602", "0.49506935", "0.49477917", "0.49457955", "0.49292836", "0.492495", "0.49215138", "0.49209642", "0.49195835", "0.49181148", "0.49114838", "0.49104106", "0.4907527", "0.4907073", "0.48988757", "0.4897666", "0.48899058", "0.4889556", "0.48874912" ]
0.62571484
2
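Each record closes with three scalar fields: the scores the retriever assigned to the hard negatives, the score of the positive document, and the positive document's 0-based rank among all scored candidates. Below is a minimal sketch of how a consumer might sanity-check those fields on one deserialized record; the `record` dict and the helper name `recompute_rank` are illustrative assumptions, while the field names follow this dump's schema.

```python
# Minimal sketch: recompute a record's rank field from its raw scores.
# Assumes the record has been deserialized into a plain Python dict keyed
# by this dump's field names; `recompute_rank` is an illustrative helper.

def recompute_rank(record: dict) -> int:
    """0-based rank of the positive document among all scored candidates."""
    pos = float(record["document_score"])
    negs = [float(s) for s in record["negative_scores"]]
    # Every hard negative that outscores the positive pushes it one place down.
    return sum(1 for s in negs if s > pos)

# For the record above this works out to 2: only 0.6335587 and 0.62796295
# exceed the positive's score of 0.62571484, matching its document_rank.
```

Ties are not broken here; a consumer that needs exact agreement with the stored ranks would have to match the original scorer's tie-breaking rule.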
Parse a configuration file in INI format.
def __init__(self, inifile, dry_run, output): config = ConfigParser() config.read(inifile) sequence = config['dithersequence'] # Set up the output. self._output = output # Set up the file type and exposure sequence. self._location = sequence['location'] self._filetype = sequence['filetype'] self._date = sequence['date'] self._exposures = [int(e) for e in sequence['exposures'].split()] if 'coordinates' not in config: raise ValueError('no coordinates set for dither!') coords = config['coordinates'] self._dithertype = coords['dithertype'] self._wcs = fits.getdata(coords['wcsfile'], 2) self._wcs = self._wcs[np.argsort(self._wcs['mjd_obs'])] self._central_exposure = int(sequence['centralexposure']) if coords['dithertype'] == 'telescope': fadir = coords['fiberassigndir'] self._ditherfa = fits.getdata(os.path.join( fadir, 'fiberassign-%s.fits' % coords['ditheredtilenum'])) self._unditherfa = fits.getdata(os.path.join( fadir, 'fiberassign-%s.fits' % coords['unditheredtilenum'])) expnum = [int(fn.split('-')[1]) for fn in self._wcs['filename']] centralind = expnum.index(self._central_exposure) self._central_wcs = self._wcs[centralind] # Set the Tile ID for the output metadata. self._tileid = coords['unditheredtilenum'] else: raise ValueError('not implemented') # Extract the list of exposures on disk. self._exposure_files = self._getfilenames() if not dry_run: # Construct fiber output. self._exposure_table = self._buildtable()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def parse_config(self):\n # TODO: parse config file\n pass", "def parse_config():\n config_file = glob.glob('config.ini')\n parser = ConfigParser()\n if config_file:\n parser.read(config_file)\n else:\n cwd = os.path.abspath(os.path.dirname(__file__))\n config_file = os.path.join(cwd, 'default_config.ini')\n parser.read(config_file)\n return _parse_config(parser)", "def _parse_file(cls, config_file, namespace):\n config_file = _fixpath(config_file)\n\n sections = {}\n normalized = {}\n parser = cls(config_file, sections)\n parser._add_normalized(normalized)\n\n try:\n parser.parse()\n except iniparser.ParseError as pe:\n raise ConfigFileParseError(pe.filename, str(pe))\n except IOError as err:\n if err.errno == errno.ENOENT:\n namespace._file_not_found(config_file)\n return\n if err.errno == errno.EACCES:\n namespace._file_permission_denied(config_file)\n return\n raise\n\n namespace._add_parsed_config_file(config_file, sections, normalized)\n namespace._parse_cli_opts_from_config_file(\n config_file, sections, normalized)", "def parse_ini(self):\r\n cp = SafeConfigParser(defaults=self.DEFAULTS)\r\n if self.filenames['ini'] is not None:\r\n cp.read(self.filenames['ini'])\r\n return cp", "def iniparse(filename, flat=True):\n config = ConfigParser.RawConfigParser()\n config.read(filename)\n\n params = {}\n for section in config.sections():\n if not flat:\n params[section] = {}\n for key, value in config.items(section):\n try:\n eval_val = ast.literal_eval(value)\n except (SyntaxError, ValueError):\n eval_val = value\n\n if flat:\n params[key] = eval_val\n else:\n params[section][key] = eval_val\n\n return params", "def parse_ini_file_into_dict(filename):\n output = {}\n\n INIfile = SafeConfigParser()\n result = INIfile.read(filename) # returns an empty list if file error\n if result == []:\n raise IOError\n\n #iterate through INI file and build dictionary\n for section_name in INIfile.sections():\n section_dict = {}\n for option_name in INIfile.options(section_name):\n option_value = INIfile.get(section_name, option_name)\n section_dict[option_name] = option_value\n output[section_name] = section_dict\n\n return output", "def parse_configuration_file(config_file):\n try:\n parser = parser_bnf()\n result = parser.parseFile(config_file, parseAll=True)\n except (ParseException, ParseSyntaxException) as e:\n print(\"ERROR: {m}\".format(m=str(e)))\n sys.exit(1)\n return result", "def parse_config_file(self, filename):\n import ConfigParser\n self.cfg_file = ConfigParser.ConfigParser()\n self.cfg_file.optionxform = str # Make options case sensitive\n path = os.path.dirname(os.path.realpath(__file__))\n path = os.path.join(path, filename)\n self.logger.debug(\"config path=%s\"% path)\n try:\n self.cfg_file.readfp(open(path))\n except IOError as e:\n self.logger.error(\"Unable to load config file '%s'\" % path)\n exit(0) \n\n for section in self.cfg_file.sections():\n settings = self.cfg_file.options(section)\n for setting in settings:\n self._add_setting(section, setting, self.cfg_file.get(section, setting))", "def load_config():\n config = configparser.ConfigParser()\n config.read('config.ini')\n return config", "def parse_config():\n config_path = Path(\"config.ini\")\n if config_path.exists():\n config.read(config_path)\n else:\n config[\"database\"] = {\"location\": \"image-database.db\"}\n config[\"images\"] = {\"extensions\": \".jpeg,.jpg,.png,.gif,.tiff\"}\n with open(config_path, \"w\") as configfile:\n config.write(configfile)\n config.read(config_path)", "def load_ini(ini_path):\n ini 
= ConfigParser()\n try:\n # utf-8 with BOM will kill ConfigParser\n with open(ini_path, encoding='utf-8-sig') as f:\n ini.read_string('[DEFAULT]\\n' + f.read())\n except (ParsingError, FileNotFoundError) as e:\n die('error reading config file: %s' % e)\n ini = ini['DEFAULT']\n\n ret = {}\n ret.update(ini)\n # fix types\n for i in ('port', 'tun-port'):\n if i in ini:\n ret[i] = ini.getint(i)\n for i in ('client', 'server', 'debug', 'compatible'):\n if i in ini:\n ret[i] = ini.getboolean(i)\n\n for i in ret:\n if '-' in i:\n ret[i.replace('-', '_')] = ret.pop(i)\n return ret.items()", "def load_from_file(self, path):\n schema = self.schema\n \n # Set up the default values.\n if schema is not None:\n for sect, sect_obj in schema.items():\n for opt, val in sect_obj.items():\n # This call is to convert the value to\n # the type specified. We do this to\n # prevent the programmer from specifying\n # inconsistent type with the value in the \n # schema.\n self.set(*_convert(schema, sect, opt, val[1]))\n\n # Parse the INI file.\n parser = RawConfigParser()\n parser.read(path)\n \n sections = parser.sections()\n for section in sections:\n \n # If application has supplied a schema,\n # and it does not has such a section, we skip\n # it. No error raised.\n if schema is not None and \\\n not schema.has_key(section):\n continue\n\n options = parser.options(section)\n \n for option in options:\n \n # If application has supplied a schema,\n # we know the section is valid since it pass the\n # previus test, but if the option is not included\n # in the section, we skip it. No error raised.\n if schema is not None and \\\n (option not in schema[section]):\n continue \n \n # If there is a schema, then we convert the \n # option to its type stated in the schema,\n # otherwise we just leave it as string.\n if schema is not None:\n self.set(*_convert(schema, section, option,\n parser.get(section, option)))\n else:\n self.set(section, option,\n parser.get(section, option))", "def parseConfig(self, filename):\n parameters = {}\n try:\n f = open(filename)\n except Exception as inst:\n print(type(inst))\n print(inst.args)\n print(inst)\n print('cannot open', filename)\n raise\n else:\n for line in f:\n # Remove text after comment character.\n if self.comment_char in line:\n line, comment = line.split(self.comment_char,\n 1) # Split on comment character, keep only the text before the character\n\n # Find lines with parameters (param=something)\n if self.param_char in line:\n parameter, value = line.split(self.param_char, 1) # Split on parameter character\n parameter = parameter.strip() # Strip spaces\n value = value.strip()\n parameters[parameter] = value # Store parameters in a dictionary\n\n f.close()\n\n return parameters", "def parse(self, config_file):\n\t\tself.options = yaml.load(open(config_file))", "def parse_configuration(configuration_path):\n config = configparser.ConfigParser()\n config.read(['service.conf', os.path.expanduser(configuration_path)])\n\n configuration = {}\n for key, value in config['DEFAULT'].items():\n configuration[key] = value\n\n if 'port' in configuration:\n configuration['port'] = int(configuration['port'])\n\n return configuration", "def parse_config_file(config_file):\n parsed = {}\n\n try:\n with open(config_file, \"r\") as data:\n for line in data.readlines():\n if \"=\" not in line:\n continue\n key, val = line.split(\"=\", 1)\n parsed[key] = val.strip()[1:-1]\n except IOError:\n logging.error(\"%s doesn't exist\" % config_file)\n raise\n\n return parsed", "def load( self ):\n 
ini = codecs.open(self.filename,\"r\",\"utf-8\",errors=\"replace\",buffering=0)\n for l in ini:\n l = l.strip()\n if l:\n (name,value) = l.split(\"=\",1)\n self.conf[name.strip()] = value.strip()\n ini.close()", "def parse_conf(self):\n\n parser = configparser.RawConfigParser()\n parser.read(self.filename)\n\n try:\n self.id_node = parser['CONF_MACHINE']['ID_NODE']\n\n # eliminate possible white spaces between metrics\n temp = parser['CONF_MACHINE']['METRICS'].split(',')\n for itr in temp:\n self.metrics.append(itr.strip())\n\n except Exception:\n raise Exception(\"missing id or metrics\")\n\n try:\n self.interval = parser['CONF_MACHINE']['INTERVAL']\n except Exception:\n self.interval = 1\n\n try:\n self.ampq_url = parser['ampq']['url']\n self.ampq_port = parser['ampq']['port']\n self.ampq_vhost = parser['ampq']['vhost']\n self.ampq_user = parser['ampq']['user']\n self.ampq_password = parser['ampq']['password']\n except Exception:\n raise Exception(\"missing ampq configs\")", "def parse_config(config_file):\n\n conf = {}\n config = configparser.ConfigParser()\n valid_schedule = r'\\d{1,2}:\\d{2}(:\\d{2})*\\s+[AM|PM]'\n \n #configparser does not throw exception (empty dataset if files are not found)\n if(len(config.read(config_file)) == 0):\n raise FileNotFoundError(\"Failed to find config file\")\n\n\n conf['credentials'] = {\"username\": config['credentials']['username'], \"password\": config['credentials']['password']}\n conf['hashtags'] = [hashtag for hashtag in config['hashtags'].values()]\n conf['schedule'] = [time.upper() for time in config['schedule'].values() if re.search(valid_schedule,time, re.IGNORECASE)]\n conf['driverpath'] = config['driver']['path']\n\n return conf", "def load_config():\n config = ConfigParser()\n config.read(os.path.join(os.path.dirname(__file__), 'config.ini'))\n return config", "def parse(cfile=None):\n if not cfile:\n dir_path = os.path.dirname(os.path.realpath(__file__))\n cfile = os.path.join(dir_path, 'my_bb_configs.ini')\n\n print('Loading configs: ' + cfile)\n parser = ConfigParser(interpolation=ExtendedInterpolation())\n parser.read(cfile)\n\n return parser", "def parse_config(cls, fname):\n with open(fname) as f:\n lines = [l.strip() for l in f.read().split('\\n') if l.strip()]\n\n comments = [l.replace('#', '').strip()\n for l in lines if l.startswith('#')]\n lines = [l for l in lines if not l.startswith('#')]\n\n # We use a simple state-machine approach to the parsing\n # in order to deal with multi-line sections.\n parsing = False\n keys = cls.config_guide.keys()\n vars = {var: [] for var in keys}\n for line in lines:\n if not parsing:\n k, csv = line.split(':')\n else:\n csv = line\n\n vars[k].extend([val.strip() for val in csv.split(',')])\n parsing = not line.endswith(';')\n if not parsing:\n vars[k][-1] = vars[k][-1][:-1] # remove semi-colon\n\n # Remove whitespace strings. 
These may have come from something like:\n # c: this, , that;\n for k in keys:\n vars[k] = [val for val in vars[k] if val] # already stripped\n\n return comments, vars", "def read_ini_file (path):\n # TODO write a code to read data from ini-file to dictionary\n\n\n pass", "def parsed_file(config_file):\n parser = ConfigParser(allow_no_value=True)\n parser.read_file(config_file)\n return parser", "def read_config(self, filename):\n heading = None\n with open(filename) as fin: # open the file\n for line in fin:\n line = line.strip() # cut the tail\n if line.startswith('==') and line.endswith('=='): # detect headings\n heading = line[2:-2] # heading\n self._config[heading] = {} # create a dictionary for the heading\n elif line.count(':') == 1 and heading is not None: # detect attribute\n attr, _, value = line.partition(':') # get attribute and their value\n self._config[heading][attr[:-1]] = value[1:] # update into dic\n elif line == \"\": # if line is empty, skip\n continue\n else: # bad line\n messagebox.showerror(\"Error\", \"Bad config file, I can't read it!\")\n return self._config", "def parseConfigFile(self, config_file_path):\n parser = configparser.SafeConfigParser()\n parser.read(config_file_path)\n self.seuil_snr = int(parser.get('seuils', 'snr'))\n self.seuil_elev_sat = int(parser.get('seuils', 'sat_elevation'))\n\n # nav data path\n self.nav_data_file = parser.get('data', 'nav')\n\n print(self.nav_data_file)\n\n # obs data paths\n self.obs_data_file = parser.get('data', 'obs').split(\",\")\n\n print(self.obs_data_file)", "def parse_config(path):\n class Conf(object):\n pass\n conf = Conf()\n\n parser = ConfigParser()\n parser.read(path)\n vars_config = {}\n for section in parser.sections():\n for option in parser.options(section):\n value = parser.get(section, option)\n vars_config.update({option: value})\n\n for key in vars_config:\n setattr(conf, str(key).upper(), vars_config[key])\n setattr(conf, \"USERNAME\", environ[\"ENUMPI_DB_USER\"])\n setattr(conf, \"PASSPHRASE\", environ[\"ENUMPI_DB_PASSWORD\"])\n\n return conf", "def parse(self):\n raw_config_lines = self.load_config()\n self.config_lines_str = raw_config_lines\n self._create_cfg_line_objects()", "def parse_config(path):\n configuration = load_config(path)\n validate_config(configuration)\n return configuration", "def parse_config(self):\n\n default = {\n \"endpoint\": \"https://notify.bugsnag.com/\",\n \"port\": 3829,\n \"listen\": \"127.0.0.1\",\n }\n\n parser = OptionParser()\n parser.add_option(\n \"-c\", \"--config\",\n dest=\"config_file\",\n default=\"/etc/bugsnag.conf\",\n help=\"The path to your config file (default /etc/bugsnag.conf)\"\n )\n parser.add_option(\n \"-e\", \"--endpoint\",\n dest=\"endpoint\",\n help=(\"the URL of your Bugsnag server (default %s)\" % default['endpoint'])\n )\n parser.add_option(\n \"-p\", \"--port\",\n dest=\"port\",\n type=\"int\",\n help=(\"the port to bind to (default %s)\" % default['port'])\n )\n parser.add_option(\n \"-l\", \"--listen\",\n dest=\"listen\",\n help=(\"the ip to listen to (use 0.0.0.0 to allow anyone to connect, default %s)\" % default[\"listen\"])\n )\n\n (options, args) = parser.parse_args()\n\n config = RawConfigParser()\n config.read(options.config_file)\n\n if options.endpoint is not None:\n self.endpoint = options.endpoint\n elif config.has_option(\"bugsnag\", \"endpoint\"):\n self.endpoint = config.get(\"bugsnag\", \"endpoint\")\n else:\n self.endpoint = default[\"endpoint\"]\n\n if options.port is not None:\n self.port = options.port\n elif 
config.has_option(\"bugsnag\", \"port\"):\n self.port = config.getint(\"bugsnag\", \"port\")\n else:\n self.port = default[\"port\"]\n\n if options.listen is not None:\n self.listen = options.listen\n elif config.has_option(\"bugsnag\", \"listen\"):\n self.listen = config.get(\"bugsnag\", \"listen\")\n else:\n self.listen = default['listen']", "def get_config_params():\n configParser = configparser.ConfigParser()\n configParser.read(os.path.splitext(sys.argv[0])[0]+'.ini')\n return configParser", "def read_config(self, config_filename):", "def __init__(self, ini_file):\n self.config = configparser.ConfigParser()\n self.config.read(ini_file)", "def load_config(self, filename, fileconfout=None):\n self._filename = filename\n self._init_config = open(filename).read().splitlines()\n metaconfig = [l for l in self._init_config\n if not (l.startswith(\"#\") or l.startswith(\"\\t\") or l.startswith(\" \")) and len(l)>0]\n\n for k in metaconfig:\n key, *value = k.split()\n if len(value)==1:\n self.set_value(key, value[0], None)\n \n elif len(value)>1:\n if value[1]==\"#\":\n self.set_value(key, value[0], \" \".join(value[2:]))\n else:\n raise IOError(\"Cannot parse the line %s\"%k)\n else:\n raise IOError(\"cannot parse the line %s\"%k)\n if fileconfout is not None:\n self.set_value(\"PARA_OUT\", fileconfout)", "def parse_ini(text):\n cp = ConfigParser()\n cp.readfp(StringIO(text))\n return dict(cp.items('HyperPage'))", "def load_config(f):\n config = ConfigParser.RawConfigParser()\n config.readfp(f)\n # Mininum required data. Raises exception if non-existent.\n config.get('memrise', 'username')\n config.get('beeminder', 'username')\n config.get('beeminder', 'auth_token')\n config.get('beeminder', 'goal_slug')\n return config", "def parse_config(filename):\n\t\n\tabsfilepath = os.path.abspath(filename)\t\t\t\t\t\t\t# determine the absolute path of file\n\t\n\tif not os.path.isfile(absfilepath):\t\t\t\t\t\t\t\t# make sure file exists on path\n\t\n\t\tprint '[x] No such file %s' % filename\n\t\tsys.exit(1)\n\n\tparser = ConfigParser.RawConfigParser(allow_no_value=1)\t\t\t\t\t\t\t# raw config parser instance\n\n\ttry:\n\n\t\twith open(absfilepath, 'r') as f:\t\t\t\t\t\t\t# open the configuration file\n\t\t\t\n\t\t\tparser.readfp(f)\t\t\t\t\t\t\t\t\t\t# read the configuration file\n\t\t\treturn parser \t\t\t\t\t\t\t\t\t\t\t# return the parser object\n\n\texcept ConfigParser.Error as configerror:\n\n\t\tprint '[x] Unable to parse configuration file.'\n\t\tprint '[x] %s' % configerror\n\t\tsys.exit(1)\n\n\texcept AssertionError as asserror:\n\n\t\tprint '[x] Name/value missing in the configuration file (%s).' 
% filename\n\t\tprint '[x] %s' % asserror\n\t\tsys.exit(1)\n\n\texcept IOError as ioerror:\n\n\t\tprint '[x] %s' % ioerror\n\t\tsys.exit(1)\n\n\texcept Exception as exception:\n\n\t\tprint '[x] %s' % exception\n\t\tsys.exit(1)", "def parse_config(path):\n #Open the file\n f = open(path)\n section = None\n\n #for each line in file:\n for line in f:\n #Get rid of extra spaces and carridge-returns\n line = line.rstrip('\\r\\n')\n\n #If there is a comment on the line, get rid of everything after the comment symbol and trim whitespace\n #Example: hi there #This is a comment\n if \"#\" in line:\n line, comment = line.split(\"#\", 1)\n line = line.strip()\n\n #If there is a section header on the line, figure out what it's name is, and save it\n if \"[\" in line:\n #Example: [StartupMods]\n section = line.split(\"[\", 1)[1].split(\"]\", 1)[0]\n parsed_config[section] = list()\n\n #If there is no section header, than the line must contian data, so save it under the current section\n else:\n if line is not \"\":\n parsed_config[section].append(line)\n\n #Message the system\n logging.info(\"Finished parsing \" + path)\n return parsed_config", "def read_config(filename):\n parser = configparser.ConfigParser()\n parser.read(filename)\n logging.info(f'Read config file \\'{filename}\\'.')\n return parser", "def config_parse(conf=None, args=None):\n if args is None:\n args = []\n args += ['--config-file', etcdir('neutron.conf')]\n if conf is None:\n config.init(args=args)\n else:\n conf(args)", "def _read_config(filename):\n\n c = {}\n with open(filename, \"r\") as f:\n for line in f:\n key, val = line.split(\"=\")\n key = key.strip()\n val = val.strip()\n c[key] = val\n return c", "def read_config(self):\n config = configparser.ConfigParser()\n config.read(self.configfile)\n return config", "def readConfig(file=\"dispatcher.conf\"):\n\n parser = configparser.ConfigParser()\n parser.read(file)\n machines = parser.items(\"MACHINES\")\n commands = parser.items(\"COMMANDS\")\n\n return machines, commands", "def parse(configs_file):\n if not configs_file:\n dir_path = os.path.dirname(os.path.realpath(__file__))\n configs_file = os.path.join(dir_path, 'my_bb_configs.ini')\n\n print('Loading configs: ' + configs_file)\n # parser = ConfigParser(interpolation=ExtendedInterpolation())\n # parser.read(configs_file)\n\n # return parser", "def read_config_info(ini_file):\n try:\n config = RawConfigParser()\n config.optionxform = lambda option: option\n config.read(ini_file)\n the_stuff = {}\n for section in config.sections():\n the_stuff[section] = {}\n for option in config.options(section):\n the_stuff[section][option] = config.get(section, option)\n\n return the_stuff\n except Exception as wtf:\n logger.error(f'Exception caught in read_config_info(): {wtf}')\n traceback.print_exc(file=sys.stdout)\n return sys.exit(1)", "def load_configuration(configuration_file=None):\n dir_path = os.path.dirname(os.path.realpath(__file__))\n if not isinstance(configuration_file, str):\n if os.path.isfile(os.getenv(\"HOME\") + \"/PATH.ini\"):\n configuration_file = os.getenv(\"HOME\") + \"/PATH.ini\"\n\n if not os.path.isfile(configuration_file):\n raise FileNotFoundError(\n \"No Configuration File 'PATH.ini' found. 
Please create one in your home directory \"\n \"or provide the path via the argument parsing -c.\")\n else:\n logging.info(\"Using configuration file: %s\" % configuration_file)\n\n config = configparser.ConfigParser(interpolation=ExtendedInterpolation())\n config.read(configuration_file)\n return config", "def load_conf(self):\n\n self.load_file(self.ini_file)\n self.files = []\n conf_file = open(self.ini_file, \"r\")\n for l in conf_file:\n self.files.append(l.strip())\n conf_file.close()", "def parse_inifile(self):\n parsed_info = {\n \"identity_type\": None,\n \"username\": None,\n \"api_key\": None,\n \"region\": None,\n }\n res = self.configparse.read(self.inifile)\n for field in parsed_info.keys():\n try:\n parsed_info[field] = self.configparse[\"rackspace_cloud\"][field]\n except KeyError:\n parsed_info[field] = None\n pass\n return LoginInfo(\n identity_type=parsed_info[\"identity_type\"],\n username=parsed_info[\"username\"],\n api_key=parsed_info[\"api_key\"],\n region=parsed_info[\"region\"],\n )", "def config(section, filename=os.path.dirname(__file__) + '/config.ini'):\n logging.info(\"Parsing configuration file....\")\n\n # create a parser\n parser = ConfigParser()\n # read config file\n parser.read(os.path.expanduser(filename))\n\n cfg = {}\n if parser.has_section(section):\n params = parser.items(section)\n for param in params:\n cfg[param[0]] = param[1]\n else:\n raise Exception('Section {0} not found in the {1} file'.format(section, filename))\n\n return cfg", "def read_config(config_file):\n config = configparser.ConfigParser()\n config.read(config_file)\n return config", "def _read_config(filename):\n parser = configparser.RawConfigParser()\n if filename and not parser.read(filename):\n sys.stderr.write(\"Unable to open configuration file %s. 
Use --config='' to disable this warning.\\n\" % filename)\n\n config = {}\n\n for section, defaults in BASE_CONFIG.items():\n # Patterns are handled separately\n if section == 'patterns':\n continue\n\n for name, descr in defaults.items():\n kind, default = descr\n if section in parser.sections() and name in parser.options(section):\n if kind == 'int':\n value = parser.getint(section, name)\n elif kind == 'float':\n value = parser.getfloat(section, name)\n elif kind == 'bool':\n value = parser.getboolean(section, name)\n else:\n value = parser.get(section, name)\n else:\n value = default\n config[name] = value\n\n if 'patterns' in parser.sections():\n patterns = [parser.get('patterns', opt) for opt in parser.options('patterns')]\n else:\n patterns = DEFAULT_PATTERNS\n config['patterns'] = patterns\n\n return config", "def parse_config(parser):\n parser.add_argument('--config-file', '-c', help='config filename',\n default='config.yaml')\n return parser", "def config_parse_file():\n global ANGELCO_EMAIL, ANGELCO_PASSWORD\n\n print(\"Parsing the config file...\")\n config = configparser.ConfigParser()\n with open('dwh.cfg') as configfile:\n config.read_file(configfile)\n\n ANGELCO_EMAIL = config.get('ANGELCO', 'EMAIL')\n ANGELCO_PASSWORD = config.get('ANGELCO', 'PASSWORD')", "def load_configuration(config_file):\n filename = config_file\n config = configparser.ConfigParser()\n config.read(filename)\n\n return config", "def load(self, file, config={}):\n if not os.path.exists(file):\n err = 'ERROR: config file at \"{f}\" does not exist'\n err = err.format(f=file)\n raise SettingsError(err)\n config = config.copy()\n cp = GoulashConfigParser()\n cp.read(file)\n return cp._sections", "def parse_qiime_config_file(qiime_config_file):\r\n result = {}\r\n for line in qiime_config_file:\r\n line = line.strip()\r\n # ignore blank lines or lines beginning with '#'\r\n if not line or line.startswith('#'):\r\n continue\r\n fields = line.split()\r\n param_id = fields[0]\r\n param_value = expandvars(' '.join(fields[1:])) or None\r\n result[param_id] = param_value\r\n return result", "def parse_ir(self, filename):\n with open(filename, 'r') as f:\n configs_data = f.readlines()\n for line in configs_data:\n # remove the \\n char\n cur_line = line[:-1]\n title = re.findall('.+?:', cur_line)\n # remove the : char\n cur_title = title[0][:-1]\n content = re.findall(':.+', cur_line)\n cur_content = content[0][1:]\n exec('self.config_dict[cur_title]=' + cur_content)", "def readConfig(self, cfg='hamsterPrinter.cfg'):\n from configparser import ConfigParser\n parser = ConfigParser()\n parser.read(cfg)\n return parser", "def __init__(self, ini_file):\n self.config = configparser.ConfigParser()\n self.config.read(ini_file)\n #print(self.config)", "def read_cfg(file_path, account):\n d = {}\n parser = SafeConfigParser()\n\n try:\n parser.read(os.path.expanduser(file_path))\n for option in parser.options(account):\n # [1:-1] strips apostrophes wrapping the string\n d[option] = parser.get(account, option)[1:-1]\n return d\n except:\n print \"Config read failed\"\n return None", "def test_config_from_ini(self):\n\n # Make ini-file\n path = self.write_temp_file(\"\"\"\n[section1]\nstring1:\nstring2: string2\nint1: 0\nint2: 1\nfloat1: 0.0\nfloat2: 1.1\nboolean1: false\nboolean2: true\n\n[section2]\nstring2: string2\nint2: 2\nfloat2: 2.2\nboolean2: false\n\"\"\")\n\n for namespace in [None, 'namespace']:\n config = Config()\n config.load_from_ini(path, namespace=namespace)\n\n namespace_prefix = '%s.' 
% namespace if namespace is not None else ''\n\n # Test section 1\n self.assert_equal_deep(8, len(config('%ssection1' % namespace_prefix)))\n self.assert_equal_deep('', config('%ssection1.string1' % namespace_prefix))\n self.assert_equal_deep('string2', config('%ssection1.string2' % namespace_prefix))\n self.assert_equal_deep(0, config('%ssection1.int1' % namespace_prefix))\n self.assert_equal_deep(1, config('%ssection1.int2' % namespace_prefix))\n self.assert_equal_deep(0.0, config('%ssection1.float1' % namespace_prefix))\n self.assert_equal_deep(1.1, config('%ssection1.float2' % namespace_prefix))\n self.assert_equal_deep(False, config('%ssection1.boolean1' % namespace_prefix))\n self.assert_equal_deep(True, config('%ssection1.boolean2' % namespace_prefix))\n\n # Test section 2\n self.assert_equal_deep(4, len(config('%ssection2' % namespace_prefix)))\n self.assert_equal_deep('string2', config('%ssection2.string2' % namespace_prefix))\n self.assert_equal_deep(2, config('%ssection2.int2' % namespace_prefix))\n self.assert_equal_deep(2.2, config('%ssection2.float2' % namespace_prefix))\n self.assert_equal_deep(False, config('%ssection2.boolean2' % namespace_prefix))\n\n # Test section 3\n self.assert_equal(None, config('%ssection3' % namespace_prefix))", "def load_config(self):\n\n with open(os.path.expanduser(self.config_filename), 'r') as f:\n lines = f.readlines()\n\n _usable = lambda l: not(l.startswith('#') or l.strip() == '')\n lines = filter(_usable, lines)\n\n def _build_config(key, value, d):\n \"\"\" Called recursively to split up keys \"\"\"\n pieces = key.split('.', 1)\n if len(pieces) == 1:\n d[pieces[0]] = value.strip()\n else:\n d[pieces[0]] = _build_config(pieces[1], value, {})\n\n return d\n\n d = {}\n for line in lines:\n if '=' not in line:\n continue\n\n key, value = line.split('=')\n d = _build_config(key, value, d)\n\n return d", "def load_config(configfile=\"../data/test.cfg\"):\n\n config = configparser.ConfigParser()\n config.read([configfile])\n return config", "def load(self, file, config={}):\n if not os.path.exists(file):\n raise SystemExit('ERROR: config file at \"{f}\" does not exist'.format(f=file))\n config = config.copy()\n cp = ConfigParser.ConfigParser()\n cp.read(file)\n for sec in cp.sections():\n name = sec.lower()\n for opt in cp.options(sec):\n config[name + \".\" + opt.lower()] = cp.get(sec, opt).strip()\n return config", "def load_ini_file(ini_file_path):\n config = configparser.ConfigParser()\n config.read(ini_file_path)\n cfg = {}\n\n # Load hyperparameters\n cfg[\"hyperparameters\"] = {}\n cfg[\"hyperparameters\"][\"gpu_id\"] = config.getint(\"hyperparameters\", \"gpu_id\")\n cfg[\"hyperparameters\"][\"seed\"] = config.getint(\"hyperparameters\", \"seed\")\n cfg[\"hyperparameters\"][\"optimizer\"] = config.get(\"hyperparameters\", \"optimizer\")\n cfg[\"hyperparameters\"][\"lr\"] = config.getfloat(\"hyperparameters\", \"lr\")\n cfg[\"hyperparameters\"][\"momentum\"] = config.getfloat(\"hyperparameters\", \"momentum\")\n cfg[\"hyperparameters\"][\"clip\"] = config.getfloat(\"hyperparameters\", \"clip\")\n cfg[\"hyperparameters\"][\"dropout\"] = config.getfloat(\"hyperparameters\", \"dropout\")\n cfg[\"hyperparameters\"][\"batch_size\"] = config.getint(\"hyperparameters\", \"batch_size\")\n cfg[\"hyperparameters\"][\"embedding_dim\"] = config.getint(\"hyperparameters\", \"embedding_dim\")\n cfg[\"hyperparameters\"][\"commun_embed_size\"] = config.getint(\"hyperparameters\", \"commun_embed_size\")\n cfg[\"hyperparameters\"][\"num_epochs\"] = 
config.getint(\"hyperparameters\", \"num_epochs\")\n cfg[\"hyperparameters\"][\"use_one_hot\"] = config.getboolean(\"hyperparameters\", \"use_one_hot\")\n cfg[\"hyperparameters\"][\"max_input_length\"] = config.getint(\"hyperparameters\", \"max_input_length\")\n cfg[\"hyperparameters\"][\"max_num_answers\"] = config.getint(\"hyperparameters\", \"max_num_answers\")\n cfg[\"hyperparameters\"][\"use_dnc_c\"] = config.getboolean(\"hyperparameters\", \"use_dnc_c\") \n cfg[\"hyperparameters\"][\"use_dnc_q\"] = config.getboolean(\"hyperparameters\", \"use_dnc_q\")\n cfg[\"hyperparameters\"][\"share_memory\"] = config.getboolean(\"hyperparameters\", \"share_memory\")\n cfg[\"hyperparameters\"][\"weight_decay\"] = config.getfloat(\"hyperparameters\", \"weight_decay\")\n cfg[\"hyperparameters\"][\"use_clip_grad\"] = config.getboolean(\"hyperparameters\", \"use_clip_grad\")\n cfg[\"hyperparameters\"][\"clip_value\"] = config.getfloat(\"hyperparameters\", \"clip_value\")\n cfg[\"hyperparameters\"][\"lr_reduce_after\"] = config.getint(\"hyperparameters\", \"lr_reduce_after\")\n cfg[\"hyperparameters\"][\"lr_decay_rate\"] = config.getfloat(\"hyperparameters\", \"lr_decay_rate\")\n cfg[\"hyperparameters\"][\"grad_flow_interval\"] = config.getfloat(\"hyperparameters\", \"grad_flow_interval\")\n cfg[\"hyperparameters\"][\"add_noise\"] = config.getboolean(\"hyperparameters\", \"add_noise\")\n cfg[\"hyperparameters\"][\"finetune\"] = config.getboolean(\"hyperparameters\", \"finetune\")\n cfg[\"hyperparameters\"][\"fc_flag\"] = config.getboolean(\"hyperparameters\", \"fc_flag\")\n\n # Load lstm parameters\n cfg[\"lstm\"] = {}\n cfg[\"lstm\"][\"hidden_dim\"] = config.getint(\"lstm\", \"hidden_dim\")\n cfg[\"lstm\"][\"num_layers\"] = config.getint(\"lstm\", \"num_layers\")\n cfg[\"lstm\"][\"dropout\"] = config.getfloat(\"lstm\", \"dropout\")\n\n # Load dnc_q parameters\n cfg[\"dnc_q\"] = {}\n cfg[\"dnc_q\"][\"input_size\"] = config.getint(\"dnc_q\", \"input_size\")\n cfg[\"dnc_q\"][\"output_size\"] = config.getint(\"dnc_q\", \"output_size\")\n cfg[\"dnc_q\"][\"rnn_type\"] = config.get(\"dnc_q\", \"rnn_type\")\n cfg[\"dnc_q\"][\"hidden_dim\"] = config.getint(\"dnc_q\", \"hidden_dim\")\n cfg[\"dnc_q\"][\"memory_type\"] = config.get(\"dnc_q\", \"memory_type\")\n cfg[\"dnc_q\"][\"num_layers\"] = config.getint(\"dnc_q\", \"num_layers\")\n cfg[\"dnc_q\"][\"num_layers_hidden\"] = config.getint(\"dnc_q\", \"num_layers_hidden\")\n cfg[\"dnc_q\"][\"n\"] = config.getint(\"dnc_q\", \"n\")\n cfg[\"dnc_q\"][\"w\"] = config.getint(\"dnc_q\", \"w\")\n cfg[\"dnc_q\"][\"r\"] = config.getint(\"dnc_q\", \"r\")\n cfg[\"dnc_q\"][\"s_r\"] = config.getint(\"dnc_q\", \"t_r\")\n cfg[\"dnc_q\"][\"t_r\"] = config.getint(\"dnc_q\", \"s_r\")\n cfg[\"dnc_q\"][\"pass_through_mem\"] = config.getboolean(\"dnc_q\", \"pass_through_mem\")\n cfg[\"dnc_q\"][\"reset_experience\"] = config.getboolean(\"dnc_q\", \"reset_experience\")\n cfg[\"dnc_q\"][\"debug\"] = config.getboolean(\"dnc_q\", \"debug\")\n cfg[\"dnc_q\"][\"lr\"] = config.getfloat(\"dnc_q\", \"lr\")\n cfg[\"dnc_q\"][\"dropout\"] = config.getfloat(\"dnc_q\", \"dropout\")\n\n # Load dnc_c parameters\n cfg[\"dnc_c\"] = {}\n cfg[\"dnc_c\"][\"output_size\"] = config.getint(\"dnc_c\", \"output_size\")\n cfg[\"dnc_c\"][\"rnn_type\"] = config.get(\"dnc_c\", \"rnn_type\")\n cfg[\"dnc_c\"][\"hidden_dim\"] = config.getint(\"dnc_c\", \"hidden_dim\")\n cfg[\"dnc_c\"][\"memory_type\"] = config.get(\"dnc_c\", \"memory_type\")\n cfg[\"dnc_c\"][\"num_layers\"] = config.getint(\"dnc_c\", \"num_layers\")\n 
cfg[\"dnc_c\"][\"num_layers_hidden\"] = config.getint(\"dnc_c\", \"num_layers_hidden\")\n cfg[\"dnc_c\"][\"n\"] = config.getint(\"dnc_c\", \"n\")\n cfg[\"dnc_c\"][\"w\"] = config.getint(\"dnc_c\", \"w\")\n cfg[\"dnc_c\"][\"r\"] = config.getint(\"dnc_c\", \"r\")\n cfg[\"dnc_c\"][\"s_r\"] = config.getint(\"dnc_c\", \"t_r\")\n cfg[\"dnc_c\"][\"t_r\"] = config.getint(\"dnc_c\", \"s_r\")\n cfg[\"dnc_c\"][\"pass_through_mem\"] = config.getboolean(\"dnc_c\", \"pass_through_mem\")\n cfg[\"dnc_c\"][\"reset_experience\"] = config.getboolean(\"dnc_c\", \"reset_experience\")\n cfg[\"dnc_c\"][\"debug\"] = config.getboolean(\"dnc_c\", \"debug\")\n cfg[\"dnc_c\"][\"lr\"] = config.getfloat(\"dnc_c\", \"lr\")\n cfg[\"dnc_c\"][\"dropout\"] = config.getfloat(\"dnc_c\", \"dropout\")\n cfg[\"dnc_c\"][\"type\"] = config.get(\"dnc_c\", \"type\")\n cfg[\"dnc_c\"][\"nonlinearity\"] = config.get(\"dnc_c\", \"nonlinearity\")\n cfg[\"dnc_c\"][\"concat_out_rv\"] = config.getboolean(\"dnc_c\", \"concat_out_rv\")\n cfg[\"dnc_c\"][\"bidirectional\"] = config.getboolean(\"dnc_c\", \"bidirectional\")\n\n # Load logging paths\n cfg[\"logging\"] = {}\n cfg[\"logging\"][\"tensorboard_dir\"] = config.get(\"logging\", \"tensorboard_dir\")\n cfg[\"logging\"][\"checkpoints_dir\"] = config.get(\"logging\", \"checkpoints_dir\")\n cfg[\"logging\"][\"results_dir\"] = config.get(\"logging\", \"results_dir\")\n cfg[\"logging\"][\"grad_flow_dir\"] = config.get(\"logging\", \"grad_flow_dir\")\n\n # Load paths\n cfg[\"paths\"] = {}\n cfg[\"paths\"][\"input\"] = config.get(\"paths\", \"input\")\n cfg[\"paths\"][\"json_q_path_tr\"] = config.get(\"paths\", \"json_q_path_tr\")\n cfg[\"paths\"][\"json_q_path_val\"] = config.get(\"paths\", \"json_q_path_val\")\n cfg[\"paths\"][\"json_a_path_tr\"] = config.get(\"paths\", \"json_a_path_tr\")\n cfg[\"paths\"][\"json_a_path_val\"] = config.get(\"paths\", \"json_a_path_val\")\n cfg[\"paths\"][\"json_q_path_test\"] = config.get(\"paths\", \"json_q_path_test\")\n cfg[\"paths\"][\"dnc_q\"] = config.get(\"paths\", \"dnc_q\")\n cfg[\"paths\"][\"dnc_c\"] = config.get(\"paths\", \"dnc_c\")\n return cfg", "def _read_project(self, filename):\n parser = configparser.ConfigParser()\n parser.read(filename, \"utf8\")\n return parser", "def _parseConfigFile(self):\n\n configFile = self._configFile()\n\n configs = configparser.SafeConfigParser()\n try:\n with open(configFile, 'r', encoding='utf-8') as fh:\n try:\n configs.readfp(fh)\n return configs\n except configparser.Error:\n log(ERROR, traceback.format_exc())\n return None\n except IOError:\n log(DEBUG, \"Error: Could not read from config file {0}\\n\".format(configFile))\n return None", "def parse(self):\n try:\n with open(self.path, 'r') as ymlfile:\n self.__cfg = yaml.load(ymlfile)\n except IOError:\n self.log(\"File {0} not found -- aborting\".format(self.path))\n raise ConfigFileException", "def build_config_parser(filename='GradientOneAuthConfig.txt'):\n cfg = ConfigParser(dict_type=dict)\n cfg.optionxform = str\n cfgfile = None\n try:\n cfgfile = find_file(filename)\n except IOError:\n raise ValueError(\"Could not find a {} file. Please download \"\n \"one for this machine.\".format(filename))\n try:\n cfg.read(cfgfile)\n except IOError:\n raise ValueError(\"Could not read the {} file. 
Please download a \"\n \"valid config file for this machine.\"\n .format(filename))\n return cfg", "def parse_config_file(config_file):\n\n config = ConfigParser.SafeConfigParser()\n config.read(config_file)\n\n config_dict = {}\n for section in config.sections():\n # TODO : Should I force all section names to lowercase?\n config_dict[section.strip()] = dict(config.items(section))\n\n\n return config_dict", "def parse_config(filepath, section):\n config = configparser.ConfigParser(defaults=_defaults)\n config.read(_get_config_file(filepath))\n if not config.has_section(section):\n raise configparser.NoSectionError(section)\n return {\n 'source': config[section]['source'],\n 'backups': config[section]['backups'],\n 'ignore': _parse_ignore(config[section]['ignore']),\n 'retain_all_after': parse_human_readable_relative_dates(config[section]['retain_all']),\n 'retain_daily_after': parse_human_readable_relative_dates(config[section]['retain_daily']),\n 'decay_before': parse_human_readable_relative_dates(config[section]['decay']),\n 'autodecay': _parse_bool(config[section]['autodecay']),\n 'autoprune': _parse_bool(config[section]['autoprune']),\n }", "def read_config_file(self, **kwargs):\n if 'config_filename' in kwargs:\n self.config_filename = kwargs['config_filename']\n self.file_parser.read(self.config_filename)\n self.__get_options()\n self.__get_implementations()\n self.__get_configurations()\n self.consistency_check()\n for opt, opt_desc in self.opt_dict.iteritems():\n self.optypelist.append((opt, opt_desc['imptype']))\n\n for imptype in self.imp_dict.keys():\n if 'none' not in self.imp_dict[imptype]:\n self.imp_dict[imptype].insert(0, 'none')", "def load_config(self, config_file):\n self.config = ConfigParser.ConfigParser()\n self.config.read(config_file)", "def onLoadConfig(self, inifile):\n cp = ConfigParser(self.defaults)\n cp.readfp(inifile)\n depth = self.getDepth(cp)\n self.baseurl = urljoin(self.inipath, depth)\n # create child loaders for any other l10n.ini files to be included\n try:\n for title, path in cp.items('includes'):\n # skip default items\n if title in self.defaults:\n continue\n # add child config parser\n self.addChild(title, path, cp)\n except NoSectionError:\n pass\n # try to load the \"dirs\" defined in the \"compare\" section\n try:\n self.dirs.extend(cp.get('compare', 'dirs').split())\n except (NoOptionError, NoSectionError):\n pass\n # try getting a top level compare dir, as used for fennec\n try:\n self.tld = cp.get('compare', 'tld')\n # remove tld from comparison dirs\n if self.tld in self.dirs:\n self.dirs.remove(self.tld)\n except (NoOptionError, NoSectionError):\n self.tld = None\n # try to set \"all_path\" and \"all_url\"\n try:\n self.all_path = cp.get('general', 'all')\n self.all_url = urljoin(self.baseurl, self.all_path)\n except (NoOptionError, NoSectionError):\n self.all_path = None\n self.all_url = None\n return cp", "def readConfig(filepath=None):\n result = None\n if filepath is None:\n filepath = os.path.join(os.path.dirname(os.path.abspath(__file__)), \"res/\", \"config.ini\")\n if os.path.exists(filepath):\n config = ConfigParser.ConfigParser()\n config.read(filepath)\n result = config\n return result", "def parse_config(self, file):\n try:\n self.logger.debug(\"parsing config file: {}\".format(file))\n config = configparser.ConfigParser()\n config.read(file)\n self._blizzard_clientid = config['blizzard']['blizzard_clientid']\n self._blizzard_clientsecret = config['blizzard']['blizzard_clientsecret']\n self._db_filename = 
config['blizzard']['db_filename']\n except Exception as e:\n self.logger.error('Exception reading config from: {}'.format(file))\n self.logger.error(str(e))\n raise", "def loadConfigFile(configfile):\n\tconfig = {}\n\tif os.path.isfile(configfile):\n\t\tfor line in open(configfile):\n\t\t\t# Remove bits after the comment marker\n\t\t\tline = line.split('#')[0].strip()\n\t\t\t# If anything is left\n\t\t\tif line:\n\t\t\t\tprint line\n\t\t\t\t# Split by first equals sign\n\t\t\t\tkey, val = line.split('=',1)\n\t\t\t\tconfig[key] = val\n\treturn config", "def read_configuration_files(configuration_file_paths):\n configuration_file_parser = configparser.RawConfigParser()\n configuration_file_paths = map(os.path.expanduser, configuration_file_paths)\n configuration_file_paths_read = configuration_file_parser.read(configuration_file_paths)\n configuration_file_parser.configuration_file_paths = configuration_file_paths\n configuration_file_parser.configuration_file_paths_read = configuration_file_paths_read\n return configuration_file_parser", "def _ConfigFileParse(self, key, confFile):\n try:\n f = open(confFile)\n data = f.readlines()\n f.close()\n values = []\n\n for line in data:\n line = line.replace(\"\\n\", \"\") # Remove carriage returns\n line = line.replace(\"\\t\", \"\") # Remove all tabs\n line = line.strip() # Remove leading and trailing spaces\n\n if (line.lower()).startswith(key.lower()): # look for key match\n if \"#\" in line:\n line = line[:line.find(\"#\")] # Remove all text after a \"#\"\n\n line = line[len(key):] # Remove key from beginning of line\n line = line.replace(\"=\", \"\") # Remove \"=\" signs\n line = line.strip() # Remove leading and trailing spaces\n line = line.split(\",\") # split into individual values\n\n for index in range(len(line)):\n line[index] = line[index].strip()\n\n values.append(line)\n\n return values\n\n except:\n print(\"Could not open file {}\".format(confFile))", "def readConfig(self, config_file):\n parser = ConfigParser.RawConfigParser()\n parser.read(config_file)\n self.NUM_PROCESSES = parser.getint('loader', 'NUM_PROCESSES')\n self.EXEC = parser.get('loader', 'EXECUTABLE')\n self.TILE_LOADER_JSON = parser.get('loader', 'TILE_LOADER_JSON')\n # If the load request was to a single tile db instance then\n # we can skip details on MPI, else MPI fields are required\n if self.NUM_PROCESSES == 1:\n return\n self.MPIRUN = parser.get('mpi', 'MPIRUN')\n self.HOSTS = parser.get('mpi', 'HOSTS')\n self.HOSTFLAG = parser.get('mpi', 'HOSTFLAG')\n\n if parser.has_option('mpi', 'BTL_TCP_IF_INCLUDE'):\n self.IF_INCLUDE = parser.get('mpi', 'BTL_TCP_IF_INCLUDE')\n if parser.has_option('mpi', 'INCLUDE_ENV'):\n self.ENV = parser.get('mpi', 'INCLUDE_ENV')", "def parse_config(config_file_name):\n\n config = ConfigParser.ConfigParser()\n config.read(config_file_name)\n main_cfg_section = config.sections()[0]\n\n options = config.options(main_cfg_section)\n results = {}\n for option in options:\n try:\n results[option] = config.get(main_cfg_section, option)\n except:\n print('exception on %s!' 
% option)\n results[option] = None\n return results", "def load_configuration(cls, configuration_filename=\"test.ini\", optional_files=None):\n global _config\n if _config != None:\n return\n\n _config = configparser.RawConfigParser()\n file_name = os.path.join(os.path.dirname(__file__), \"../config/%s\" % configuration_filename)\n _config.read(file_name, encoding=\"utf-8\")\n\n if optional_files != None and type(optional_files) == list:\n for f in optional_files:\n file_name = os.path.join(os.path.dirname(__file__), \"../config/%s\" % f)\n _config.read(file_name)", "def read_config():\n\tparser = SafeConfigParser()\n\tconfig = {}\n\tparser.read(os.path.join(os.path.abspath(os.path.dirname(__file__)), \"InternetChecker.ini\"))\n\t# Connections\n\tconfig[\"closeOnConnect\"] = string_to_bool(parser.get(\"connections\", \"closeOnConnect\"))\n\tconfig[\"closeOnDisconnect\"] = string_to_bool(parser.get(\"connections\", \"closeOnDisconnect\"))\n\tconfig[\"closeOnChange\"] = string_to_bool(parser.get(\"connections\", \"closeOnChange\"))\n\treturn config", "def process_config(config_file=''):\n if not config_file:\n config_file = os.path.join(os.path.dirname(os.path.abspath(__file__)), \"config\")\n config = configparser.ConfigParser()\n config.read(config_file)\n config_dict = {}\n for section in config.sections():\n config_dict[section] = {name: value for name, value in config.items(section)}\n return config_dict", "def load(self):\n config_dict = {}\n with open(\n os.path.join(\n os.path.dirname(\n os.path.abspath(\n inspect.stack()[0][1]\n )\n ),\n \"config.txt\"), 'r') as config_file:\n for line in config_file:\n if not line.startswith('#'):\n line = line.strip().split('=', 1)\n if len(line) == 2:\n config_dict[line[0]] = line[1]\n return config_dict", "def parse_config_file(config_file):\n # if the config file doesn't exist, prepopulate the config object\n # with the defaults, in the right section.\n #\n # otherwise, we have to put the defaults in the DEFAULT section,\n # to ensure that they don't override anyone's settings which are\n # in their config file in the default section (which is likely,\n # because sydent used to be braindead).\n use_defaults = not os.path.exists(config_file)\n cfg = configparser.ConfigParser()\n for sect, entries in CONFIG_DEFAULTS.items():\n cfg.add_section(sect)\n for k, v in entries.items():\n cfg.set(configparser.DEFAULTSECT if use_defaults else sect, k, v)\n\n cfg.read(config_file)\n\n return cfg", "def load_configuration(self) -> None:\n config_file = self.default_config_file\n if self.config_file:\n config_file = self.config_file\n self.config = configparser.ConfigParser(delimiters=\"=\")\n # mypy is unhappy with us assigning to a method - (monkeypatching?)\n self.config.optionxform = lambda option: option # type: ignore\n self.config.read(config_file)", "def parse():\n rcParams = configparser.ConfigParser(defaults=defaults())\n rcParams.read([os.path.join(os.getcwd(), 'watershed_workflowrc'),\n os.path.join(os.getcwd(), '.watershed_workflowrc'),\n os.path.join(home(), '.watershed_workflowrc')])\n return rcParams", "def parseConfig(conffile=None):\n f = open(conffile,'r')\n cp = ConfigParser.ConfigParser()\n try:\n cp.read(conffile)\n except Exception,e:\n print 'failed to read config file! 
check format'\n print 'Error returned:', e\n return\n obj = setAttributesfromConfigParser(cp)\n return obj", "def read_configuration (self):\n\t\tself.config.read(self._configfile)", "def read_configuration(file_path):\n parser = configparser.ConfigParser()\n parser.read(file_path)\n\n # Parse predefined configuration sections\n config = {}\n for part in ['SAVGOL', 'TRIM', 'BASELINE', 'SNV', 'RNV', 'LSNV', 'DETREND', 'MSC', 'EMSC', 'NORML', 'CLIP', 'SMOOTH', 'RESAMPLE', 'DERIVATE']:\n if part in parser:\n config[part] = parse_section(dict(parser[part]), part)\n\n pipelines = construct_pipelines(config)\n pipelines = remove_incompatible_operations(pipelines)\n\n return pipelines", "def _load_config():\n\tcfg = configparser.ConfigParser()\n\tcfg.read(os.path.join(get_current_directory(), 'citi.config'))\n\treturn cfg", "def parse_config(self, file):\n try:\n self.logger.debug(\"parsing config file: {}\".format(file))\n config = configparser.ConfigParser()\n config.read(file)\n self._blizzard_clientid = config['blizzard']['blizzard_clientid']\n self._blizzard_clientsecret = config['blizzard']['blizzard_clientsecret']\n self._token_filename = config['blizzard']['token_filename']\n except Exception as e:\n self.logger.error('Exception reading config from: {}'.format(file))\n self.logger.error(str(e))\n raise", "def _read_config(self):\n config = configparser.ConfigParser()\n config.read('config.ini')\n\n self.batch_size = int(config['MODEL']['batch_size'])\n self.num_filters = int(config['MODEL']['num_filters'])\n self.dropout_dim = float(config['MODEL']['dropout_dim'])\n self.dense_neurons = int(config['MODEL']['dense_neurons'])\n _pool_size = config['MODEL']['pool_size']\n _kernel_size = config['MODEL']['kernel_size']\n self.IMG_SIZE = int(config['DATA']['image_size'])\n self.num_classes = int(config['CUSTOM']['num_classes'])\n self.epochs = int(config['MODEL']['epochs'])\n self.b_eval_advanced = (\n config['MODEL']['complex_analysis'] == 'true' or config['MODEL']['complex_analysis'] == 'True')\n\n self.pool_size = tuple(map(int, _pool_size.split(',')))\n self.kernel_size = tuple(map(int, _kernel_size.split(',')))\n\n self.img_rows, self.img_cols = self.IMG_SIZE, self.IMG_SIZE", "def read_config(config_file):\n config_dict = {\n \"port\": \"22\",\n \"persist_remote_files\": False\n }\n with open(config_file) as fin:\n for config_line in fin:\n config_line = config_line.strip()\n # check for commented out lines\n if config_line.startswith(\"#\") or len(config_line) == 0:\n continue\n key, value = config_line.split(\"=\")\n config_dict[key.rstrip()] = value.lstrip()\n\n return config_dict", "def dh_read_config(filename):\n parser = SafeConfigParser()\n c = parser.read(filename)\n if len(c)==0:\n raise ValueError('The configuration file %s is unreadable'%filename)\n\n def iteration():\n for section in parser.sections():\n results = _search_and_replace(parser, section)\n for item in results:\n key,value = item\n parser.set(section,key,value)\n\n iteration()\n count = 0\n max_depth = 10\n while _check_for_variables(parser):\n iteration()\n count = count + 1\n if count == max_depth:\n raise Exception('There are undefined variables in the configuration file [%s]'%filename)\n\n\n config = _setup_config_object(parser)\n print\n print\n print 'Study %s'%config.STUDY_NAME\n print '-------------------------------'\n print\n return config", "def read_config(filename, section):\n # create parser and read ini configuration file\n parser = ConfigParser()\n parser.read(filename)\n\n # get section\n db = {}\n 
if parser.has_section(section):\n items = parser.items(section)\n for item in items:\n db[item[0]] = item[1]\n else:\n raise Exception('{0} not found in the {1} file'.format(section, filename))\n\n return db", "def process_config_file(config, logger):\n # if the config file was not specified, just set default config values\n if not os.path.exists(config.config_file):\n setattr(config, \"watch_masks\", [])\n setattr(config, \"ignore_list\", [])\n logger.info(\"Specified config file '%s' does not exist, using \"\n \"default values.\" % config.config_file)\n return config\n logger.info(\"Processing config file '%s' ...\" % config.config_file)\n # Raw - doesn't do any interpolation\n parser = RawConfigParser()\n # by default it seems that value names are converted to lower case,\n # this way they should be case-sensitive\n parser.optionxform = str\n # does not fail even on a non-existing file\n parser.read(config.config_file)\n try:\n for (name, value) in parser.items(\"general\"):\n # assumes that ',' is the separator of configuration values\n values = value.split(',')\n # trim white spaces\n val_trimmed = [val.strip() for val in values]\n # entry will always be a list\n setattr(config, name, val_trimmed)\n except (ValueError, IndexError) as ex:\n msg = \"Error while processing configuration file, reason: %s\" % ex\n helpers.print_msg_exit(msg=msg, exit_code=1)\n return config", "def loadConfig():\n lines = []\n config = {}\n here = path.dirname(__file__)\n fn = path.join(here,'manatee.conf')\n try:\n with codecs.open(fn,'rU','utf-8') as conf:\n lines = conf.readlines()\n conf.close()\n except IOError as e:\n print \" Could not open configuration file: %s\" % e\n\n for line in lines:\n try:\n line = line.strip()\n if line:\n values = [x.strip() for x in line.split('=')]\n config[values[0]] = values[1]\n except Exception as e:\n print \"There was an error in the configuration file: %s\" % e\n # TODO: Any strings from the config file that might be displayed or passed into the SQL server need to be validated here.\n# config = validateConfig(config)\n return config", "def get_configuration(self, config_file_string, attribute_dict_list = [{}]):\n\n conf_results = {}\n\n try:\n parser = SafeConfigParser()\n parser.read(config_file_string)\n\n for di in attribute_dict_list:\n conf_results[di[\"conf\"]] = parser.get(di[\"section\"], di[\"conf\"])\n\n except Exception as e:\n logging.error(\"Error when parsing identica plugin info.\", exc_info=e)\n\n return conf_results", "def _parse_config_files(self):\n namespace = _Namespace(self)\n\n # handle --config-file args or the default_config_files\n for arg in self._args:\n if arg == '--config-file' or arg.startswith('--config-file='):\n break\n else:\n for config_file in self.default_config_files:\n ConfigParser._parse_file(config_file, namespace)\n\n # handle --config-dir args or the default_config_dirs\n for arg in self._args:\n if arg == '--config-dir' or arg.startswith('--config-dir='):\n break\n else:\n for config_dir in self.default_config_dirs:\n # for the default config-dir directories we just continue\n # if the directories do not exist. This is different to the\n # case where --config-dir is given on the command line.\n if not os.path.exists(config_dir):\n continue\n\n config_dir_glob = os.path.join(config_dir, '*.conf')\n\n for config_file in sorted(glob.glob(config_dir_glob)):\n ConfigParser._parse_file(config_file, namespace)\n\n self._oparser.parse_args(self._args, namespace)\n\n self._validate_cli_options(namespace)\n\n return namespace" ]
[ "0.76152253", "0.73512197", "0.7154713", "0.71381843", "0.7083268", "0.7074115", "0.7054041", "0.70176613", "0.6988784", "0.68394864", "0.6800065", "0.6791778", "0.6771396", "0.67589056", "0.67328686", "0.6729735", "0.6721972", "0.67042685", "0.66722804", "0.6669349", "0.6667586", "0.66674954", "0.66546726", "0.66502756", "0.6648313", "0.66313446", "0.66089046", "0.6595576", "0.6589324", "0.6589233", "0.65616655", "0.6559016", "0.65460044", "0.65363103", "0.65147334", "0.65105385", "0.65075547", "0.6507254", "0.65014976", "0.64771026", "0.6475017", "0.64688057", "0.6460294", "0.64495283", "0.6440467", "0.64309865", "0.6429303", "0.64227736", "0.64188814", "0.63977313", "0.63936484", "0.6390035", "0.63883275", "0.6373346", "0.6347508", "0.6342547", "0.63398284", "0.6323502", "0.6311001", "0.63008195", "0.62922025", "0.6291715", "0.628918", "0.62733203", "0.62727773", "0.6266817", "0.62626094", "0.62621886", "0.62325984", "0.6223832", "0.62206095", "0.62199175", "0.62170786", "0.6188974", "0.6182858", "0.61806965", "0.61751616", "0.61727375", "0.6166817", "0.6163124", "0.61610466", "0.6160362", "0.6160286", "0.61564666", "0.6153328", "0.61416", "0.6135751", "0.61252874", "0.61251974", "0.612289", "0.61215997", "0.611905", "0.61172986", "0.611018", "0.6108872", "0.610285", "0.6101241", "0.6095688", "0.6094522", "0.6092644", "0.6080359" ]
0.0
-1
Return a list of exposures and filenames given an INI configuration.
def _getfilenames(self): # Set up the path and file prefix depending on the filetype. if self._filetype == 'nightwatch': fileprefix = 'qcframe' if self._location == 'nersc': prefix = '/global/project/projectdirs/desi/spectro/nightwatch/kpno' elif self._location == 'kpno': prefix = '/exposures/desi' # not correct path! else: raise ValueError('Unknown location {}'.format(self._location)) elif self._filetype == 'redux': fileprefix = 'sframe' if self._location == 'nersc': prefix = '/global/project/projectdirs/desi/spectro/redux/daily/exposures' elif self._location == 'kpno': prefix = '/exposures/desi' # not correct path! else: raise ValueError('Unknown location {}'.format(self._location)) else: raise ValueError('Unknown file type {}'.format(self._filetype)) # Find the exposures files. exfiles = {} for ex in self._exposures: folder = '{}/{}/{:08d}'.format(prefix, self._date, ex) files = sorted(glob('{}/{}*.fits'.format(folder, fileprefix))) exfiles[ex] = files return exfiles
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _configFiles(self):\n import glob\n ret = [] \n for ext in self.configManager.extensions:\n ret.extend(\n glob.glob(f\"{self.pipelinesDir}/{self.pipeName}/*{ext}\"))\n return ret", "def get_config_files(self):\n flag, i = self.inotify\n\n if flag:\n kwargs = {}\n\n if PY3:\n kwargs['timeout_s'] = 0\n\n filenames = set()\n\n for event in i.event_gen(**kwargs):\n if event is None:\n break\n\n filenames.add(event[3])\n\n return list(filenames)\n\n else:\n return os.listdir(self.watch)", "def index_files(self) -> List[str]:\n return self.get(\"index_files\", [\"index.html\", \"index.htm\"])", "def get_config_files(self):\n self.clear_lists()\n print self.abs_directory\n for file in os.listdir(self.abs_directory):\n print file\n if file.endswith('.json') and \"qemii\" in file:\n self.txt_files.append(file)", "def _config_files():\n from .plugin import plugins\n return [p for p in (p.config_file() for p in plugins()) if p is not None]", "def get_files(self, name):\n return self.apps[name]['configuration_files']", "def get_cfg_files(self):\n\t\tcfg_files = []\n\t\tfor config_object, config_value in self.maincfg_values:\n\t\t\t\n\t\t\t## Add cfg_file objects to cfg file list\n\t\t\tif config_object == \"cfg_file\" and os.path.isfile(config_value):\n\t\t\t\t\tcfg_files.append(config_value)\n\n\t\t\t## Parse all files in a cfg directory\n\t\t\tif config_object == \"cfg_dir\":\n\t\t\t\tdirectories = []\n\t\t\t\traw_file_list = []\n\t\t\t\tdirectories.append( config_value )\n\t\t\t\t# Walk through every subdirectory and add to our list\n\t\t\t\twhile len(directories) > 0:\n\t\t\t\t\tcurrent_directory = directories.pop(0)\n\t\t\t\t\t# Nagios doesnt care if cfg_dir exists or not, so why should we ?\n\t\t\t\t\tif not os.path.isdir( current_directory ): continue\n\t\t\t\t\tlist = os.listdir(current_directory)\n\t\t\t\t\tfor item in list:\n\t\t\t\t\t\t# Append full path to file\n\t\t\t\t\t\titem = \"%s\" % (os.path.join(current_directory, item.strip() ) )\n\t\t\t\t\t\tif os.path.islink( item ):\n\t\t\t\t\t\t\titem = os.readlink( item )\n\t\t\t\t\t\tif os.path.isdir(item):\n\t\t\t\t\t\t\tdirectories.append( item )\n\t\t\t\t\t\tif raw_file_list.count( item ) < 1:\n\t\t\t\t\t\t\traw_file_list.append( item )\n\t\t\t\tfor raw_file in raw_file_list:\n\t\t\t\t\tif raw_file.endswith('.cfg'):\n\t\t\t\t\t\tif os.path.exists(raw_file):\n\t\t\t\t\t\t\t'Nagios doesnt care if cfg_file exists or not, so we will not throws errors'\n\t\t\t\t\t\t\tcfg_files.append(raw_file)\n\n\t\treturn cfg_files", "def files(self):\n if self._files is None:\n if helpers['isoinfo']: # TODO\n # It's safe to specify -R even for non-rockridge ISOs\n args = [\"-i\", self.path, \"-f\", \"-R\"]\n # At this time we don't support Joliet extensions\n output = helpers['isoinfo'].call(args)\n result = []\n for line in output.split(\"\\n\"):\n # discard non-file output lines\n if not line or line[0] != \"/\":\n continue\n # Non-Rock-Ridge filenames look like this in isoinfo:\n # /IOSXR_CONFIG.TXT;1\n # but the actual filename thus is:\n # /iosxr_config.txt\n if self.disk_subformat != \"rockridge\" and \";1\" in line:\n line = line.lower()[:-2]\n # Strip the leading '/'\n result.append(line[1:])\n self._files = result\n return self._files", "def filepaths(self):\n pass", "def _iter_configurations() -> Iterable[pathlib.Path]:\n for ext in CONFIGURATION_FILE_FORMATS:\n yield from HERE.rglob(f\"*{ext}\")", "def _find_files(research_structure, raise_on_all_missing=True):\n found = []\n filenames = []\n paths_searched = []\n ## config file lookup 
resolution\n for enforce_file_existence, cascaded, fun in research_structure:\n candidate = fun()\n if candidate is None:\n continue\n paths_searched.append(candidate)\n filenames.append((cascaded, candidate))\n if os.path.exists(candidate):\n found.append(candidate)\n if cascaded is False:\n break\n else:\n if enforce_file_existence:\n raise ValueError(\"File %r does not exists.\" % candidate)\n if not found and raise_on_all_missing:\n raise ValueError(\"No config file was found in those paths: %s.\"\n % ', '.join(paths_searched))\n return filenames", "def get_filepaths_and_exts(self):\n filepaths = [prod.filepath for prod in self.products]\n exts = [prod.ext for prod in self.products]\n\n return filepaths, exts", "def find_config_files(project=None, prog=None, extension='.conf'):\n return _find_config_files(project, prog, extension)", "def get_fp_config_files(self):\n self.get_config_files()\n for file in self.txt_files: \n if \"fp\" in file:\n self.fp_config_files.append(file)\n return self.fp_config_files", "def get_configfiles():\r\n configArray=''\r\n try:\r\n #print(len(configArray))\r\n while len(configArray) == 0:\r\n configFiles = input(\"List of Configuration and Files sepearated by commas (vhosts.conf,sslhosts.conf) \\n\") # takes the whole line of n numbers\r\n configArray = list(map(str,configFiles.split(',')))\r\n ### DEBUGGING\r\n # print(\"config array 0\" + configArray[0])\r\n # print(\"config array 1\" + configArray[1])\r\n #print(\"config array 0\" + configArray[0])\r\n ### /DEBUGGING ###\r\n if configArray[0] == '':\r\n print(\"please enter configuration files \")\r\n del configArray[:]\r\n #print(len(configArray))\r\n #print(configArray[0])\r\n return configArray[0], configArray[1]\r\n except:\r\n print(\"something went wrong with getting the config files\")", "def filenames(self):\n pass", "def _get_doc_files(self):\n return [(path.join(self.DocDirectory, 'conf.py'), 'Python')]", "def inject_files():\n for filename, arcname in INJECT_FILES.items():\n filename = os.path.join('bee2', 'inject', filename)\n if os.path.exists(filename):\n yield filename, arcname\n\n # Additionally add files set in the config.\n for prop in CONF.find_children('InjectFiles'):\n filename = os.path.join('bee2', 'inject', prop.real_name)\n if os.path.exists(filename):\n yield filename, prop.value", "def ReadEntries(self):\n entries = []\n config = wx.Config.Get()\n config.SetPath(DEPS_CONFIG)\n step = config.GetFirstEntry()\n while (step[0]):\n entries.append(config.Read(step[1]))\n step = config.GetNextEntry(step[2])\n config.SetPath('..')\n return entries", "def configfiles(basename):\n dirs = (\"config\", \"config-\" + os.uname()[1].rsplit(\".\")[0])\n dirpaths = (join(d, basename) for d in dirs)\n realpaths = (join(scriptdir, d) for d in dirpaths)\n return [relpath(d) for d in realpaths]", "def file_list(load):\n if \"env\" in load:\n # \"env\" is not supported; Use \"saltenv\".\n load.pop(\"env\")\n\n ret = []\n\n if \"saltenv\" not in load:\n return ret\n\n saltenv = load[\"saltenv\"]\n metadata = _init()\n\n if not metadata or saltenv not in metadata:\n return ret\n for bucket in _find_files(metadata[saltenv]):\n for buckets in bucket.values():\n files = [f for f in buckets if not fs.is_file_ignored(__opts__, f)]\n ret += _trim_env_off_path(files, saltenv)\n\n return ret", "def get_reference_housenumber_paths() -> List[str]:\n Config.__get()\n assert Config.__config is not None\n relpaths = Config.__config.get(\"wsgi\", \"reference_housenumbers\").strip().split(' ')\n return 
[get_abspath(relpath) for relpath in relpaths]", "def extract_files(self) -> list:\n pass", "def get_eval_config_files(self):\n return list(\n resources.get_files_in_folder(\n \"config/tests/evaluation/evaluate_test_configs\"))", "def GetExpectationFilepaths(self) -> List[str]:\n raise NotImplementedError()", "def get_file_handlers(self):\n return []", "def list_output_files(self):\r\n fname = self.__get_output_filename()\r\n return [fname] if fname else []", "def showconfigfiles():\n process = subprocess.check_output(['ls', os.path.dirname(os.path.abspath(__file__))+'/ConfigFiles'])\n print()\n for eachFile in process.decode('utf-8').split('\\n'):\n if '__' not in eachFile and '~' not in eachFile:\n print(' {}'.format(eachFile))", "def __get_url(self, conf):\n url_file = conf[self.conf_item.get_url_list_file()]\n url_list = list()\n map((lambda url: url_list.append(url.strip())), open(url_file))\n return url_list", "def get_file_list(start):\n valid_files = []\n for root, dirs, files in os.walk(start):\n for name in files:\n if name[-5:] == \".conf\":\n valid_files.append(os.path.join(root,name))\n return valid_files", "def discover(self):\n ids = []\n for f in os.listdir(self.dirname):\n if self.file_prefix in f:\n ids.append(self.inv_filename(f))\n return sorted(ids)", "def find_config_files(create: bool = False) -> List[str]:\n files = [\".wpwatcher/wpwatcher.conf\", \"wpwatcher.conf\"]\n env = [\"HOME\", \"XDG_CONFIG_HOME\", \"APPDATA\", \"PWD\"]\n\n return Config.find_files(\n env, files, Config.TEMPLATE_FILE, create=create\n )", "def get_project_list(config):\n eggs_dir = config.get('eggs_dir', 'eggs')\n if os.path.exists(eggs_dir):\n projects = os.listdir(eggs_dir)\n else:\n projects = []\n try:\n projects += [x[0] for x in config.cp.items('settings')]\n except NoSectionError:\n pass\n return projects", "def get_urls(self):\r\n if self.mod.filename:\r\n return [x + self.mod.filename for x in self.mod.service.get_mirrors()]", "def listFiles(self):\n pass", "def parseInputFileList (self) :\n filelist = []\n try :\n with open (self.cfgName) as fIn:\n for line in fIn:\n line = (line.split(\"#\")[0]).strip()\n if line:\n self.lines.append(line)\n except IOError:\n print \"*** WARNING: cfg file \" , self.cfgName , \" not found\"\n return\n\n #return filelist", "def get_already_processed_files(config: Config) -> list[str]:\n already_processed_files = []\n if os.path.exists(config.already_processed):\n with open(config.already_processed, 'r') as f:\n already_processed_files = f.read().splitlines()\n\n return already_processed_files", "def get_list():\n\n print(f\"Root directory: {config_tools.NAME_PATH}\")\n for dirpath, dirnames, filenames in os.walk(config_tools.NAME_PATH):\n # iterate over directories\n for dirname in dirnames:\n print(\"Directory:\", os.path.join(dirpath, dirname))\n # iterate over files\n for filename in filenames:\n print(\"File:\", os.path.join(dirpath, filename))", "def list_backupable_files(files, config, file_filter):\n # For each file used by the application\n backupable_files = []\n for _filename in list(files):\n for filename in glob.glob(_filename):\n # print(filename)\n\n # ignore the user defined files\n if any(re.match(ignore, filename) for ignore in config.ignores):\n continue\n\n status = None\n # check for backuped files given from pipe:\n if filename in config.backuped_files:\n status = Status.EXISTS\n\n # If the file exists and is not already a link pointing to Original file\n if status is None:\n status = file_filter.get_status(filename)\n\n if status is 
None:\n status = Status.NOT_EXISTS\n\n backupable_files.append([status, filename])\n return backupable_files", "def get_all_path(self, conf):\n\t\tpass", "def GetFileNames(self):\n return self.files", "def opendir() -> list:\n fileexr = [nf for nf in listdir(path=format(getcwd()))\n if search(pattern=r'.exr$', string=nf) and (not search(pattern=r'^L', string=nf))]\n if fileexr:\n for nf in fileexr: \n yield nf\n else:\n print('Exr file not found!')", "def get_postprocess_config_files(self):\n return list(\n resources.get_files_in_folder(\n \"config/tests/postprocessing/postprocess_test_configs\"))", "def list_configurations(path):\n configurations = []\n\n for afile in os.listdir(path):\n afile = os.path.join(path, afile)\n if os.path.isfile(afile) and afile.endswith('.py'):\n configurations.append(afile)\n\n return configurations", "def list_contents(reader: UFOReader) -> list[str]:\n return reader.getImageDirectoryListing() # type: ignore", "def get_ins_photo_list(self):\n photo_fn_list = get_file_list(self.ins_folder)\n # print(self.home_folder+self.ins_folder)\n # print(len(photo_list), photo_list[:10])\n if len(photo_fn_list) == 0:\n logging.error(\"The Ins folder is empty.\")\n\n return photo_fn_list", "def list(self):\n path = \"authSettings/exemptedUrls\"\n return self._session.get(path)", "def list_files(self):\n ret = []\n for fname in self.files:\n ret.append('filename: %s\\t replica locations: %s' %\n (fname, ','.join(self.files[fname])))\n return ret", "def filelist(self):\n msg = \"Collection of (str) file paths to mock\"\n raise NotImplementedError(msg)", "def get_filenames(self):\n return self.filenames", "def parseConfigFindList(stringFind,configFile):\n read = 0\n listOfItems = []\n for line in configFile:\n if line:\n if read == 1:\n if 'Stop' in line:\n configFile.seek(0)\n break # exit the function and return the list of files or list information\n listOfItems.append(line.strip('\\n'))\n if stringFind in line:\n read = 1 # if find string specified, begin reading lines\n configFile.seek(0)\n return listOfItems", "def pathext_list():\n return (os.environ.get(\"PATHEXT\") or \"\").split(os.pathsep)", "def pdbfile_list():\n import glob, os\n os.chdir(\"../Data\")\n file_list = []\n for file in glob.glob(\"*.pdb\"):\n file_list.append(file)\n return file_list", "def get_doc_files(extensions=MARKDOWN_EXTENSIONS + STATIC_ASSET_EXTENSIONS):\n file_list = []\n # doc files on toplevel\n for ext in extensions:\n file_list += config[\"topdir\"].glob('*' + ext)\n # doc files in include dirs\n for incdir in config['incdirs']:\n for ext in extensions:\n file_list += config[\"topdir\"].joinpath(incdir).rglob('*' + ext)\n return file_list", "def _getDefaultConfigFiles(self, _os = os, _sys = sys):\n argv0 = util.filename.fromLocale(\n _sys.argv[0], self.runtime.path_encoding\n )\n if isinstance(argv0, unicode):\n candidates = [util.filename.toLocale(\n name, locale_enc = self.runtime.path_encoding\n ) for name in [\n _os.path.join(\n self.runtime.repository, u'conf', u'mailer.conf'\n ),\n _os.path.join(_os.path.dirname(argv0), u'mailer.conf'),\n u'/etc/svn-mailer.conf',\n ]\n ]\n else:\n # --path-encoding=none\n candidates = [\n _os.path.join(self.runtime.repository, 'conf', 'mailer.conf'),\n _os.path.join(_os.path.dirname(argv0), 'mailer.conf'),\n _os.path.join(_os.path.sep, \"etc\", \"svn-mailer.conf\"),\n ]\n\n return candidates", "def _readFiles(self):\n template_files = []\n for file in os.listdir(self.template_folder):\n if file.endswith(\".xml\"):\n template_files.append(file)\n 
return template_files", "def list_log_files():\n for filename in os.listdir(\"/home/malyhass/log-parser\"):\n if filename.startswith(\"access.log\"):\n yield filename", "def infile_list(args):\n infiles = []\n for arg in args:\n infiles += glob.glob(arg)\n infiles = [pipes.quote(f) for f in infiles]\n return infiles", "def read_gitignore():\n excludes = []\n gitignore = Path(\".gitignore\")\n if gitignore.exists():\n with gitignore.open() as f:\n excludes += f.read().split(\"\\n\")\n else:\n raise ValueError(\n \"No exclude configuration option and no .gitignore file present\"\n )\n return excludes", "def files(self):\n try:\n return glob.glob(self.path)\n except (AttributeError, TypeError):\n try:\n return glob.glob(self.alias)\n except (AttributeError, TypeError):\n return []", "def getExternalFiles(self):\n return []", "def get_output_config_paths(self) -> Generator[Path, None, None]:\n for app in [None, *self.manager.apps]:\n app_dir = self.manager.output_dir / app if app else self.manager.output_dir\n for path_name in [JUPYTERLITE_JSON, JUPYTERLITE_IPYNB]:\n config_path = app_dir / path_name\n yield config_path", "def search_exceptions(self):\n templates = 'cfgov/jinja2/v1'\n return [\n templates + '/_defaults',\n templates + '/_lib',\n templates + '/_queries',\n templates + '/_settings',\n 'test',\n 'config'\n ]", "def pdbfile_list():\n \n import glob, os\n os.chdir(\"../Data\")\n file_list = []\n for file in glob.glob(\"*.pdb\"):\n file_list.append(file)\n return file_list", "def retrieve_tracked_files(self):\n result = []\n\n for key in self.repo.index.entries.keys():\n\n result.append(os.path.join(self.repo.working_dir, key[0]))\n\n return result", "def set_in_files():\r\n\tindatadir = '/nobackup/ejblom/reddit'\r\n\tcom_dir = '/comments'\r\n\tsubm_dir = '/submissions'\r\n\tglob_end = '/filtered*'\r\n\tcom_glob_str = indatadir + com_dir + glob_end\r\n\tsubm_glob_str = indatadir + subm_dir + glob_end\r\n\tinfilenames = sorted(glob.glob(com_glob_str)) + sorted(glob.glob(subm_glob_str))\r\n\treturn infilenames", "def get_files(self):\r\n return self._filelist", "def ini_get_all():\n raise NotImplementedError()", "def dynamic_conf_filenames( self, include_migrated_tool_conf=False ):\n for dynamic_tool_conf_dict in self.dynamic_confs( include_migrated_tool_conf=include_migrated_tool_conf ):\n yield dynamic_tool_conf_dict[ 'config_filename' ]", "def get_fr_config_files(self):\n self.get_config_files()\n for file in self.txt_files:\n if \"fr\" in file:\n self.fr_config_files.append(file)\n return self.fr_config_files", "def buildListOfFiles(searchGlob):\n return [fpath for fpath in glob2.iglob(searchGlob) if os.path.isfile(fpath)]", "def get_files(self):\n return self.ebook_file.get_files()", "def test_invocations_ini():\n invocations = labeled.contents(label=\"invocations\")\n tox = Path(\"tox.ini\").read_text(encoding=\"utf-8\")\n gendir = \".gendir-ini\"\n assert gendir in invocations\n assert f\"output_directory = {gendir}\" in tox", "def locations(self):\n return [part.file for part in self.iterParts() if part]", "def locations(self):\n return [part.file for part in self.iterParts() if part]", "def locations(self):\n return [part.file for part in self.iterParts() if part]", "def get_browser_extensions(self, config_section):\n\n extension_string = None\n extensions = []\n if config_section is not None:\n try:\n extension_string = self.shishito_support.get_opt(config_section, 'browser_extensions') # browser config\n except configparser.NoOptionError:\n extension_string = None\n\n if 
extension_string is None:\n try:\n extension_string = self.shishito_support.get_opt('browser_extensions') # common config\n except configparser.NoOptionError:\n pass\n\n if extension_string is None:\n return []\n\n for item in re.split('\\s+', extension_string):\n if item != '':\n m = re.match('^\\$([A-Z][A-Z_]+)$', item)\n if m is not None:\n var_name = m.group(1)\n if var_name not in os.environ:\n raise Exception(\"Error getting browser_extensions: env variable '\" + item + \"' not defined\")\n extensions.append(os.environ[var_name]) # take the extension path as configured\n else:\n extensions.append(item) # take the extension path as configured\n\n return extensions", "def demo_paths(self):\n base_path = os.path.join(self.module.__path__[0], 'demo')\n paths = []\n if os.path.isdir(base_path):\n for item in os.listdir(base_path):\n # TODO: support examples which is not auto-loaded\n if not os.path.isdir(os.path.join(base_path, 'examples')):\n paths.append(os.path.join(base_path, item))\n return paths", "def pathAliases(ask=True):\n from bs4 import BeautifulSoup\n if ask:\n try:\n # set up the dialog\n lx.eval('dialog.setup yesNo')\n lx.eval('dialog.title {Confirm Operation}')\n lx.eval('dialog.msg {Save the modo config before getting the PathAliases?}')\n lx.eval('dialog.result ok')\n\n # Open the dialog and see which button was pressed\n lx.eval('dialog.open')\n lx.eval(\"dialog.result ?\")\n lx.eval(\"config.save\")\n lx.out(\"Proceeding with saved config.\")\n\n except:\n lx.out(\"Proceeding without saved config.\")\n else:\n lx.eval(\"config.save\")\n lx.out(\"Saved config.\")\n\n\n\n config = lx.eval(\"query platformservice path.path ? configname\")\n lx.out(config)\n soup = BeautifulSoup(open(config))\n config_pathaliases = soup.find(type='PathAliases')\n config_pathaliases = config_pathaliases.find_all(type='Alias')\n\n pathaliases = {}\n\n for alias in config_pathaliases:\n alias_alias = alias['key']\n for string in alias.atom.strings:\n alias_path = string\n\n pathaliases[alias_alias] = alias_path\n\n return pathaliases", "def list(self, config_path: str, results_filter: Optional[ObjectType]) -> List[str]:\n ...", "def find_files(self):\n # yield blueprint paths first\n if getattr(self, 'blueprint_name', None):\n for path in walk_directory(os.path.join(self.path, self.blueprint_name), ignore=self.project.EXCLUDES):\n yield 'preview', {'path': path}\n\n # then yield project paths\n for path in walk_directory(self.path, ignore=self.project.EXCLUDES):\n yield 'preview', {'path': path}", "def get_yml_files():\n repo_fs()\n return YML_FILES", "def cfgPathToList( arg ):\n from types import StringTypes\n listPath = []\n if type( arg ) not in StringTypes:\n return listPath\n while arg.find( '/' ) == 0:\n arg = arg[1:]\n return arg.split( '/' )", "def get_prop_list(self):\n import_list = []\n if os.path.exists(os.path.join(EXPORT_DIR, \"ENV\")):\n for i in os.listdir(os.path.join(EXPORT_DIR, \"ENV\")):\n import_list.append(os.path.join(EXPORT_DIR, \"ENV\", i))\n return import_list\n else:\n gui.MessageDialog(\n \"Could Not find Exported File from Daz Studio\",\n type=c4d.GEMB_ICONEXCLAMATION,\n )", "def _list_of_files(self):\n if self.only_gpw:\n path = 'NOTORIA_GPW_XLSX/'\n securities_list = os.listdir(path)\n else:\n path = 'NOTORIA_NC_XLSX/'\n securities_list = os.listdir(path)\n securities_list = [x for x in securities_list if not x.startswith('.')]\n securities_list.sort()\n self.securities_filenames = securities_list\n self.paths_to_securities_files = [path + x for x in 
securities_list]\n self.logger.debug('self.securities_filenames, n: {}, [0]: {}'.format(\n str(len(self.securities_filenames)),\n str(self.securities_filenames[0]))\n )\n self.logger.debug('self.paths_to_securities_files, n: {}, [0]: {}'.format(\n str(len(self.paths_to_securities_files)),\n str(self.paths_to_securities_files[0]))\n )", "def action_listall():\n\n def parse_file(filename):\n config = {}\n\n # get all content\n with open(filename, 'r') as f:\n lines = f.read().split('\\n')\n\n # parse the content\n for l_nb in range(len(lines)):\n items = [_.strip() for _ in lines[l_nb].split('#')[0].split('=')]\n if len(items) > 1:\n v = '='.join(items[1:]).strip()\n # handle [...] for param value\n if '[' in v and ']' not in v:\n l_nb += 1\n # get the next line until the array in not closed by ']'\n while ']' not in v:\n v += lines[l_nb].split('#')[0].strip()\n l_nb += 1\n # remove '' around param value\n if v[0] == \"'\" and v[-1:] == \"'\":\n v = v[1:len(v)]\n config[items[0]] = v\n return config\n\n out = []\n for root, dirs, files in os.walk('/etc/xen'):\n for cfgfile in files:\n if cfgfile.endswith('.cfg') and not cfgfile.startswith('.'):\n out.append(parse_file(os.path.join(root, cfgfile)))\n return out", "def find_config_files(create=False):\n files = [\".wpwatcher/wpwatcher.conf\", \"wpwatcher.conf\"]\n env = [\"HOME\", \"XDG_CONFIG_HOME\", \"APPDATA\", \"PWD\"]\n\n return WPWatcherConfig.find_files(env, files, WPWatcherConfig.TEMPLATE_FILE)", "def get_files(target_files, config):\n out = []\n find_fn = _find_file(config)\n for fname_in in target_files.keys():\n if isinstance(fname_in, (list, tuple)):\n fnames = fname_in\n else:\n fnames = fname_in.split(\";\")\n for fname in fnames:\n remote_fname = find_fn(fname)\n if remote_fname:\n if isinstance(remote_fname, (list, tuple)):\n out.extend(remote_fname)\n else:\n out.append(remote_fname)\n return out", "def filepaths(self) -> Dict[str, 'BinPackageFile']:\n return self._get_package_files()", "def make_files(self):\n return []", "def app_options(self):\n return [\n self.input()[0].path,\n self.input()[1].path,\n self.input()[2].path,\n self.input()[3].path,\n self.output().path,\n ]", "def filenames(self) -> dict[str, str]:\r\n ...", "def in_filepath_list(class_paths: List[str]) -> List:\n registry, not_founds = build_registry(class_paths)\n builder = FilepathListBuilder()\n source = builder.build(registry)\n\n return [source, not_founds]", "def getGlobusFiles(self):\n\t\treturn self.transfer_client.operation_ls(self.transfer_client.endpoint_search(DATA_ENDPOINT_NAME)[0]['name'])", "def list():\n project_root = get_project_root()\n config_file = os.path.join(project_root, CONFIG_DIR, CONFIG_FILE)\n if os.path.exists(config_file):\n kwargs = load_yaml_file(config_file)\n if PACKAGE_INDEX_KEY in kwargs:\n packages = load_yaml_url(kwargs[PACKAGE_INDEX_KEY])\n # Should update and look in .carme/config\n else:\n packages = load_yaml_url(PACKAGE_INDEX)\n\n ruamel.yaml.dump(packages, sys.stdout, Dumper=ruamel.yaml.RoundTripDumper)", "def getOpenFilenames(self):\n filenames = []\n for editor in self.editors:\n fn = editor.getFileName()\n if fn is not None and fn not in filenames and os.path.exists(fn):\n # only return names of existing files\n filenames.append(fn)\n \n return filenames", "def _get_files(self):\n # pylint: disable=unused-variable\n for dirpath, __, filenames in os.walk(self.start_location):\n for file_ in filenames:\n if file_.endswith('.py'):\n yield \"{0}{1}\".format(dirpath, file_)", "def list_logging_conf():\n import 
pkg_resources\n\n configs = set()\n for plugin in plugin_manager.load_all(__name__):\n configs.update({\n cfg for cfg in pkg_resources.resource_listdir(__name__, '.')\n if cfg.endswith('.json')\n })\n\n return configs", "def init_storage_handlers_from_ini(ini: ConfigParser) -> List[StorageHandlerBase]:\n storage_handlers = []\n for key in STORAGE_CONFIG_MAP.keys():\n storage_handler = init_storage_handler_from_ini(key, ini)\n if storage_handler:\n storage_handlers.append(storage_handler)\n\n return storage_handlers", "def parse_path(self) -> list:\n metadata = []\n for f in listdir(self.__path):\n inner_path = join(self.__path, f)\n if len(listdir(inner_path)) > 1:\n LOGGER.error(\"Unwanted files found at {}.\".format(inner_path))\n sys.exit(-1)\n try:\n inner_file = join(inner_path, listdir(inner_path)[0])\n except IndexError as ie:\n LOGGER.error(\"{} does not have any solution file.\".format(f))\n sys.exit(-1)\n if isdir(inner_path) and isfile(inner_file) and \"solution.\" in inner_file:\n metadata.append((f, inner_file))\n else:\n LOGGER.error(\"Unwanted files found at {} or {}.\".format(f, inner_path))\n sys.exit(-1)\n return metadata" ]
[ "0.6261923", "0.6095658", "0.6020707", "0.60147893", "0.59440166", "0.58393127", "0.5772027", "0.5740705", "0.57304424", "0.56908756", "0.5659648", "0.56340367", "0.5621146", "0.5619135", "0.5599532", "0.5568797", "0.55641794", "0.55604345", "0.5555875", "0.5541721", "0.5528318", "0.55053383", "0.5497269", "0.5479364", "0.5473706", "0.5454165", "0.54526633", "0.5437858", "0.5425498", "0.5425039", "0.5418036", "0.5407027", "0.53884256", "0.5385322", "0.5384247", "0.53758436", "0.5360522", "0.53546464", "0.53219026", "0.531904", "0.53065807", "0.52870995", "0.52705795", "0.5260268", "0.52326906", "0.52325517", "0.52309614", "0.5220961", "0.5207024", "0.52047485", "0.5199321", "0.5195071", "0.5180055", "0.51759416", "0.51745695", "0.51717997", "0.5164307", "0.51641846", "0.514187", "0.5140601", "0.5139351", "0.5131072", "0.5128452", "0.5127544", "0.5120287", "0.5117754", "0.510149", "0.5100739", "0.50984806", "0.5086265", "0.5084525", "0.5084039", "0.5077063", "0.50767267", "0.50767267", "0.50767267", "0.50740963", "0.5071504", "0.50670093", "0.50554657", "0.5046512", "0.5040384", "0.5040025", "0.50289243", "0.50262105", "0.5025379", "0.5022624", "0.5022607", "0.50223213", "0.5021115", "0.500345", "0.5002759", "0.49988326", "0.49969876", "0.49893662", "0.49866465", "0.49858615", "0.4985666", "0.49854448", "0.49842033" ]
0.5931038
5
Loop through the exposure list and construct an observation table.
def _buildtable(self): tabrows = [] for i, (expid, exfiles) in enumerate(self._exposure_files.items()): specflux_b, specflux_r, specflux_z = [], [], [] tab = None if len(exfiles) == 0: continue print(expid) for exfile in exfiles: print(exfile) hdu = fits.open(exfile) # The following tables are present in the redux sframes and the # nightwatch qcframes. wave = hdu['WAVELENGTH'].data # However, in the nightwatch files the wavelength data are a # table of size nfiber x nwavelength. if self._filetype == 'nightwatch': if wave.ndim > 1: wave = wave[0] fluxhead = hdu['FLUX'].header fluxdata = hdu['FLUX'].data ivardata = hdu['IVAR'].data fibermap = hdu['FIBERMAP'].data exptime = fluxhead['EXPTIME'] if not np.all(self._unditherfa['FIBER'] == np.arange(len(self._unditherfa))): raise ValueError('weird fiberassign file format!') fibermap = self._unditherfa[fibermap['FIBER']] target_id = fibermap['TARGETID'] target_ra = fibermap['TARGET_RA'] target_dec = fibermap['TARGET_DEC'] fiber = fibermap['FIBER'] objtype = fibermap['OBJTYPE'] flux_g = fibermap['FLUX_G'] flux_r = fibermap['FLUX_R'] flux_z = fibermap['FLUX_Z'] x, y = [fibermap['FIBERASSIGN_{}'.format(val)] for val in ('X', 'Y')] camera = fluxhead['CAMERA'][0].upper() if getattr(self, '_deltara', None) is not None: dra = self._deltara[i]*np.ones(len(fiber)) ddec = self._deltadec[i]*np.ones(len(fiber)) elif self._dithertype == 'telescope': dithra = self._ditherfa['target_ra'] dithdec = self._ditherfa['target_dec'] udithra = self._unditherfa['target_ra'] udithdec = self._unditherfa['target_dec'] ontarget = ((self._ditherfa['targetid'] == self._unditherfa['targetid']) & (self._ditherfa['objtype'] == 'TGT')) dfiberra = (dithra-udithra)*np.cos(np.radians(udithdec))*60*60 dfiberdec = (dithdec-udithdec)*60*60 if not np.all(self._ditherfa['FIBER'] == np.arange(len(self._ditherfa))): raise ValueError('unexpected shape of dither file') dfiberra[~ontarget] = np.nan dfiberdec[~ontarget] = np.nan dfiberra = dfiberra[fiber] dfiberdec = dfiberdec[fiber] wcs = self.lookup_wcs(fluxhead['MJD-OBS']) centralwcs = self._central_wcs if (~np.isfinite(centralwcs['cenra'][1]) or ~np.isfinite(centralwcs['cendec'][1])): raise ValueError('central pointing ra/dec is NaN!') dtelra = (wcs['cenra'][1]-centralwcs['cenra'][1]) dtelra *= np.cos(np.radians(centralwcs['cendec'][1])) dteldec = wcs['cendec'][1]-centralwcs['cendec'][1] dra = dfiberra + dtelra*60*60 ddec = dfiberdec + dteldec*60*60 if np.all(~np.isfinite(dra)): print('warning: no good telescope offset for %s' % exfile) else: raise ValueError('not implemented') for j, fiber_id in enumerate(fiber): flux = fluxdata[j] ivar = ivardata[j] if not np.any(ivar > 0): specflux = 0 specflux_ivar = 0 else: meanivar = np.mean(ivar[ivar > 0]) mask = ivar > meanivar / 100 specflux = np.trapz(flux*mask, wave) specflux_ivar = 1./np.sum(ivar[mask]**-1) # Schlegel: sum over correct wavelengths, all three # filters, plus 11 pixel median filter to reject # cosmics. # will require being better about reading in # the spectrographs together. tabrows.append((expid, exptime, target_id[j], target_ra[j], target_dec[j], fiber[j], objtype[j], flux_g[j], flux_r[j], flux_z[j], specflux, specflux_ivar, camera, dra[j], ddec[j], x[j], y[j])) tab = Table(rows=tabrows, names=('EXPID', 'EXPTIME', 'TARGETID', 'TARGET_RA', 'TARGET_DEC', 'FIBER', 'OBJTYPE', 'FLUX_G', 'FLUX_R', 'FLUX_Z', 'SPECTROFLUX', 'SPECTROFLUX_IVAR', 'CAMERA', 'DELTA_X_ARCSEC', 'DELTA_Y_ARCSEC', 'XFOCAL', 'YFOCAL'), meta={'EXTNAME' : 'DITHER', 'TILEID' : '{}'.format(self._tileid)}) return tab
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def buildExposureTable(exposures, fields, instruments):\n name = []\n ra = []\n dec= []\n field= []\n inst = []\n airmass = []\n mjd = []\n exptime = []\n epoch = []\n apcorr = []\n index = 0\n for k,e in exposures.items():\n name.append(e.name)\n ra.append(getDegree(e.coords.ra))\n dec.append(getDegree(e.coords.dec))\n field.append(fields[e.field].index)\n if e.instrument in specialInstruments:\n inst.append(specialInstruments[e.instrument])\n else:\n inst.append(instruments[e.instrument].index)\n e.index = index\n index += 1\n\n airmass.append(e.airmass)\n mjd.append(e.mjd)\n exptime.append(e.exptime)\n epoch.append(e.epoch)\n apcorr.append(e.apcorr)\n hdu = pf.BinTableHDU.from_columns(\\\n pf.ColDefs( [pf.Column(name='NAME',format=py_to_fits(name),array=name),\n pf.Column(name='RA',format=py_to_fits(ra),array=ra),\n pf.Column(name='DEC',format=py_to_fits(dec),array=dec),\n pf.Column(name='FIELDNUMBER',format=py_to_fits(field),array=field),\n pf.Column(name='INSTRUMENTNUMBER',format=py_to_fits(inst),\\\n array=inst),\n pf.Column(name=\"MJD\",format=py_to_fits(mjd),array=mjd),\n pf.Column(name=\"AIRMASS\",format=py_to_fits(airmass),array=airmass),\n pf.Column(name=\"EXPTIME\",format=py_to_fits(exptime),array=exptime),\n pf.Column(name=\"EPOCH\",format=py_to_fits(epoch),array=epoch),\n pf.Column(name=\"APCORR\",format=py_to_fits(apcorr),array=apcorr)] ),\n name = 'Exposures')\n # hdu.header['EXTNAME'] = 'Exposures'\n return hdu", "def make_test_observation_table(observatory_name='HESS', n_obs=10,\n az_range=Angle([0, 360], 'deg'),\n alt_range=Angle([45, 90], 'deg'),\n date_range=(Time('2010-01-01'),\n Time('2015-01-01')),\n use_abs_time=False,\n n_tels_range=(3, 4),\n random_state='random-seed'):\n from ..data import ObservationTable, observatory_locations\n random_state = get_random_state(random_state)\n\n n_obs_start = 1\n\n obs_table = ObservationTable()\n\n # build a time reference as the start of 2010\n dateref = Time('2010-01-01T00:00:00')\n dateref_mjd_fra, dateref_mjd_int = np.modf(dateref.mjd)\n\n # define table header\n obs_table.meta['OBSERVATORY_NAME'] = observatory_name\n obs_table.meta['MJDREFI'] = dateref_mjd_int\n obs_table.meta['MJDREFF'] = dateref_mjd_fra\n if use_abs_time:\n # show the observation times in UTC\n obs_table.meta['TIME_FORMAT'] = 'absolute'\n else:\n # show the observation times in seconds after the reference\n obs_table.meta['TIME_FORMAT'] = 'relative'\n header = obs_table.meta\n\n # obs id\n obs_id = np.arange(n_obs_start, n_obs_start + n_obs)\n obs_table['OBS_ID'] = obs_id\n\n # obs time: 30 min\n ontime = Quantity(30. * np.ones_like(obs_id), 'minute').to('second')\n obs_table['ONTIME'] = ontime\n\n # livetime: 25 min\n time_live = Quantity(25. 
* np.ones_like(obs_id), 'minute').to('second')\n obs_table['LIVETIME'] = time_live\n\n # start time\n # - random points between the start of 2010 and the end of 2014 (unless\n # otherwise specified)\n # - using the start of 2010 as a reference time for the header of the table\n # - observations restrict to night time (only if specified time interval is\n # more than 1 day)\n # - considering start of astronomical day at midday: implicit in setting\n # the start of the night, when generating random night hours\n datestart = date_range[0]\n dateend = date_range[1]\n time_start = random_state.uniform(datestart.mjd, dateend.mjd, len(obs_id))\n time_start = Time(time_start, format='mjd', scale='utc')\n\n # check if time interval selected is more than 1 day\n if (dateend - datestart).jd > 1.:\n # keep only the integer part (i.e. the day, not the fraction of the day)\n time_start_f, time_start_i = np.modf(time_start.mjd)\n time_start = Time(time_start_i, format='mjd', scale='utc')\n\n # random generation of night hours: 6 h (from 22 h to 4 h), leaving 1/2 h\n # time for the last run to finish\n night_start = Quantity(22., 'hour')\n night_duration = Quantity(5.5, 'hour')\n hour_start = random_state.uniform(night_start.value,\n night_start.value + night_duration.value,\n len(obs_id))\n hour_start = Quantity(hour_start, 'hour')\n\n # add night hour to integer part of MJD\n time_start += hour_start\n\n if use_abs_time:\n # show the observation times in UTC\n time_start = Time(time_start.isot)\n else:\n # show the observation times in seconds after the reference\n time_start = time_relative_to_ref(time_start, header)\n # converting to quantity (better treatment of units)\n time_start = Quantity(time_start.sec, 'second')\n\n obs_table['TSTART'] = time_start\n\n # stop time\n # calculated as TSTART + ONTIME\n if use_abs_time:\n time_stop = Time(obs_table['TSTART'])\n time_stop += TimeDelta(obs_table['ONTIME'])\n else:\n time_stop = TimeDelta(obs_table['TSTART'])\n time_stop += TimeDelta(obs_table['ONTIME'])\n # converting to quantity (better treatment of units)\n time_stop = Quantity(time_stop.sec, 'second')\n\n obs_table['TSTOP'] = time_stop\n\n # az, alt\n # random points in a portion of sphere; default: above 45 deg altitude\n az, alt = sample_sphere(size=len(obs_id),\n lon_range=az_range,\n lat_range=alt_range,\n random_state=random_state)\n az = Angle(az, 'deg')\n alt = Angle(alt, 'deg')\n obs_table['AZ'] = az\n obs_table['ALT'] = alt\n\n # RA, dec\n # derive from az, alt taking into account that alt, az represent the values\n # at the middle of the observation, i.e. 
at time_ref + (TIME_START + TIME_STOP)/2\n # (or better: time_ref + TIME_START + (TIME_OBSERVATION/2))\n # in use_abs_time mode, the time_ref should not be added, since it's already included\n # in TIME_START and TIME_STOP\n az = Angle(obs_table['AZ'])\n alt = Angle(obs_table['ALT'])\n if use_abs_time:\n obstime = Time(obs_table['TSTART'])\n obstime += TimeDelta(obs_table['ONTIME']) / 2.\n else:\n obstime = time_ref_from_dict(obs_table.meta)\n obstime += TimeDelta(obs_table['TSTART'])\n obstime += TimeDelta(obs_table['ONTIME']) / 2.\n location = observatory_locations[observatory_name]\n altaz_frame = AltAz(obstime=obstime, location=location)\n alt_az_coord = SkyCoord(az, alt, frame=altaz_frame)\n sky_coord = alt_az_coord.transform_to('icrs')\n obs_table['RA'] = sky_coord.ra\n obs_table['DEC'] = sky_coord.dec\n\n # positions\n\n # number of telescopes\n # random integers in a specified range; default: between 3 and 4\n n_tels = random_state.randint(n_tels_range[0], n_tels_range[1] + 1, len(obs_id))\n obs_table['N_TELS'] = n_tels\n\n # muon efficiency\n # random between 0.6 and 1.0\n muoneff = random_state.uniform(low=0.6, high=1.0, size=len(obs_id))\n obs_table['MUONEFF'] = muoneff\n\n return obs_table", "def add_observations(instrument_id, obstable):\n\n if Session.registry.has():\n session = Session()\n else:\n session = Session(bind=DBSession.session_factory.kw[\"bind\"])\n\n # if the fields do not yet exist, we need to add them\n if ('RA' in obstable) and ('Dec' in obstable) and not ('field_id' in obstable):\n instrument = session.query(Instrument).get(instrument_id)\n regions = Regions.parse(instrument.region, format='ds9')\n field_data = obstable[['RA', 'Dec']]\n field_ids = add_tiles(\n instrument.id, instrument.name, regions, field_data, session=session\n )\n obstable['field_id'] = field_ids\n\n try:\n observations = []\n for index, row in obstable.iterrows():\n field_id = int(row[\"field_id\"])\n field = (\n session.query(InstrumentField)\n .filter(\n InstrumentField.instrument_id == instrument_id,\n InstrumentField.field_id == field_id,\n )\n .first()\n )\n if field is None:\n return log(\n f\"Unable to add observations for instrument {instrument_id}: Missing field {field_id}\"\n )\n\n observation = (\n session.query(ExecutedObservation)\n .filter_by(\n instrument_id=instrument_id, observation_id=row[\"observation_id\"]\n )\n .first()\n )\n if observation is not None:\n log(\n f\"Observation {row['observation_id']} for instrument {instrument_id} already exists... 
continuing.\"\n )\n continue\n\n # enable multiple obstime formats\n try:\n # can catch iso and isot this way\n obstime = Time(row[\"obstime\"])\n except ValueError:\n # otherwise catch jd as the numerical example\n obstime = Time(row[\"obstime\"], format='jd')\n\n observations.append(\n ExecutedObservation(\n instrument_id=instrument_id,\n observation_id=row[\"observation_id\"],\n instrument_field_id=field.id,\n obstime=obstime.datetime,\n seeing=row.get(\"seeing\", None),\n limmag=row[\"limmag\"],\n exposure_time=row[\"exposure_time\"],\n filt=row[\"filter\"],\n processed_fraction=row[\"processed_fraction\"],\n target_name=row[\"target_name\"],\n )\n )\n session.add_all(observations)\n session.commit()\n\n flow = Flow()\n flow.push('*', \"skyportal/REFRESH_OBSERVATIONS\")\n\n return log(f\"Successfully added observations for instrument {instrument_id}\")\n except Exception as e:\n return log(f\"Unable to add observations for instrument {instrument_id}: {e}\")\n finally:\n session.close()\n Session.remove()", "def movie_tbl(band,tranges,verbose=0,framesz=0,retries=20):\n\tif verbose:\n\t\tprint_inline('Populating exposure time table.')\n\ttstarts,tstops,exptimes=[],[],[]\n\tfor trange in tranges:\n\t\tstepsz = framesz if framesz else trange[1]-trange[0]\n\t\tsteps = np.ceil((trange[1]-trange[0])/stepsz)\n\t\tfor i,t0 in enumerate(np.arange(trange[0],trange[1],stepsz)):\n\t\t\tt1 = trange[1] if i==steps else t0+stepsz\n\t\t\ttstarts.append(t0)\n\t\t\ttstops.append(t1)\n\t\t\texptimes.append(dbt.compute_exptime(band,[t0,t1],\n\t\t\t\t\t\t\tverbose=verbose,retries=retries))\n\tcol1 = pyfits.Column(name='tstart',format='E',array=np.array(tstarts))\n\tcol2 = pyfits.Column(name='tstop',format='E',array=np.array(tstops))\n\tcol3 = pyfits.Column(name='exptime',format='E',array=np.array(exptimes))\n\tcols = pyfits.ColDefs([col1,col2,col3])\n\ttbl = pyfits.new_table(cols)\n\n\treturn tbl", "def info_for_all_observations(self):\n # Get all combinations of instrument, detector, filter, exp_type,\n all_combinations = []\n for i in range(len(self.info['Instrument'])):\n # Get instrument information for the exposure\n instrument = self.info['Instrument'][i]\n detector = self.info['detector'][i]\n if instrument == 'NIRCAM':\n detector = 'NRC{}'.format(detector)\n if '5' in detector:\n filtername = self.info['LongFilter'][i]\n pupilname = self.info['LongPupil'][i]\n detector = detector.replace('5', 'LONG')\n else:\n filtername = self.info['ShortFilter'][i]\n pupilname = self.info['ShortPupil'][i]\n elif instrument == 'NIRISS':\n filtername = self.info['ShortFilter'][i]\n pupilname = self.info['ShortPupil'][i]\n elif instrument == 'FGS':\n filtername = 'N/A'\n pupilname = 'N/A'\n readpattern = self.info['ReadoutPattern'][i]\n\n if instrument == 'NIRCAM':\n exptype = 'NRC_IMAGE'\n elif instrument == 'NIRISS':\n exptype = 'NIS_IMAGE'\n elif instrument == 'FGS':\n exptype = 'FGS_IMAGE'\n\n entry = (instrument, detector, filtername, pupilname, readpattern, exptype)\n all_combinations.append(entry)\n unique_combinations = list(set(all_combinations))\n return all_combinations, unique_combinations", "def file_table(list_observations, indir, informat, outfile):\n print('Creating file summary table ...')\n\n # We gather all infos in a list of dicts and write this\n # as a FITS table at the end.\n # for documentation see http://gamma-astro-data-formats.readthedocs.org/en/latest/data_storage/hdu_index/index.html\n\n HDU_CLASS_TAGS = dict(\n events='events',\n aeff='aeff_2d',\n edisp='edisp_2d',\n 
psf_3gauss='psf_3gauss',\n psf_king='psf_king',\n psf_table='psf_table',\n gti='gti'\n )\n\n rows = []\n for obs in list_observations.observations:\n testfile=obs.out_filename(\"events\", format=informat, dir=indir)\n try:\n table = Table.read(str(testfile), hdu='EVENTS')\n except Exception:\n print(\"fits corrupted for file \" + str(testfile))\n continue\n #for filetype in ['events', 'aeff', 'edisp', 'psf_3gauss']:\n #for filetype in ['events']:\n for filetype in ['events', 'aeff', 'edisp', 'psf_3gauss']:\n filename = obs.out_filename(filetype, format=informat, dir=indir)\n\n if filename.is_file():\n print('Processing {}'.format(filename))\n\n data = dict()\n\n # OBS_ID\n data['OBS_ID'] = obs.obs_id\n\n # HDU_TYPE\n if filetype in ('psf_3gauss'):\n data['HDU_TYPE'] = 'psf'\n else:\n data['HDU_TYPE'] = str(filetype)\n\n # HDU_CLASS\n data['HDU_CLASS'] = HDU_CLASS_TAGS[filetype]\n\n # FILE_DIR (relative path)\n data['FILE_DIR'] = str(os.path.relpath(str(obs.out_filename(filetype).parent), str(Path(outfile).parent)))\n\n # FILE_NAME\n data['FILE_NAME'] = str(obs.filename(filetype, format=informat).parts[-1])\n\n # HDU-INFOS\n hdu_list = fits.open(str(filename))\n hdu = hdu_list[1]\n header = hdu.header\n data['HDU_NAME'] = hdu.name\n\n # FILE-INFOS\n stat = filename.stat()\n data['SIZE'] = stat.st_size\n data['MTIME'] = stat.st_mtime\n data['MD5'] = hashlib.md5(filename.open('rb').read()).hexdigest()\n\n # if 'HDUCLAS2' in header:\n # data['HDUCLASS'] = header['HDUCLAS2']\n # else:\n # data['HDUCLASS'] = 'EVENTS'\n\n # if its the events-file, use a second dict for the gti-hdu\n if filetype == 'events':\n data_gti = dict()\n data_gti['OBS_ID'] = obs.obs_id\n data_gti['HDU_TYPE'] = 'gti'\n data_gti['HDU_CLASS'] = 'gti'\n data_gti['FILE_DIR'] = data['FILE_DIR']\n data_gti['FILE_NAME'] = data['FILE_NAME']\n data_gti['HDU_NAME'] = hdu_list[2].name\n data_gti['SIZE'] = data['SIZE']\n data_gti['MTIME'] = data['MTIME']\n data_gti['MD5'] = data['MD5']\n\n rows.append(data_gti)\n\n rows.append(data)\n hdu_list.close()\n\n else:\n print('File not found: {}'.format(filename))\n\n names = [\n 'OBS_ID', 'HDU_TYPE', 'HDU_CLASS',\n 'FILE_DIR', 'FILE_NAME', 'HDU_NAME',\n 'SIZE', 'MTIME', 'MD5'\n ]\n table = Table(rows=rows, names=names)\n\n print('Writing {}'.format(outfile))\n table.write(str(outfile), overwrite=True)\n # add hdu name\n hdulist = fits.open(str(outfile), mode='update')\n hdulist[1].name = 'HDU_INDEX'\n hdulist.close()", "def fill_obs(self, observation_table, data_store):\n for obs in observation_table:\n events = data_store.obs(obs_id=obs['OBS_ID']).events\n\n # TODO: filter out (mask) possible sources in the data\n # for now, the observation table should not contain any\n # run at or near an existing source\n\n self.counts_cube.fill_events([events])\n self.livetime_cube.data += events.observation_live_time_duration", "def file_table(list_observations, indir, informat, outfile):\n print('Creating file summary table ...')\n\n # We gather all infos in a list of dicts and write this\n # as a FITS table at the end.\n # for documentation see http://gamma-astro-data-formats.readthedocs.org/en/latest/data_storage/hdu_index/index.html\n\n HDU_CLASS_TAGS = dict(\n events='events',\n aeff='aeff_2d',\n edisp='edisp_2d',\n 
except Exception:\n print(\"fits corrupted for file \" + str(events_filename))\n continue\n if table.meta[\"OBS_ID\"]!=obs.obs_id:\n continue\n # for filetype in ['events', 'aeff', 'edisp', 'psf_3gauss']:\n # for filetype in ['events']:\n #for filetype in ['events', 'aeff', 'edisp', 'psf_3gauss']:\n for filetype in ['events', 'aeff', 'edisp', 'psf_table']:\n filename = Path(indir) / obs.filename(filetype, format=informat)\n\n if filename.is_file():\n print('Processing {}'.format(filename))\n\n data = dict()\n\n # OBS_ID\n data['OBS_ID'] = obs.obs_id\n\n # HDU_TYPE\n if filetype in ('psf_3gauss'):\n data['HDU_TYPE'] = 'psf'\n elif filetype in ('psf_table'):\n data['HDU_TYPE'] = 'psf'\n else:\n data['HDU_TYPE'] = str(filetype)\n\n # HDU_CLASS\n data['HDU_CLASS'] = HDU_CLASS_TAGS[filetype]\n\n # FILE_DIR (relative path)\n data['FILE_DIR'] = str(\n os.path.relpath(str(obs.out_filename(filetype).parent), str(Path(outfile).parent)))\n\n # FILE_NAME\n data['FILE_NAME'] = str(obs.filename(filetype, format=informat).parts[-1])\n\n # HDU-INFOS\n hdu_list = fits.open(str(filename))\n hdu = hdu_list[1]\n header = hdu.header\n data['HDU_NAME'] = hdu.name\n\n # FILE-INFOS\n stat = filename.stat()\n data['SIZE'] = stat.st_size\n data['MTIME'] = stat.st_mtime\n data['MD5'] = hashlib.md5(filename.open('rb').read()).hexdigest()\n\n # if 'HDUCLAS2' in header:\n # data['HDUCLASS'] = header['HDUCLAS2']\n # else:\n # data['HDUCLASS'] = 'EVENTS'\n\n # if its the events-file, use a second dict for the gti-hdu\n if filetype == 'events':\n data_gti = dict()\n data_gti['OBS_ID'] = obs.obs_id\n data_gti['HDU_TYPE'] = 'gti'\n data_gti['HDU_CLASS'] = 'gti'\n data_gti['FILE_DIR'] = data['FILE_DIR']\n data_gti['FILE_NAME'] = data['FILE_NAME']\n data_gti['HDU_NAME'] = hdu_list[2].name\n data_gti['SIZE'] = data['SIZE']\n data_gti['MTIME'] = data['MTIME']\n data_gti['MD5'] = data['MD5']\n\n rows.append(data_gti)\n\n rows.append(data)\n hdu_list.close()\n\n else:\n print('File not found: {}'.format(filename))\n\n names = [\n 'OBS_ID', 'HDU_TYPE', 'HDU_CLASS',\n 'FILE_DIR', 'FILE_NAME', 'HDU_NAME',\n 'SIZE', 'MTIME', 'MD5'\n ]\n\n table = Table(rows=rows, names=names)\n\n print('Writing {}'.format(indir + \"/\" + str(outfile)))\n table.write(indir + \"/\" + str(outfile), overwrite=True)\n # add hdu name\n hdulist = fits.open(indir + \"/\" + str(outfile), mode='update')\n hdulist[1].name = 'HDU_INDEX'\n hdulist.close()", "def _generate_exposure(self, expstart, number):\n\n index_number = number - 1 # for zero indexing\n\n filename = '{:04d}_raw.fits'.format(number)\n\n exp_gen = ExposureGenerator(self.detector, self.grism, self.NSAMP,\n self.SAMPSEQ, self.SUBARRAY,\n self.planet, filename, expstart)\n\n if not self.spatial_scan:\n self.sample_rate = 1 * u.year # high number reverts to read times\n\n _, sample_mid_points, sample_durations, read_index = \\\n exp_gen._gen_scanning_sample_times(self.sample_rate)\n\n time_array = (sample_mid_points + expstart).to(u.day)\n\n if self.transmission_spectroscopy:\n star_norm_flux = self.generate_lightcurves(time_array)\n planet_depths = 1 - star_norm_flux\n else:\n planet_depths = None\n\n # x shifts - linear shift with exposure, second exposure shifted by\n # x_shifts, direct image and first exp will match.\n x_ref = self._try_index(self.x_ref, index_number)\n y_ref = self._try_index(self.y_ref, index_number)\n sky_background = self._try_index(self.sky_background, index_number)\n\n # X and Y Shifts\n x_ref += self.x_shifts * index_number\n y_ref += self.y_shifts * index_number\n 
x_jitter = self.x_jitter\n y_jitter = self.y_jitter\n\n if self._visit_trend:\n scale_factor = self._visit_trend.get_scale_factor(index_number)\n else:\n scale_factor = None\n\n if self.spatial_scan:\n exp_frame = exp_gen.scanning_frame(\n x_ref, y_ref, x_jitter, y_jitter,\n self.wl, self.stellar_flux, planet_depths,\n self.scan_speed, self.sample_rate, sample_mid_points,\n sample_durations, read_index, ssv_generator=self.ssv_gen,\n noise_mean=self.noise_mean, noise_std=self.noise_std,\n add_flat=self.add_flat, add_dark=self.add_dark,\n scale_factor=scale_factor, sky_background=sky_background,\n cosmic_rate=self.cosmic_rate,\n add_gain_variations=self.add_gain_variations,\n add_non_linear=self.add_non_linear,\n clip_values_det_limits=self.clip_values_det_limits,\n add_read_noise=self.add_read_noise,\n add_stellar_noise=self.add_stellar_noise,\n add_initial_bias=self.add_initial_bias,\n progress_bar=self.progess,\n threads=self.threads\n )\n else:\n exp_frame = exp_gen.staring_frame(\n x_ref, y_ref, x_jitter, y_jitter,\n self.wl, self.stellar_flux, planet_depths,\n sample_mid_points, sample_durations, read_index,\n noise_mean=self.noise_mean, noise_std=self.noise_std,\n add_flat=self.add_flat, add_dark=self.add_dark,\n scale_factor=scale_factor, sky_background=sky_background,\n cosmic_rate=self.cosmic_rate,\n add_gain_variations=self.add_gain_variations,\n add_non_linear=self.add_non_linear,\n clip_values_det_limits=self.clip_values_det_limits,\n add_read_noise=self.add_read_noise,\n add_stellar_noise=self.add_stellar_noise,\n add_initial_bias=self.add_initial_bias,\n progress_bar=self.progess,\n threads=self.threads\n )\n\n exp_frame.generate_fits(self.outdir, filename, ldcoeffs=self.ldcoeffs)\n\n return exp_frame", "def generate_exptime_table(self, ):\n\n # Perform calculation for all stars in biased sample\n Ndraw = self.NBIAS\n\n np.random.seed(seed=None)\n\n # Allocate memory for exposure times\n t_tots = np.zeros(Ndraw)\n tpbpcs = []\n pct_obs_iwas = []\n lammax_obs_iwas = []\n specs = []\n\n \"\"\"\n Calculate the exposure times and spectra in each bandpass for each\n star in biased sample\n \"\"\"\n\n # Loop over stars in this sample\n for i in range(Ndraw):\n #print(\"HIP %i, %.2f pc, %s \" %(hip[i], dist[i], stype[i]))\n\n # Set system parameters for this star\n self.prep_ith_star(i)\n\n # Calculate the time to observe the complete spectrum\n t_tots[i], tpbpc, spectrum, iwa = self.complete_spectrum_time()\n\n tpbpcs.append(tpbpc)\n pct_obs_iwas.append(iwa[0])\n specs.append(spectrum)\n\n # Calculate channel widths\n deltas = []\n for channel in CHANNELS:\n l = default_luvoir(channel=channel)\n deltas.append(l.lammax - l.lammin)\n self.deltas = np.array(deltas)\n\n # Calculate channel fractional completeness\n self.channel_weights = (self.deltas / np.sum(self.deltas))\n\n # Calculate completeness for each star in sample\n self.completeness = np.sum(np.array(pct_obs_iwas) * self.channel_weights, axis = 1)\n\n \"\"\"\n Make a Lookup Table of Exposure times for each star in sample\n \"\"\"\n\n tpbpcs_rect = [] # Time per bandpass\n tpcs_rect = [] # Time per channel\n\n # Loop over all the stars in sample\n for idrew in range(self.NBIAS):\n\n tpbpcs_rect.append([])\n tpcs_rect.append([])\n bp_names = []\n bp_chan = []\n\n # Loop over all the LUVOIR channels\n for ichan in range(len(CHANNELS)):\n\n tpcs_rect[idrew].append(0.0)\n\n # Loop over all the bands in this channel\n for iband in range(len(tpbpcs[0][ichan])):\n\n bp_names.append(\"%s %i\" %(CHANNELS[ichan], iband+1))\n 
bp_chan.append(ichan)\n tpbpcs_rect[idrew].append(tpbpcs[idrew][ichan][iband])\n tpcs_rect[idrew][ichan] += tpbpcs[idrew][ichan][iband]\n\n # Make np arrays\n tpbpcs_rect = np.array(tpbpcs_rect)\n tpcs_rect = np.array(tpcs_rect)\n bp_names = np.array(bp_names)\n bp_chan = np.array(bp_chan)\n\n # Make infs --> nans\n infmask = ~np.isfinite(tpbpcs_rect)\n tpbpcs_rect[infmask] = np.nan\n infmask = ~np.isfinite(tpcs_rect)\n tpcs_rect[infmask] = np.nan\n\n # Set attributes\n self.tpbpcs_rect = tpbpcs_rect\n self.tpcs_rect = tpcs_rect\n self.bp_names = bp_names\n self.bp_chan = bp_chan\n\n \"\"\"\n New completeness calculations\n \"\"\"\n\n bandpasses = []\n\n # Loop over telescope channels\n for j, channel in enumerate(CHANNELS):\n\n # Channel dependent bandwidth?\n if type(self.bandwidth) is float:\n bandwidth = self.bandwidth\n else:\n assert len(self.bandwidth) == len(CHANNELS)\n bandwidth = self.bandwidth[j]\n\n # Get the channel specific telescope parameters\n luvoir = default_luvoir(channel=channel)\n self.cn.telescope = luvoir\n\n # Calculate the bandpass edges\n edges = calculate_bandpass_edges(luvoir.lammin, luvoir.lammax, bandwidth = bandwidth)\n\n # Calculate the number of bandpasses\n Nbands = len(edges) - 1\n\n # Loop over bandpasses\n for i in range(Nbands):\n\n # Get the max, min, and middle wavelenths for this bandpass\n lammin = edges[i]\n lammax = edges[i+1]\n\n bandpasses.append([lammin, lammax])\n\n bandpasses = np.array(bandpasses)\n lmin, lmax = np.min(np.hstack(bandpasses)), np.max(np.hstack(bandpasses))\n\n # Fractional completeness of each bandpass\n bp_frac = ((bandpasses[:,1] - bandpasses[:,0]) / (lmax - lmin)) / np.sum((bandpasses[:,1] - bandpasses[:,0]) / (lmax - lmin))\n\n # Completeness by target\n tot_completeness = np.sum(np.isfinite(self.tpbpcs_rect) * bp_frac, axis=1)\n\n # Fraction of stars in biased sample that can completely observe each bandpass\n frac_bias_bp = np.sum(np.isfinite(tpbpcs_rect)*1.0, axis=0) / self.NBIAS\n\n # Set attributes\n self.bandpasses = bandpasses\n self.bp_frac = bp_frac\n self.tot_completeness = tot_completeness\n self.frac_bias_bp = frac_bias_bp\n\n self._make_pandas_table()\n\n return", "def attribute_irrigation():\n fc = ee.FeatureCollection(IRRIGATION_TABLE)\n for state in TARGET_STATES:\n for yr in range(2011, 2021):\n images = os.path.join(ASSET_ROOT, '{}_{}'.format(state, yr))\n coll = ee.Image(images)\n tot = coll.select('classification').remap([0, 1, 2, 3], [1, 0, 0, 0])\n means = tot.reduceRegions(collection=fc,\n reducer=ee.Reducer.mean(),\n scale=30)\n\n task = ee.batch.Export.table.toCloudStorage(\n means,\n description='{}_{}'.format(state, yr),\n bucket='wudr',\n fileNamePrefix='attr_{}_{}'.format(state, yr),\n fileFormat='CSV')\n\n print(state, yr)\n task.start()", "def Table(self: Any, accessories: List[Dict[str, Any]]) -> List[Dict[str, Any]]:\n\n table: List[Dict[str, Any]] = Utility.ReadCSV(\n self, f\"{self.iXAssets}/mp/accessorytable.csv\", AccessoryTable\n )\n\n if table is None:\n return accessories\n\n for accessory in accessories:\n for entry in table:\n if accessory.get(\"altId\") != entry.get(\"ref\"):\n continue\n\n accessory[\"name\"] = self.localize.get(entry.get(\"name\"))\n accessory[\"description\"] = self.localize.get(entry.get(\"description\"))\n accessory[\"image\"] = entry.get(\"lootImage\")\n accessory[\"hidden\"] = bool(entry.get(\"hideInUI\"))\n\n return accessories", "def update_hdrtab(image, level, total_obj_list, input_exposures):\n # Convert input_exposure filenames into HAP product 
filenames\n name_col = []\n orig_tab = image['hdrtab'].data\n\n for row in orig_tab:\n rootname = str(row['rootname'])\n\n # The rootname is ipppssoot, but the expname is only contains ipppssoo,\n # so remove the last character for the comparisons\n rootname = rootname[0:-1]\n\n for expname in input_exposures:\n if rootname in expname:\n if level == 1:\n # Intrepret inputs as exposures (FLT/FLC) filename not HAP names\n name_col.append(expname)\n else:\n # Convert input exposure names into HAP names\n foundit = False\n for tot_obj in total_obj_list:\n for exposure in tot_obj.edp_list:\n if rootname in exposure.full_filename:\n name_col.append(exposure.drizzle_filename)\n foundit = True\n break\n\n # define new column with HAP expname\n max_len = min(max([len(name) for name in name_col]), 51)\n hapcol = Column(array=np.array(name_col, dtype=np.str), name=HAPCOLNAME, format='{}A'.format(max_len + 4))\n newcol = fits.ColDefs([hapcol])\n\n # define new extension\n haphdu = fits.BinTableHDU.from_columns(orig_tab.columns + newcol)\n haphdu.header['extname'] = 'HDRTAB'\n haphdu.header['extver'] = 1\n # remove old extension\n del image['hdrtab']\n # replace with new extension\n image.append(haphdu)", "def build_data(self):\n from desiutil.io import combine_dicts\n # Loop on exposures\n odict = {}\n for qanight in self.qa_nights:\n for qaexp in qanight.qa_exps:\n # Get the exposure dict\n idict = write_qa_exposure('foo', qaexp, ret_dict=True)\n odict = combine_dicts(odict, idict)\n # Finish\n self.data = odict", "def exp_scan(self, exposure_time_list):\n self.generic_scan(self.exp, exposure_time_list)", "def factor_exposure(self):\n exp_hs_all = pd.DataFrame([])\n exp_zz_all = pd.DataFrame([])\n for i in range(len(self.weekly_date)):\n date = self.weekly_date.iloc[i,0]\n factor = get_barra_factor_from_sql(date)\n factor['secID'] = factor.index.tolist()\n stocklist = factor.index.tolist()\n \n hs300 = get_index_composition(date,'000300.SH')\n zz500 = get_index_composition(date,'000905.SH')\n hs300['secID'] = hs300.index.tolist()\n zz500['secID'] = zz500.index.tolist()\n \n stocklist_hs300 = list(set(hs300.index.tolist()).intersection(set(stocklist)))\n stocklist_zz500 = list(set(zz500.index.tolist()).intersection(set(stocklist)))\n stocklist_hs300.sort()\n stocklist_zz500.sort()\n \n factor_hs = extract_part_from_all(stocklist_hs300,factor,'secID')\n factor_zz = extract_part_from_all(stocklist_zz500,factor,'secID')\n hs_weight = extract_part_from_all(stocklist_hs300,hs300,'secID')\n zz_weight = extract_part_from_all(stocklist_zz500,zz500,'secID')\n del factor_hs['secID'],factor_zz['secID'],hs_weight['secID'],zz_weight['secID']\n \n \n exp_hs = pd.DataFrame(np.dot(hs_weight.T,factor_hs))\n exp_zz = pd.DataFrame(np.dot(zz_weight.T,factor_zz))\n \n \n exp_hs_all = pd.concat([exp_hs_all,exp_hs], axis = 0)\n exp_zz_all = pd.concat([exp_zz_all,exp_zz], axis = 0) \n print(i)\n exp_hs_all.columns = ['Beta','Momentum','Size','EY','RV','Growth',\\\n 'BP','Leverage','Liquidity']\n exp_zz_all.columns = ['Beta','Momentum','Size','EY','RV','Growth',\\\n 'BP','Leverage','Liquidity']\n exp_hs_all.index = self.weekly_date.iloc[:,0]\n exp_zz_all.index = self.weekly_date.iloc[:,0]\n return exp_hs_all,exp_zz_all", "def run_observation(self):\n\n self._generate_direct_image() # to calibrate x_ref and y_ref\n\n num_frames = len(self.exp_start_times)\n progress = Progress(num_frames)\n self.progess = progress\n\n progress_line = 'Generating frames 0/{} done'.format(num_frames)\n progress.print_status_line(progress_line)\n 
progress.progress_line = progress_line\n\n for i, start_time in enumerate(self.exp_start_times):\n filenum = i + 1\n self._generate_exposure(start_time, filenum)\n\n progress.increment()\n progress_line = 'Generating frames {}/{} done'.format(filenum,\n num_frames)\n progress.print_status_line(progress_line)\n\n # so it can be retreived by exposure_generator\n progress.progress_line = progress_line", "def parse_ipac_table(table_file):\n file_lines = table_file.readlines()\n if len(file_lines) < 5:\n raise ValueError(\"No images found!\")\n \n columns = file_lines[0].replace(\"|\",\" \").split()\n \n # Each row in the table starting at index 4 is metadata for a new image / observation\n metadatas = []\n for image_data in file_lines[4:]:\n line = image_data.replace(\"|\",\" \").split()\n obsdate_idx = columns.index(\"obsdate\")\n tmp = line[obsdate_idx] + \" \" + line[obsdate_idx+1]\n del line[obsdate_idx+1]\n line[obsdate_idx] = tmp\n metadatas.append(dict(zip(columns, line)))\n \n return metadatas", "def create_observation(obs):\n\n obs = {\n \"resourceType\": \"Bundle\",\n \"entry\": [\n {\n \"resource\": {\n \"resourceType\": \"Observation\",\n \"text\": \"Respiratory Rate\",\n \"effectiveDateTime\": obs.datetime,\n \"subject\": {\n \"reference\": obs.pat_no\n },\n \"valueQuantity\": {\n \"value\": obs.resp_rate,\n }\n }\n },\n {\n \"resource\": {\n \"resourceType\": \"Observation\",\n \"text\": \"Body Temperature\",\n \"effectiveDateTime\": obs.datetime,\n \"subject\": {\n \"reference\": obs.pat_no\n },\n \"valueQuantity\": {\n \"value\": obs.temp,\n }\n }\n },\n {\n \"resource\": {\n \"resourceType\": \"Observation\",\n \"text\": \"BP Systolic\",\n \"effectiveDateTime\": obs.datetime,\n \"subject\": {\n \"reference\": obs.pat_no\n },\n \"valueQuantity\": {\n \"value\": obs.bp_sys,\n }\n }\n },\n {\n \"resource\": {\n \"resourceType\": \"Observation\",\n \"text\": \"Heart Rate\",\n \"effectiveDateTime\": obs.datetime,\n \"subject\": {\n \"reference\": obs.pat_no\n },\n \"valueQuantity\": {\n \"value\": obs.pulse,\n }\n }\n }\n ]\n }\n\n return obs", "def make_sourceframe_with_observations(\n n_observations: int,\n exposure_id: str = \"exposure\",\n obscode: str = \"obs\",\n exposure_mjd_start: float = 50000.0,\n exposure_mjd_mid: float = 50000.0,\n healpixel: int = 1,\n) -> SourceFrame:\n observations = [\n make_sourceobs(\n exposure_id=exposure_id,\n obscode=obscode,\n healpixel=healpixel,\n mjd=exposure_mjd_mid,\n exposure_mjd_start=exposure_mjd_start,\n exposure_mjd_mid=exposure_mjd_mid,\n )\n for _ in range(n_observations)\n ]\n\n return SourceFrame(\n exposure_id=exposure_id,\n obscode=obscode,\n filter=\"filter\",\n exposure_mjd_start=exposure_mjd_start,\n exposure_mjd_mid=exposure_mjd_mid,\n exposure_duration=30.0,\n healpixel=1,\n observations=observations,\n )", "def summarize_observing_conditions(fitsFiles):\n count = len(fitsFiles)\n\n # Here is the data we are going to collect from the fits headers\n year = np.zeros(count, dtype=int)\n month = np.zeros(count, dtype=int)\n day = np.zeros(count, dtype=int)\n hour = np.zeros(count, dtype=int)\n minute = np.zeros(count, dtype=int)\n airmass = np.zeros(count, dtype=float)\n water_column = np.zeros(count, dtype=float)\n \n for ii in range(len(fitsFiles)):\n # Get header info\n hdr = pyfits.getheader(fitsFiles[ii])\n\n airmass[ii] = float(hdr['AIRMASS'])\n\n date = hdr['DATE-OBS'].split('-')\n _year = int(date[0])\n _month = int(date[1])\n _day = int(date[2])\n\n utc = hdr['UTC'].split(':')\n _hour = int(utc[0])\n _minute = 
int(utc[1])\n _second = int(math.floor(float(utc[2])))\n\n utc = datetime.datetime(_year, _month, _day, _hour, _minute, _second)\n utc2hst = datetime.timedelta(hours=-10)\n hst = utc + utc2hst\n\n year[ii] = hst.year\n month[ii] = hst.month\n day[ii] = hst.day\n hour[ii] = hst.hour\n minute[ii] = hst.minute\n\n # Get the water column in mm of H2O\n water_column[ii] = weather.cso_water_column(_year, _month, _day, \n _hour, _minute)\n\n # Now lets fetch the CFHT weather data\n (temperature, pressure, humidity, wind_speed, wind_dir) = \\\n weather.cfht_weather_data(year, month, day, hour, minute)\n\n # Print out a nicely formatted table\n print('%-20s %4s %2s %2s %2s %2s %4s %4s %5s %5s %4s %4s %4s' % \\\n ('Filename', 'Year', 'M', 'D', 'h', 'm', 'AirM', 'H2O', 'Temp', \n 'Press', 'Humi', 'Wind', 'Dir'))\n print('%-20s %4s %2s %2s %2s %2s %4s %4s %5s %5s %4s %4s %4s' % \\\n ('HST', '', '', '', '', '', '', 'mm', 'C', 'mbar', '%', 'km/h', 'deg'))\n print('%-20s %4s %2s %2s %2s %2s %4s %4s %5s %5s %4s %4s %4s' % \\\n ('--------', '----', '--', '--', '--', '--', '----', '----', '-----', \n '-----', '----', '----', '----'))\n\n for ii in range(len(fitsFiles)):\n print('%-20s %4d %2d %2d %2d %2d ' % \\\n (fitsFiles[ii], year[ii], month[ii], day[ii], hour[ii], minute[ii]),)\n print('%4.2f %4.2f %5.1f %5.1f %4.1f %4.1f %4d' % \\\n (airmass[ii], water_column[ii], temperature[ii], pressure[ii],\n humidity[ii], wind_speed[ii], wind_dir[ii]))\n\n # Print out the average values\n print('%-20s %4s %2s %2s %2s %2s %4s %4s %5s %5s %4s %4s %4s' % \\\n ('--------', '----', '--', '--', '--', '--', '----', '----', '-----', \n '-----', '----', '----', '----'))\n print('%-20s %4d %2d %2d %2d %2d ' % \\\n ('Average', year.mean(), month.mean(), day.mean(), hour.mean(), \n minute.mean()),)\n print('%4.2f %4.2f %5.1f %5.1f %4.1f %4.1f %4d' % \\\n (airmass.mean(), water_column.mean(), temperature.mean(), \n pressure.mean(), humidity.mean(), wind_speed.mean(), wind_dir.mean()))\n print('%-20s %4d %2d %2d %2d %2d ' % \\\n ('Std. 
Dev.', year.std(), month.std(), day.std(), hour.std(), \n minute.std()),)\n print('%4.2f %4.2f %5.1f %5.1f %4.1f %4.1f %4d' % \\\n (airmass.std(), water_column.std(), temperature.std(), \n pressure.std(), humidity.std(), wind_speed.std(), wind_dir.std()))", "def create_exposure(event_class,event_type,egy,cth):\n\n if isinstance(event_type,int):\n event_type = evtype_string[event_type]\n \n irf_factory=pyIrfLoader.IrfsFactory.instance()\n irf = irf_factory.create('%s::%s'%(event_class,event_type))\n\n irf.aeff().setPhiDependence(False)\n \n theta = np.degrees(np.arccos(cth))\n \n # Exposure Matrix\n # Dimensions are Etrue and incidence angle\n m = np.zeros((len(egy),len(cth)))\n\n for i, x in enumerate(egy):\n for j, y in enumerate(theta): \n m[i,j] = irf.aeff().value(10**x,y,0.0)\n\n return m", "def tabulate(store: ObservationStore) -> \\\n Generator[LabelledObservation, None, None]:\n for k in store:\n for ob in store[k]:\n yield when(ob), measured(ob), k", "def observeField(target, exposure):\n\n status = 2\n real_exposure = exposure + np.random.normal(0.0, 20.0)\n realSN2 = target['DESsn2'] + np.random.uniform(0.0, 1.0)\n\n return status, real_exposure, realSN2", "def make_test_eventlist(observation_table,\n obs_id,\n sigma=Angle(5., 'deg'),\n spectral_index=2.7,\n random_state='random-seed'):\n from ..data import EventList\n random_state = get_random_state(random_state)\n\n # find obs row in obs table\n obs_ids = observation_table['OBS_ID'].data\n obs_index = np.where(obs_ids == obs_id)\n row = obs_index[0][0]\n\n # get observation information\n alt = Angle(observation_table['ALT'])[row]\n livetime = Quantity(observation_table['LIVETIME'])[row]\n\n # number of events to simulate\n # it is linearly dependent on the livetime, taking as reference\n # a trigger rate of 300 Hz\n # it is linearly dependent on the zenith angle (90 deg - altitude)\n # it is n_events_max at alt = 90 deg and n_events_max/2 at alt = 0 deg\n n_events_max = Quantity(300., 'Hz') * livetime\n alt_min = Angle(0., 'deg')\n alt_max = Angle(90., 'deg')\n slope = (n_events_max - n_events_max / 2) / (alt_max - alt_min)\n free_term = n_events_max / 2 - slope * alt_min\n n_events = alt * slope + free_term\n\n # simulate energy\n # the index of `~numpy.random.RandomState.power` has to be\n # positive defined, so it is necessary to translate the (0, 1)\n # interval of the random variable to (emax, e_min) in order to\n # have a decreasing power-law\n e_min = Quantity(0.1, 'TeV')\n e_max = Quantity(100., 'TeV')\n energy = sample_powerlaw(e_min.value, e_max.value, spectral_index,\n size=n_events, random_state=random_state)\n energy = Quantity(energy, 'TeV')\n\n E_0 = Quantity(1., 'TeV') # reference energy for the model\n\n # define E dependent sigma\n # it is defined via a PL, in order to be log-linear\n # it is equal to the parameter sigma at E max\n # and sigma/2. at E min\n sigma_min = sigma / 2. 
# at E min\n sigma_max = sigma # at E max\n s_index = np.log(sigma_max / sigma_min)\n s_index /= np.log(e_max / e_min)\n s_norm = sigma_min * ((e_min / E_0) ** -s_index)\n sigma = s_norm * ((energy / E_0) ** s_index)\n\n # simulate detx, dety\n detx = Angle(random_state.normal(loc=0, scale=sigma.deg, size=n_events), 'deg')\n dety = Angle(random_state.normal(loc=0, scale=sigma.deg, size=n_events), 'deg')\n\n # fill events in an event list\n event_list = EventList()\n event_list['DETX'] = detx\n event_list['DETY'] = dety\n event_list['ENERGY'] = energy\n\n # store important info in header\n event_list.meta['LIVETIME'] = livetime.to('second').value\n event_list.meta['EUNIT'] = str(energy.unit)\n\n # effective area table\n aeff_table = Table()\n\n # fill threshold, for now, a default 100 GeV will be set\n # independently of observation parameters\n energy_threshold = Quantity(0.1, 'TeV')\n aeff_table.meta['LO_THRES'] = energy_threshold.value\n aeff_table.meta['name'] = 'EFFECTIVE AREA'\n\n # convert to BinTableHDU and add necessary comment for the units\n aeff_hdu = table_to_fits_table(aeff_table)\n aeff_hdu.header.comments['LO_THRES'] = '[' + str(energy_threshold.unit) + ']'\n\n return event_list, aeff_hdu", "def obs_assimilation_statistics(prior, obs): #post, obs):\n # Make sure these states are the right kind of object\n assert isinstance(prior, EnsembleState)\n #assert isinstance(post, EnsembleState)\n\n # Build a list of dictionaries\n oblist = []\n for ob in obs:\n obd = {}\n obd['validtime'] = ob.time\n obd['flead'] = (ob.time - pd.to_datetime(prior['validtime'].values[0])).total_seconds()/3600\n obd['lat'] = ob.lat\n obd['lon'] = ob.lon\n obd['obtype'] = ob.obtype\n obd['description'] = ob.description\n obd['ob error'] = ob.error\n obd['value'] = ob.value\n obd['assimilated'] = ob.assimilated\n prior_ye = ob.estimate(prior)\n #post_ye = ob.estimate(post)\n obd['prior mean'] = prior_ye.mean()\n #obd['post mean'] = post_ye.mean()\n obd['prior variance'] = prior_ye.var()\n #obd['post variance'] = post_ye.var()\n oblist.append(obd)\n print(len(oblist))\n # Build a dataframe from this list of objects\n df = pd.DataFrame(oblist)\n return df", "def make_data_table(self, events):\n data_table = []\n for event in events:\n table_row = {}\n for key, val in PROOFPOINT_TRAP_EVENTS_MAP.items():\n if key in event.keys():\n table_row[val] = {\"value\" : event[key]}\n data_table.append({\"cells\" : table_row})\n LOG.debug(\"Table Row created with values: %s\", table_row)\n LOG.info(\"Data Table Assembled with %d rows\", len(data_table))\n\n # TODO: POST Data Table to Resilient Incident\n LOG.debug(pprint.pformat(data_table, indent=4))\n return {'proofpoint_trap_events' : data_table}", "def create_observation(self):", "def create_observation(self):", "def _get_dataset_table(self):\n\n model_dataset = self.get_model_dataset()\n index_as_str = np.expand_dims(model_dataset.index.astype(str), 1)\n observation_data = np.column_stack((index_as_str, model_dataset.as_matrix()))\n observation_data_headers = ['DateTime']\n observation_data_headers.extend(model_dataset.keys())\n observation_table = SimpleTable(data=observation_data,\n headers=observation_data_headers)\n\n return observation_table", "def get_exp_columns(self, wanted_exps):\n # Get the dict.\n exp_id_str = get_id_str(self.req_exps)\n table_name = '%s_datatable_%s' % (self.app_label, exp_id_str)\n # Make experiment unique.\n if not db_table_exists(table_name):\n self.create_base_table(table_name)\n column_names = list(get_columnnames(table_name))\n 
divisor_col = None\n dividend_col = None\n #exp_cols = []\n #potential_columns = []\n # Iterate over all column names, and make a list of those that hold data.\n datacol_pattern = re.compile(r'_\\d+$')\n dividend_pattern = r'_{}$'.format(str(wanted_exps['dividend'].id))\n divisor_pattern = r'_{}$'.format(str(wanted_exps['divisor'].id))\n for column_name in column_names:\n if datacol_pattern.search(column_name) is not None:\n if divisor_col is None:\n if re.search(divisor_pattern, column_name):\n divisor_col = column_name\n if dividend_col is None:\n if re.search(dividend_pattern, column_name):\n dividend_col = column_name\n \n sql = \"SELECT %s, %s FROM %s;\" % (dividend_col, divisor_col, table_name)\n return from_db(sql, fetch_as='tuple')", "def generateExtrapolationTable(sex, region):\n pop1 = dataStore.data[dataStore.data.Location == region]\n pop1 = pop1[['Time', 'Age', SEXES[sex]]]\n # pop1 = data[['Time', 'Age', SEX]].query('Location' == CNTRY)\n #print pop1\n\n july1from1950to2100 = [inPosixDays(date(y, 7, 1)) for y in xrange(1950, 2100+1)]\n\n dateRange1950to2100inPosixDays = range(inPosixDays(date(1950,1,1)), inPosixDays(date(2100,12,31))+1)\n\n ''' --- Date interpolation function --- '''\n def dateInterp(iage):\n popi = np.asarray(pop1.loc[dataStore.data.Age == iage.name, SEXES[sex]])\n\n # spline interpolation function from Scipy Package\n iuspl = InterpolatedUnivariateSpline(july1from1950to2100, popi, k=4)\n return iuspl(dateRange1950to2100inPosixDays)\n\n # --- store the results of the date interpolation --- #\n result1 = pd.DataFrame(index = range(0,len(dateRange1950to2100inPosixDays)), columns = range(0,100))\n table = result1.apply(dateInterp, axis=0)\n\n # Change column names by appending \"age_\"\n oldHeaders = table.columns\n newHeaders = []\n for i in oldHeaders:\n newHeaders.append(\"age\" + \"_\" + str(i))\n table.columns = newHeaders\n #print result1.head # results: \"age_0, age_1, ...\"\n\n # Convert the numerical days to date string\n def toDate(d):\n return (date(1970, 1, 1) + timedelta(days=d)).strftime('%Y-%m-%d')\n toDate = np.vectorize(toDate) # vectorize the function to iterate over numpy ndarray\n #fullDateRange = toDate(dateRange1970to2100inPosixDays) # 1st result: 1950-01-01\n fullDateRange = len(dateRange1950to2100inPosixDays)*[None]\n for i in range(0,len(dateRange1950to2100inPosixDays)):\n fullDateRange[i] = toDate(dateRange1950to2100inPosixDays[i])\n\n # Add the fullDateRange to the result1\n table['date1'] = fullDateRange\n\n return table", "def make_obslog(path):\n name_pattern = '^HI\\.\\d{8}\\.\\d{5}\\.fits$'\n\n # scan the raw files\n fname_lst = sorted(os.listdir(path))\n\n # prepare logtable\n logtable = Table(dtype=[\n ('frameid', 'i2'), ('fileid', 'S17'), ('imgtype', 'S3'),\n ('object', 'S20'), ('i2cell', 'bool'), ('exptime', 'f4'),\n ('obsdate', Time),\n ('deckname', 'S2'), ('filter1', 'S5'), ('filter2', 'S5'),\n ('nsat_1', 'i4'), ('nsat_2', 'i4'), ('nsat_3', 'i4'),\n ('q95_1', 'i4'), ('q95_2', 'i4'), ('q95_3', 'i4'),\n ])\n\n # prepare infomation to print\n pinfo = FormattedInfo(all_columns,\n ['frameid', 'fileid', 'imgtype', 'object', 'i2cell', 'exptime',\n 'obsdate', 'deckname', 'nsat_2', 'q95_2'])\n\n # print header of logtable\n print(pinfo.get_separator())\n print(pinfo.get_title())\n print(pinfo.get_separator())\n\n # start scanning the raw files\n prev_frameid = -1\n for fname in fname_lst:\n if not re.match(name_pattern, fname):\n continue\n fileid = fname[0:17]\n filename = os.path.join(path, fname)\n hdu_lst = fits.open(filename)\n 
# parse images\n data_lst, mask_lst = parse_3ccd_images(hdu_lst)\n\n head0 = hdu_lst[0].header\n\n frameid = prev_frameid + 1\n\n # get obsdate in 'YYYY-MM-DDTHH:MM:SS' format\n date = head0.get('DATE-OBS')\n utc = head0.get('UTC', head0.get('UT'))\n obsdate = Time('%sT%s'%(date, utc))\n\n exptime = head0.get('ELAPTIME')\n i2in = head0.get('IODIN', False)\n i2out = head0.get('IODOUT', True)\n i2cell = i2in\n imagetyp = head0.get('IMAGETYP')\n targname = head0.get('TARGNAME', '')\n lampname = head0.get('LAMPNAME', '')\n\n if imagetyp == 'object':\n # science frame\n imgtype = 'sci'\n objectname = targname\n elif imagetyp == 'flatlamp':\n # flat\n imgtype = 'cal'\n objectname = '{} ({})'.format(imagetyp, lampname)\n elif imagetyp == 'arclamp':\n # arc lamp\n imgtype = 'cal'\n objectname = '{} ({})'.format(imagetyp, lampname)\n elif imagetyp == 'bias':\n imgtype = 'cal'\n objectname = 'bias'\n else:\n print('Unknown IMAGETYP:', imagetyp)\n\n # get deck and filter information\n deckname = head0.get('DECKNAME', '')\n filter1 = head0.get('FIL1NAME', '')\n filter2 = head0.get('FIL2NAME', '')\n\n # determine the numbers of saturated pixels for 3 CCDs\n mask_sat1 = (mask_lst[0] & 4)>0\n mask_sat2 = (mask_lst[1] & 4)>0\n mask_sat3 = (mask_lst[2] & 4)>0\n nsat_1 = mask_sat1.sum()\n nsat_2 = mask_sat2.sum()\n nsat_3 = mask_sat3.sum()\n\n # find the 95% quantile\n q95_lst = [np.sort(data.flatten())[int(data.size*0.95)]\n for data in data_lst]\n q95_1, q95_2, q95_3 = q95_lst\n\n # close the fits file\n hdu_lst.close()\n\n item = [frameid, fileid, imgtype, objectname, i2cell, exptime, obsdate,\n deckname, filter1, filter2,\n nsat_1, nsat_2, nsat_3, q95_1, q95_2, q95_3]\n\n logtable.add_row(item)\n # get table Row object. (not elegant!)\n item = logtable[-1]\n\n # print log item with colors\n string = pinfo.get_format(has_esc=False).format(item)\n print(print_wrapper(string, item))\n\n prev_frameid = frameid\n\n print(pinfo.get_separator())\n\n # sort by obsdate\n #logtable.sort('obsdate')\n\n # determine filename of logtable.\n # use the obsdate of the LAST frame.\n obsdate = logtable[-1]['obsdate'].iso[0:10]\n outname = '{}.obslog'.format(obsdate)\n if os.path.exists(outname):\n i = 0\n while(True):\n i += 1\n outname = '{}.{}.obslog'.format(obsdate, i)\n if not os.path.exists(outname):\n outfilename = outname\n break\n else:\n outfilename = outname\n\n # save the logtable\n\n # loginfo is not pinfo because not all columns need to be printed in the\n # screen, but all columns should be written in logfile.\n loginfo = FormattedInfo(all_columns)\n outfile = open(outfilename, 'w')\n outfile.write(loginfo.get_title()+os.linesep)\n outfile.write(loginfo.get_dtype()+os.linesep)\n outfile.write(loginfo.get_separator()+os.linesep)\n for row in logtable:\n outfile.write(loginfo.get_format(has_esc=False).format(row)+os.linesep)\n outfile.close()", "def get_camfiber_table(date, exp_id):\n filename = '/exposures/nightwatch/{}/{:08d}/qa-{:08d}.fits'.format(date, exp_id, exp_id)\n\n tab = None\n if os.path.isfile(filename):\n tab = Table.read(filename, hdu='PER_CAMFIBER')\n tab = tab['FIBER', 'MEDIAN_CALIB_SNR', 'CAM']\n return tab", "def make_weather_features(self, timeline_dt_list):\n\n print \"Making weather features...\"\n\n N_FEATURES = 2\n n_examples = len(timeline_dt_list)\n XX = numpy.zeros((n_examples, N_FEATURES))\n indices = numpy.zeros(n_examples,dtype='int')\n ind_weatherday = 0\n\n # Loop over all times in the timeline\n for ii, time in enumerate(timeline_dt_list):\n # Find where this time in the 
timeline matches the date\n # of some weather data.\n jj = ind_weatherday\n while time.date() != self.datetimes[jj].date():\n # Make sure jj does not get too large to be an index to\n # the list.\n # Note this is probably a bad idea to do it this way.\n if jj == len(self.datetimes)-1:\n break\n jj += 1\n## print jj\n\n ind_weatherday = jj\n indices[ii] = ind_weatherday\n\n# XX[ii, 0] = self.table['PrecipIn'][ind_weatherday]\n# XX[ii, 1] = self.table['Mean TemperatureF'][ind_weatherday]\n## XX[ii, 2] = self.table['MeanDew PointF'][ind_weatherday]\n\n XX[:,0] = self.table['PrecipIn'][indices]\n XX[:,1] = self.table['Mean TemperatureF'][indices]\n self.weather_features = XX\n return XX", "def _get_metadata(self): \n def add_dates(date_list, dates):\n \"\"\"\n Append dates to date_list which are not already within date_list.\n \n \"\"\"\n for date in dates:\n if date.strftime('%d-%b') not in date_list:\n date_list.append(date.strftime('%d-%b'))\n return date_list\n \n metadata = {'DATA_TYPE':'Observation Data'} \n \n self.cube_dates = []\n years = []\n \n for cube in self.cubelist:\n cube_metadata = self._get_obs_metadata(cube)\n \n self.cube_dates = add_dates(self.cube_dates, \n cube_metadata['DATES'])\n # Years are based on the earliest date.\n years.append(min(cube_metadata['DATES']).year)\n del cube_metadata['DATES']\n \n for key, val in cube_metadata.items():\n # Find unique metadata which has not already been added by \n # previous cubes. Years are the common one.\n current_vals = metadata.get(key)\n if current_vals is not None:\n for this_val in current_vals:\n if hasattr(this_val, '__iter__'):\n try: \n if numpy.array_equal(this_val, val):\n break\n except AttributeError:\n # If the array type is not comparable for \n # example array of strings.\n equal = True\n for this_item, item in zip(this_val, val):\n if this_item != item:\n equal = False\n break\n if equal:\n break\n else:\n if this_val == val:\n break\n metadata[key].append(val)\n else:\n metadata[key] = [val]\n \n bound_names = []\n # Tidy up lists of length 1.\n for key, val in metadata.items():\n if type(val) == list and len(val) == 1:\n metadata[key] = val[0]\n # Retrieve the exact bound names.\n if key[-7:] == '_BOUNDS':\n bound_names.append(key)\n \n metadata['YEARS'] = sorted(list(set(years)))\n metadata['DATES'] = self.cube_dates\n \n return self.MetaData(metadata, bound_names)", "def get_data_iue(obsid, filt):\n\n # This error code will be used unless there's a problem reading any\n # of the FITS files in the list, or the FILTER value is not understood.\n errcode = 0\n\n # This defines a data point for a DataSeries object as a namedtuple.\n data_point = collections.namedtuple('DataPoint', ['x', 'y'])\n\n # For IUE, this defines the x-axis and y-axis units as a string.\n iue_xunit = \"Angstroms (vacuum, heliocentric)\"\n iue_yunit = \"ergs/cm^2/s/Angstrom\"\n\n # Parse the obsID string to determine the paths+files to read. Note:\n # this step will assign some of the error codes returned to the top level.\n if filt == ' ':\n filt = \"UNKNOWN\"\n if filt.upper() in [\"LOW_DISP\", \"HIGH_DISP\"] or filt == \"UNKNOWN\":\n parsed_files_result = parse_obsid_iue(obsid, filt.upper())\n errcode = parsed_files_result.errcode\n else:\n errcode = 4\n\n # In the case of low dispersion spectra, there can be two apertures for\n # a single obsID. In that case, we return a list of TWO DataSeries, one\n # for each aperture. 
In other words, we treat the single obsID as if it\n # were two different obsIDs in the case of a double-aperture.\n all_data_series = []\n\n # For each file, read in the contents and create a return JSON object.\n if errcode == 0:\n for sfile in parsed_files_result.specfiles:\n # Figure out if this is an mxhi or mxlo spectrum.\n if sfile[-7:] == \"mxlo.gz\":\n is_lo = True\n is_hi = False\n else:\n is_lo = False\n is_hi = True\n\n try:\n with fits.open(sfile) as hdulist:\n if is_lo:\n # Get the dispersion type from the primary header.\n dispersion = hdulist[0].header[\"disptype\"]\n # Get the aperture size(s) from the header.\n apertures = hdulist[1].data[\"aperture\"]\n n_apertures = len(apertures)\n # Number of spectral data points for each aperture size.\n n_wls = [int(x) for x in hdulist[1].data[\"npoints\"]]\n # Initial wavelength value(s).\n starting_wl = [float(x) for x in\n hdulist[1].data[\"wavelength\"]]\n # Step size(s) for each subsequent wavelength.\n delta_wl = [float(x) for x in hdulist[1].data[\"deltaw\"]]\n\n # Generate the full array of wavelength values, and get\n # full array of flux values, for each aperture.\n for aper in range(n_apertures):\n wls = [starting_wl[aper] +\n x*delta_wl[aper] for\n x in range(n_wls[aper])]\n fls = [float(x) for\n x in hdulist[1].data[\"flux\"][aper]]\n # Make sure wavelengths and fluxes are sorted\n # from smallest wavelength to largest.\n sort_indexes = [x[0] for x in\n sorted(enumerate(wls),\n key=itemgetter(1))]\n wls = [wls[x] for x in sort_indexes]\n fls = [fls[x] for x in sort_indexes]\n wlfls = [(x, y) for x, y in zip(wls, fls) if\n y != 0.]\n if wlfls != []:\n datapoints = [\n [data_point(x=float(\"{0:.8f}\".format(x)),\n y=float(\"{0:.8e}\".format(y)))\n for x, y in wlfls]]\n # Create the return DataSeries object.\n all_data_series.append(\n DataSeries('iue', obsid,\n datapoints,\n ['IUE_' + obsid + ' DISP:'\n + dispersion + ' APER:' +\n apertures[aper]],\n [iue_xunit], [iue_yunit],\n errcode))\n\n if is_hi:\n # Get the aperture from the primary header.\n aperture = hdulist[0].header[\"aperture\"].strip()\n # Get the dispersion type from the primary header.\n dispersion = hdulist[0].header[\"disptype\"].strip()\n # Get the camera used (SWP, LWP, LWR).\n camera = hdulist[0].header[\"camera\"].strip()\n # Get a list of spectral orders. 
Those that are beyond\n # the range defined in Solano are not considered.\n if camera == \"LWP\":\n max_order = 124\n elif camera == \"LWR\":\n max_order = 119\n else:\n max_order = 120\n orders = [int(x) for x in hdulist[1].data[\"order\"] if x\n <= max_order]\n n_orders = len(orders)\n # This lists will store each orders' spectral info.\n order_spectra = []\n\n # Loop over each order.\n for order in range(n_orders):\n # Number of fluxes for this order.\n n_p = int(hdulist[1].data[\"npoints\"][order])\n # Starting pixel within the array of 768 elements.\n s_pix = int(\n hdulist[1].data[\"startpix\"][order])\n # Wavelength corresponding to this start pixel.\n starting_wl = float(\n hdulist[1].data[\"wavelength\"][order])\n # Step size for each subsequent wavelength.\n delta_wl = float(\n hdulist[1].data[\"deltaw\"][order])\n # Generate the full array of wavelength values.\n wls = [starting_wl + x*delta_wl for x in\n range(n_p)]\n # Extract the fluxes that go along with these wls.\n all_fluxes = hdulist[1].data[\"abs_cal\"][order]\n fls = [float(x) for x in\n all_fluxes[(s_pix-1):(s_pix-1+n_p-1+1)]]\n # Extract the quality flags that go along with\n # these wls.\n all_qfs = hdulist[1].data[\"quality\"][order]\n qfs = [int(x) for x in all_qfs[(s_pix-1):(s_pix-1+\n n_p-1+1)]]\n # Only keep good Quality Flags, if the order is all\n # bad flags, don't add it.\n keep = [i for i, x in enumerate(qfs) if (qfs[i] >\n -16384)]\n if keep != [] and fls != [0.]*len(fls):\n wls = [wls[i] for i in keep]\n fls = [fls[i] for i in keep]\n # Create a dict that will store this order's\n # info.\n order_spec = {'order':orders[order],\n 'wls':numpy.asarray(wls),\n 'fls':numpy.asarray(fls)}\n order_spectra.append(order_spec)\n\n # Order-combine the spectra.\n comb_spec = order_combine(order_spectra, camera, False)\n\n # Resample onto an evenly-spaced wavelength scale.\n comb_spec_reb = resample_spectrum(comb_spec, camera,\n False)\n\n # Create the return DataSeries object.\n datapoints = [\n [data_point(x=float(\"{0:.8f}\".format(x)),\n y=float(\"{0:.8e}\".format(y)))\n for x, y in comb_spec_reb]]\n all_data_series.append(\n DataSeries('iue', obsid,\n datapoints,\n ['IUE_' + obsid + ' DISP:'\n + dispersion + ' APER:' +\n aperture],\n [iue_xunit], [iue_yunit],\n errcode))\n\n except IOError:\n errcode = 3\n all_data_series.append(\n DataSeries('iue', obsid, [], [''], [''], [''], errcode))\n\n else:\n # This is where an error DataSeries object would be returned.\n all_data_series.append(\n DataSeries('iue', obsid, [], [], [],\n [], errcode))\n\n # Return the DataSeries object back to the calling module.\n if len(all_data_series) == 1:\n return all_data_series[0]\n return all_data_series", "def createObsTable(df):\n \n textarea = ''\n \n for idx in range(len(df)):\n # Convert the dataframe ra and dec into Sky Coordinates\n c = SkyCoord(df['ra'].iloc[idx]*u.degree, df['dec'].iloc[idx]*u.degree)\n # Convert RA and DEC into hour-minute-second and degree-minute-second\n ra_hms = c.ra.hms\n dec_dms = c.dec.dms\n # Get the observation time and convert it into a standard format\n date_obs = df['date_obs'].iloc[idx].decode()#[2:-1]\n time_obj = Time(date_obs, format='iso', scale='utc')\n # Convert observation time and sky coords into a string\n if dec_dms.d != 0:\n name = (\" %07i %s %s %s.%s %02i %02i %06.3f%+03i %02i %05.2f W84\\n\" %\n (int(df['visit_id'].iloc[idx]), date_obs[:4], date_obs[5:7], \n date_obs[8:10], str(time_obj.mjd)[6:11],\n ra_hms.h, ra_hms.m, ra_hms.s,\n dec_dms.d, np.abs(dec_dms.m), 
np.abs(dec_dms.s)))\n        else:\n            if copysign(1, dec_dms.d) == -1.0:\n                dec_dms_d = '-00'\n            else:\n                dec_dms_d = '+00'\n            name = (\" %07i  %s %s %s.%s %02i %02i %06.3f%s %02i %05.2f                W84\\n\" %\n                    (df['visit_id'].iloc[idx], date_obs[:4], date_obs[5:7],\n                     date_obs[8:10], str(time_obj.mjd)[6:11],\n                     ra_hms.h, ra_hms.m, ra_hms.s,\n                     dec_dms_d, np.abs(dec_dms.m), np.abs(dec_dms.s)))\n        textarea += name\n        \n    return textarea", "def update_iemaccess(obs):\n    icursor = IEM.cursor()\n    for sid in obs:\n        ob = obs[sid]\n        iemob = Observation(sid, \"IA_RWIS\", ob['valid'])\n        for varname in ['tmpf', 'dwpf', 'drct', 'sknt', 'gust', 'vsby',\n                        'pday', 'tsf0', 'tsf1', 'tsf2', 'tsf3', 'scond0',\n                        'scond1', 'scond2', 'scond3', 'relh']:\n            # Don't insert NaN values into iemaccess\n            thisval = ob.get(varname)\n            if thisval is None:\n                continue\n            # strings fail the isnan check\n            if isinstance(thisval, str):\n                iemob.data[varname] = ob.get(varname)\n            elif not np.isnan(thisval):\n                iemob.data[varname] = ob.get(varname)\n        for varname in ['tsub0', 'tsub1', 'tsub2', 'tsub3']:\n            if ob.get(varname) is not None:\n                iemob.data['rwis_subf'] = ob.get(varname)\n                break\n        iemob.save(icursor)\n    icursor.close()\n    IEM.commit()", "def generateObservation(self):\n        observation = {\n            \"system_action\": self.AGENT_TYPES_DICT[self.last_system_action[\"action\"]],\n            \"user_action\": self.USER_TYPES_DICT[self.last_user_action[\"action\"]],\n            \"function_specified\": 1 if self.current_function != None else 0,\n            \"dont_know\": 1 if self.dont_know else 0,\n            \"turns\": self.current_turn,\n            \"results\": self.dataset.function_scores,\n        }\n        return flatten(self.observation_space, observation)", "def run(self):\r\n        #print 'WriteFITS.run'\r\n\r\n        # construct the name of the file\r\n        runtime = self.previous_results['runtime']\r\n        fitsname = '%s.fits' % runtime\r\n\r\n        # get list of instrument observations\r\n        observe = self.previous_results['observe']\r\n        obs_timeline = observe['observed_timeline']\r\n        observed_times = obs_timeline.keys()\r\n        observed_times.sort()\r\n\r\n        # construct lists of the values to be stored in each Table column;\r\n        # the lists are initialised once here so that every observed time\r\n        # contributes a row, then filled in the loop below\r\n        timelist = []\r\n        smec_position = []\r\n        smec_nominal_position = []\r\n        flag = []\r\n        data = []\r\n        pointing1_x = []\r\n        pointing1_y = []\r\n        pointing2_x = []\r\n        pointing2_y = []\r\n\r\n        for t in observed_times:\r\n            config = obs_timeline[t]\r\n\r\n            timelist.append(config.time)\r\n            smec_position.append(config.smec_position)\r\n            smec_nominal_position.append(config.smec_nominal_position)\r\n            flag.append(config.flag)\r\n            data.append(config.data)\r\n            pointing1_x.append(config.pointing1_x)\r\n            pointing1_y.append(config.pointing1_y)\r\n            pointing2_x.append(config.pointing2_x)\r\n            pointing2_y.append(config.pointing2_y)\r\n\r\n        # create a Header object and primary HDU - this just contains\r\n        # some very basic, general information\r\n        prihdr = pyfits.Header()\r\n        prihdr['COMMENT'] = 'This FITS file was created by pyfiins at %s' % \\\r\n          runtime\r\n        prihdu = pyfits.PrimaryHDU(header=prihdr)\r\n\r\n        # create list of Header Data Unit objects, include the primary HDU\r\n        hdulist = pyfits.HDUList([prihdu])\r\n\r\n        # create an HDU to contain the Table and append it to the list\r\n        hdulist.append(pyfits.BinTableHDU.from_columns(\r\n          pyfits.ColDefs([\r\n          pyfits.Column(name='Time', format='D',\r\n          array=np.array(timelist)),\r\n          pyfits.Column(name='SMEC Position', format='E',\r\n          array=np.array(smec_position)),\r\n          pyfits.Column(name='SMEC Nominal Position', format='E',\r\n          array=np.array(smec_nominal_position)),\r\n          pyfits.Column(name='Flag', 
format='L',\r\n array=np.array(flag)),\r\n pyfits.Column(name='Data', format='E',\r\n array=np.array(data)),\r\n pyfits.Column(name='Pointing1 X', format='E',\r\n array=np.array(pointing1_x)),\r\n pyfits.Column(name='Pointing1 Y', format='E',\r\n array=np.array(pointing1_y)),\r\n pyfits.Column(name='Pointing2 X', format='E',\r\n array=np.array(pointing2_x)),\r\n pyfits.Column(name='Pointing2 Y', format='E',\r\n array=np.array(pointing2_y))])))\r\n\r\n # write the HDU list to a file\r\n hdulist.writeto(fitsname, clobber=True)\r\n self.result['fitsfile'] = fitsname\r\n\r\n return self.result", "def _get_vif_table(self):\n\n vif_data = [['']]\n\n exog = self._model.exog\n\n # for variable in self._explanatory_variables:\n for exog_idx in range(1, exog.shape[1]):\n vif = variance_inflation_factor(exog, exog_idx)\n\n vif_data.append([self._FLOAT_STRING_FORMAT.format(vif)])\n\n vif_table = SimpleTable(vif_data, headers=['VIF'])\n\n return vif_table", "def generate_observations(self, eval_mode, augment_frames=None):\n episode_idx = 0\n augment_frames = (\n augment_frames if augment_frames is not None\n else self._augment_frames and not eval_mode)\n for t, obs in enumerate(self._flat_observations):\n if augment_frames:\n obs = image_utils.random_crop_image(obs)\n if self._split_by_episodes:\n yield obs, episode_idx\n else:\n yield obs\n if self._is_terminal[t]:\n episode_idx += 1", "def get_observation_list(self):\n return self.observations", "def annotation_objects_to_table(self, truth_objs):\n\n # Ensure we have list of declaration and truth objects.\n if not isinstance(truth_objs, list):\n truth_objs = [truth_objs]\n\n # During loading, if validate_inputs=True, the DECL and ANNOT files are\n # read using our SCHEMA to ensure JSON validity. These generate a list\n # of decl_objs. If validate_inputs=False, we instead get a dictionary.\n # Not validating is MUCH faster for very large JSON files, so we handle\n # both cases with the is_dict flag.\n is_dict = False\n if isinstance(truth_objs[0], dict):\n is_dict = True\n\n uids = []\n frames = []\n bboxes = []\n ranges = []\n aspects = []\n classes = []\n user_datas = []\n\n # Throughout, use:\n # val = o['str'] if is_dict else o.str\n # to handle both input data types\n for obj in truth_objs:\n uid = obj[\"fileUID\"] if is_dict else obj.uid\n frame_annots = (\n obj[\"frameAnnotations\"] if is_dict else obj.frameAnnotations\n )\n\n for frame_str, frame_anno in frame_annots.items():\n frame_num = int(frame_str[1:])\n\n anno_vec = (\n frame_anno[\"annotations\"]\n if is_dict\n else frame_anno.annotations\n )\n num_anno = len(anno_vec)\n\n if num_anno > 0:\n uids.extend(num_anno * [uid])\n frames.extend(num_anno * [frame_num])\n\n if not is_dict:\n for a in anno_vec:\n classes.append(\n self.annotation_class_alias_dict.get(\n a.obj_class, a.obj_class\n )\n )\n bboxes.append(a.shape)\n ranges.append(a.range)\n aspects.append(a.aspect)\n user_datas.append(a.userData)\n else:\n for a in anno_vec:\n classes.append(\n self.annotation_class_alias_dict.get(\n a[\"class\"], a[\"class\"]\n )\n )\n bboxes.append(self._shape_to_object(a[\"shape\"]))\n # Some annotations may not have a range. For\n # example, for DEER and other animals the range to\n # the object is not known. If the 'range' field is\n # not present, we replace with None. 
The same is\n # true for aspect.\n ranges.append(a.get(\"range\", None))\n aspects.append(a.get(\"aspect\", None))\n user_datas.append(a.get(\"userData\", {}))\n\n return pd.DataFrame(\n {\n \"uid\": uids,\n \"frame\": frames,\n \"bbox\": bboxes,\n \"range\": ranges,\n \"aspect\": aspects,\n \"class\": classes,\n \"userData\": user_datas,\n }\n )", "def get_pmx_fiberassign_table(date, exp_id):\n filename = glob('/exposures/desi/{}/{:08d}/fiberassign*.fits'.format(\n date, exp_id))\n\n tab = None\n if filename:\n tab = Table.read(filename[0], hdu='FASSIGN')\n tab = tab['FIBER', 'TARGETID', 'LOCATION', 'FIBERSTATUS', 'TARGET_RA', 'TARGET_DEC', 'FIBERASSIGN_X', 'FIBERASSIGN_Y']\n return tab", "def summary_table(countries: List[str]):\n \n df_list = []\n \n for country in countries:\n acceleration_figures = acceleration(country)\n pop = COUNTRY_DATA[country]['population']\n df_list.append(\n [\n country,\n COUNTRY_DATA[country]['data'].confirmed[-1],\n int(acceleration_figures[0] * pop),\n COUNTRY_DATA[country]['data'].deaths[-1],\n int(acceleration_figures[1] * pop),\n ]\n )\n\n return df_list", "def process(self):\n dataobj = Data()\n targetmap = {}\n sta_indices = {}\n hdulist = self.hdulist\n # First get all the OI_TARGET, OI_WAVELENGTH and OI_ARRAY tables\n for hdu in hdulist:\n header = hdu.header\n data = hdu.data\n if hdu.name == \"OI_WAVELENGTH\":\n if dataobj.wavelength == None:\n dataobj.wavelength = {}\n insname = header[\"INSNAME\"]\n dataobj.wavelength[insname] = OI_WAVELENGTH(\n data.field(\"EFF_WAVE\"), data.field(\"EFF_BAND\")\n )\n elif hdu.name == \"OI_TARGET\":\n for row in data:\n target_id = row[\"TARGET_ID\"]\n target = OI_TARGET(\n target=row[\"TARGET\"],\n raep0=row[\"RAEP0\"],\n decep0=row[\"DECEP0\"],\n equinox=row[\"EQUINOX\"],\n ra_err=row[\"RA_ERR\"],\n dec_err=row[\"DEC_ERR\"],\n sysvel=row[\"SYSVEL\"],\n veltyp=row[\"VELTYP\"],\n veldef=row[\"VELDEF\"],\n pmra=row[\"PMRA\"],\n pmdec=row[\"PMDEC\"],\n pmra_err=row[\"PMRA_ERR\"],\n pmdec_err=row[\"PMDEC_ERR\"],\n parallax=row[\"PARALLAX\"],\n para_err=row[\"PARA_ERR\"],\n spectyp=row[\"SPECTYP\"],\n )\n dataobj.target = np.append(dataobj.target, target)\n targetmap[target_id] = target\n elif hdu.name == \"OI_ARRAY\":\n if dataobj.array == None:\n dataobj.array = {}\n arrname = header[\"ARRNAME\"]\n frame = header[\"FRAME\"]\n arrxyz = np.array(\n [header[\"ARRAYX\"], header[\"ARRAYY\"], header[\"ARRAYZ\"]]\n )\n dataobj.array[arrname] = OI_ARRAY(frame, arrxyz, stations=data)\n # Save the sta_index for each array, as we will need it\n # later to match measurements to stations\n sta_indices[arrname] = data.field(\"sta_index\")\n\n # Then get any science measurements\n for hdu in hdulist:\n header = hdu.header\n data = hdu.data\n if hdu.name in (\"OI_VIS\", \"OI_VIS2\", \"OI_T3\"):\n if \"ARRNAME\" in header.keys():\n arrname = header[\"ARRNAME\"]\n else:\n arrname = None\n if arrname and dataobj.array:\n array = dataobj.array[arrname]\n else:\n array = None\n wavelength = dataobj.wavelength[header[\"INSNAME\"]]\n if hdu.name == \"OI_VIS\":\n for row in data:\n date = header[\"DATE-OBS\"].split(\"-\")\n timeobs = datetime.datetime(\n int(date[0]), int(date[1]), int(date[2])\n ) + datetime.timedelta(seconds=np.around(row.field(\"TIME\"), 2))\n int_time = row.field(\"INT_TIME\")\n visamp = np.reshape(row.field(\"VISAMP\"), -1)\n visamperr = np.reshape(row.field(\"VISAMPERR\"), -1)\n visphi = np.reshape(row.field(\"VISPHI\"), -1)\n visphierr = np.reshape(row.field(\"VISPHIERR\"), -1)\n if \"CFLUX\" in row.array.names:\n 
cflux = np.reshape(row.field(\"CFLUX\"), -1)\n else:\n cflux = None\n if \"CFLUXERR\" in row.array.names:\n cfluxerr = np.reshape(row.field(\"CFLUXERR\"), -1)\n else:\n cfluxerr = None\n flag = np.reshape(row.field(\"FLAG\"), -1)\n ucoord = row.field(\"UCOORD\")\n vcoord = row.field(\"VCOORD\")\n target = targetmap[row.field(\"TARGET_ID\")]\n if array:\n sta_index = row.field(\"STA_INDEX\")\n s1 = array.station[sta_indices[arrname] == sta_index[0]][0]\n s2 = array.station[sta_indices[arrname] == sta_index[1]][0]\n station = [s1, s2]\n else:\n station = [None, None]\n dataobj.vis = np.append(\n dataobj.vis,\n OI_VIS(\n timeobs=timeobs,\n int_time=int_time,\n visamp=visamp,\n visamperr=visamperr,\n visphi=visphi,\n visphierr=visphierr,\n flag=flag,\n ucoord=ucoord,\n vcoord=vcoord,\n wavelength=wavelength,\n target=target,\n array=array,\n station=station,\n cflux=cflux,\n cfluxerr=cfluxerr,\n ),\n )\n elif hdu.name == \"OI_VIS2\":\n for row in data:\n date = header[\"DATE-OBS\"].split(\"-\")\n timeobs = datetime.datetime(\n int(date[0]), int(date[1]), int(date[2])\n ) + datetime.timedelta(seconds=np.around(row.field(\"TIME\"), 2))\n int_time = row.field(\"INT_TIME\")\n vis2data = np.reshape(row.field(\"VIS2DATA\"), -1)\n vis2err = np.reshape(row.field(\"VIS2ERR\"), -1)\n flag = np.reshape(row.field(\"FLAG\"), -1)\n ucoord = row.field(\"UCOORD\")\n vcoord = row.field(\"VCOORD\")\n target = targetmap[row.field(\"TARGET_ID\")]\n if array:\n sta_index = row.field(\"STA_INDEX\")\n s1 = array.station[sta_indices[arrname] == sta_index[0]][0]\n s2 = array.station[sta_indices[arrname] == sta_index[1]][0]\n station = [s1, s2]\n else:\n station = [None, None]\n dataobj.vis2 = np.append(\n dataobj.vis2,\n OI_VIS2(\n timeobs=timeobs,\n int_time=int_time,\n vis2data=vis2data,\n vis2err=vis2err,\n flag=flag,\n ucoord=ucoord,\n vcoord=vcoord,\n wavelength=wavelength,\n target=target,\n array=array,\n station=station,\n ),\n )\n elif hdu.name == \"OI_T3\":\n for row in data:\n date = header[\"DATE-OBS\"].split(\"-\")\n timeobs = datetime.datetime(\n int(date[0]), int(date[1]), int(date[2])\n ) + datetime.timedelta(seconds=np.around(row.field(\"TIME\"), 2))\n int_time = row.field(\"INT_TIME\")\n t3amp = np.reshape(row.field(\"T3AMP\"), -1)\n t3amperr = np.reshape(row.field(\"T3AMPERR\"), -1)\n t3phi = np.reshape(row.field(\"T3PHI\"), -1)\n t3phierr = np.reshape(row.field(\"T3PHIERR\"), -1)\n flag = np.reshape(row.field(\"FLAG\"), -1)\n u1coord = row.field(\"U1COORD\")\n v1coord = row.field(\"V1COORD\")\n u2coord = row.field(\"U2COORD\")\n v2coord = row.field(\"V2COORD\")\n target = targetmap[row.field(\"TARGET_ID\")]\n if array:\n sta_index = row.field(\"STA_INDEX\")\n s1 = array.station[sta_indices[arrname] == sta_index[0]][0]\n s2 = array.station[sta_indices[arrname] == sta_index[1]][0]\n s3 = array.station[sta_indices[arrname] == sta_index[2]][0]\n station = [s1, s2, s3]\n else:\n station = [None, None, None]\n dataobj.t3 = np.append(\n dataobj.t3,\n OI_T3(\n timeobs=timeobs,\n int_time=int_time,\n t3amp=t3amp,\n t3amperr=t3amperr,\n t3phi=t3phi,\n t3phierr=t3phierr,\n flag=flag,\n u1coord=u1coord,\n v1coord=v1coord,\n u2coord=u2coord,\n v2coord=v2coord,\n wavelength=wavelength,\n target=target,\n array=array,\n station=station,\n ),\n )\n return dataobj", "def add_crds_reffile_names(self):\n all_obs_info, unique_obs_info = self.info_for_all_observations()\n\n # Add empty placeholders for reference file entries\n empty_col = np.array([' ' * 500] * len(self.info['Instrument']))\n superbias_arr = 
deepcopy(empty_col)\n linearity_arr = deepcopy(empty_col)\n saturation_arr = deepcopy(empty_col)\n gain_arr = deepcopy(empty_col)\n distortion_arr = deepcopy(empty_col)\n photom_arr = deepcopy(empty_col)\n ipc_arr = deepcopy(empty_col)\n ipc_invert = np.array([True] * len(self.info['Instrument']))\n transmission_arr = deepcopy(empty_col)\n badpixmask_arr = deepcopy(empty_col)\n pixelflat_arr = deepcopy(empty_col)\n\n # Loop over combinations, create metadata dict, and get reffiles\n for status in unique_obs_info:\n updated_status = deepcopy(status)\n (instrument, detector, filtername, pupilname, readpattern, exptype) = status\n\n # Make sure NIRISS filter and pupil values are in the correct wheels\n if instrument == 'NIRISS':\n filtername, pupilname = utils.check_niriss_filter(filtername, pupilname)\n\n # Create metadata dictionary\n date = datetime.date.today().isoformat()\n current_date = datetime.datetime.now()\n time = current_date.time().isoformat()\n status_dict = {'INSTRUME': instrument, 'DETECTOR': detector,\n 'FILTER': filtername, 'PUPIL': pupilname,\n 'READPATT': readpattern, 'EXP_TYPE': exptype,\n 'DATE-OBS': date, 'TIME-OBS': time,\n 'SUBARRAY': 'FULL'}\n if instrument == 'NIRCAM':\n if detector in ['NRCA5', 'NRCB5', 'NRCALONG', 'NRCBLONG', 'A5', 'B5']:\n status_dict['CHANNEL'] = 'LONG'\n else:\n status_dict['CHANNEL'] = 'SHORT'\n if instrument == 'FGS':\n if detector in ['G1', 'G2']:\n detector = detector.replace('G', 'GUIDER')\n status_dict['DETECTOR'] = detector\n updated_status = (instrument, detector, filtername, pupilname, readpattern, exptype)\n\n # Query CRDS\n # Exclude transmission file for now\n files_no_transmission = list(CRDS_FILE_TYPES.values())\n files_no_transmission.remove('transmission')\n reffiles = crds_tools.get_reffiles(status_dict, files_no_transmission,\n download=not self.offline)\n\n # If the user entered reference files in self.reffile_defaults\n # use those over what comes from the CRDS query\n if self.reffile_overrides is not None:\n manual_reffiles = self.reffiles_from_dict(updated_status)\n\n for key in manual_reffiles:\n if manual_reffiles[key] != 'none':\n if key == 'badpixmask':\n crds_key = 'mask'\n elif key == 'pixelflat':\n crds_key = 'flat'\n elif key == 'astrometric':\n crds_key = 'distortion'\n else:\n crds_key = key\n reffiles[crds_key] = manual_reffiles[key]\n\n # Transmission image file\n # For the moment, this file is retrieved from NIRCAM_GRISM or NIRISS_GRISM\n # Down the road it will become part of CRDS, at which point\n if 'transmission' not in reffiles.keys():\n reffiles['transmission'] = get_transmission_file(status_dict)\n self.logger.info('Using transmission file: {}'.format(reffiles['transmission']))\n\n # Check to see if a version of the inverted IPC kernel file\n # exists already in the same directory. 
If so, use that and\n # avoid having to invert the kernel at run time.\n inverted_file, must_invert = SimInput.inverted_ipc_kernel_check(reffiles['ipc'])\n if not must_invert:\n reffiles['ipc'] = inverted_file\n reffiles['invert_ipc'] = must_invert\n\n # Identify entries in the original list that use this combination\n match = [i for i, item in enumerate(all_obs_info) if item==status]\n\n # Populate the reference file names for the matching entries\n superbias_arr[match] = reffiles['superbias']\n linearity_arr[match] = reffiles['linearity']\n saturation_arr[match] = reffiles['saturation']\n gain_arr[match] = reffiles['gain']\n distortion_arr[match] = reffiles['distortion']\n photom_arr[match] = reffiles['photom']\n ipc_arr[match] = reffiles['ipc']\n ipc_invert[match] = reffiles['invert_ipc']\n transmission_arr[match] = reffiles['transmission']\n badpixmask_arr[match] = reffiles['mask']\n pixelflat_arr[match] = reffiles['flat']\n\n self.info['superbias'] = list(superbias_arr)\n self.info['linearity'] = list(linearity_arr)\n self.info['saturation'] = list(saturation_arr)\n self.info['gain'] = list(gain_arr)\n self.info['astrometric'] = list(distortion_arr)\n self.info['photom'] = list(photom_arr)\n self.info['ipc'] = list(ipc_arr)\n self.info['invert_ipc'] = list(ipc_invert)\n self.info['transmission'] = list(transmission_arr)\n self.info['badpixmask'] = list(badpixmask_arr)\n self.info['pixelflat'] = list(pixelflat_arr)", "def list_of_calibs_for_run (ins, exp, runnum) :\n return dict_of_recs_for_run(ins, exp, runnum)['calibrations']", "def populate_images(self):\n print \"Populating images info...\"\n images = self.get_all_images()\n for i in images:\n\n associated_snapshots = self.get_snapshots_of(i)\n\n self.spreadsheet[i.id] = dict(name=i.name, Name_tag=self.get_name_tag(i), id=i.id,\n KEEP_tag=self.get_keep_tag(i), PROD_tag=self.is_production(i),\n region=i.region.name,\n created=i.creationDate,\n associated_snapshots=associated_snapshots,\n description=i.description)", "def exposure(frameType, expTime):\n\n blobEvent.clear() \n\n # set the specified frame type\n if frameType.lower() == 'light':\n ccd_frame[0].s = PyIndi.ISS_ON\n ccd_frame[1].s = PyIndi.ISS_OFF\n ccd_frame[2].s = PyIndi.ISS_OFF\n ccd_frame[3].s = PyIndi.ISS_OFF \n indiclient.sendNewSwitch(ccd_frame)\n elif frameType.lower() == 'bias':\n ccd_frame[0].s = PyIndi.ISS_OFF\n ccd_frame[1].s = PyIndi.ISS_ON\n ccd_frame[2].s = PyIndi.ISS_OFF\n ccd_frame[3].s = PyIndi.ISS_OFF \n indiclient.sendNewSwitch(ccd_frame)\n elif frameType.lower() == 'dark':\n ccd_frame[0].s = PyIndi.ISS_OFF\n ccd_frame[1].s = PyIndi.ISS_OFF\n ccd_frame[2].s = PyIndi.ISS_ON\n ccd_frame[3].s = PyIndi.ISS_OFF \n indiclient.sendNewSwitch(ccd_frame)\n elif frameType.lower() == 'flat':\n ccd_frame[0].s = PyIndi.ISS_OFF\n ccd_frame[1].s = PyIndi.ISS_OFF\n ccd_frame[2].s = PyIndi.ISS_OFF\n ccd_frame[3].s = PyIndi.ISS_ON \n indiclient.sendNewSwitch(ccd_frame)\n\n # set the value for the next exposure\n ccd_exposure[0].value=expTime\n\n indiclient.sendNewNumber(ccd_exposure)\n\n # wait for the exposure\n blobEvent.wait()\n\n for blob in ccd_ccd1:\n # pyindi-client adds a getblobdata() method to IBLOB item\n # for accessing the contents of the blob, which is a bytearray in Python\n image_data=blob.getblobdata()\n\n # write the byte array out to a FITS file\n global imgNum\n global imgName\n imgNum += 1\n fileName = fileDir+'raw-'+str(imgNum).zfill(8)+'.fits'\n f = open(fileName, 'wb')\n f.write(image_data)\n f.close()\n imgName = fileName\n \n return fileName", 
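The exposure() helper above blocks on the INDI blob event and writes each blob straight to disk as raw-<imgNum>.fits before returning the file name. A quick way to confirm that what landed on disk is readable FITS is to reopen the returned file; the following is a minimal sketch, assuming astropy is installed, and the 'FRAME' keyword is an assumption here since the actual header contents depend on the INDI CCD driver in use:

# Minimal sketch: reopen a file produced by exposure() and inspect it.
# Assumes astropy; 'FRAME' is a guess at a driver-written keyword and
# may not be present for every camera.
from astropy.io import fits

def check_exposure(filename):
    with fits.open(filename) as hdulist:
        header = hdulist[0].header
        data = hdulist[0].data
        print(filename,
              getattr(data, 'shape', None),
              header.get('FRAME', 'unknown frame type'))

Used right after exposure() returns, this catches truncated or corrupt files immediately rather than at analysis time.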
"def observation(self):\n return {\n name: np.asarray(\n self._env.observation(name), self._observation_spec[name].dtype)\n for name in self._obs_names\n }", "def _make_tables(self, df):\n # Time table:\n time_keys = ['time', 'endtime', 'event_number_nv']\n self.df_event_time = df.loc[:, time_keys]\n\n # Properties tables:\n pos_keys = ['angle', 'pos_x', 'pos_x_spread', 'pos_y',\n 'pos_y_spread', 'pos_z', 'pos_z_spread']\n self.df_event_position = df.loc[:, pos_keys]\n\n keys = df.columns.values\n keys = [k for k in keys if k not in time_keys + pos_keys]\n self.df_event_properties = df.loc[:, keys]\n\n # Table panels:\n index = self.evt_sel_slid.value\n self.time_table = pn.panel(self.df_event_time.loc[index],\n )\n self.pos_table = pn.panel(self.df_event_position.loc[index:index, :],\n sizing_mode='scale_width')\n\n self.prop_table = pn.panel(self.df_event_properties.loc[index:index, :],\n sizing_mode='scale_width')", "def get_exposure(exposure_id,b_mean,b_sd,c_mean,c_sd,non_rate,dist_type,mortalities):#id in db\n\te_id \t\t= int(long(exposure_id))\n\texposure_outcomes = DBHelper.exposure_outcome\n\toutcome_ids \t= DBHelper.exposure_outcome.get(e_id)\n\n\tsamples_rr \t= DBHelper.samples_rr.get(e_id)\n\tsamples_pop \t= DBHelper.samples_pop.get(e_id)\n\trisks \t\t= DBHelper.risks.get(e_id)\n\tmeasure \t= DBHelper.measures.get(e_id)\n\tdist_type \t= get_dist_type(e_id)\n\n\t#get population distribution \n\tpopDistribution = PopDistribution(DBHelper.age_group_num,non_rate,b_mean,b_sd,c_mean,c_sd,samples_pop,dist_type)\n\n\t#get outcomes\n\toutcomes = []\n\tfor o_id in outcome_ids:\n\t\t# mortality\n\t\tm_mortality = mortalities.get(2*o_id)\n\t\tf_mortality = mortalities.get(2*o_id+1)\n\t\t# risks\n\t\tm_risks = risks.get(2*o_id)\n\t\tf_risks = risks.get(2*o_id+1)\n\t\t# outcome name\n\t\tname = DBHelper.get_outcome_name(o_id)\n\t\t# limit estimates\n\t\tlle = DBHelper.exposure_outcome.get(e_id).get(o_id)[0]\n\t\tule = DBHelper.exposure_outcome.get(e_id).get(o_id)[1]\n\t\t# outcome\n\t\toutcome = PrimeOutcome(name,o_id,m_mortality,f_mortality,samples_rr,m_risks,f_risks,lle,ule,measure,e_id) \n\t\toutcomes.append(outcome)\n\n\texposure = PrimeExposure(mortalities,outcome_ids,samples_rr,samples_pop,outcomes,popDistribution)\n\treturn exposure", "def __init__(self, mpcobslist):\n\n # Creating a list of dictionaries. 
Each dictionary holds information for an observatory\n try:\n tmp=pd.read_fwf(mpcobslist,widths=[4,10,9,9])\n except:\n sys.exit(\"Unable to load MPC Observatory list: %s\" %(mpcobslist))\n \n self.obsdict=list(tmp.T.to_dict().values())\n\n count=0\n for i in self.obsdict:\n\n # Computing cartesian coordinates of observatories in Earth frame \n rcos=i['cos']\n rsin=i['sin']\n long=np.radians(i['Long.'])\n x=rcos*np.cos(long)*shared.REarth\n y=rcos*np.sin(long)*shared.REarth\n z=rsin*shared.REarth\n\n # Inserting cartesian x, y, z, into list of dictionaries.\n self.obsdict[count]['x']=x\n self.obsdict[count]['y']=y\n self.obsdict[count]['z']=z \n\n count +=1", "def get_stats(evts_perigee) -> Table:\n rows = []\n\n for evt in reversed(evts_perigee):\n rows.append(evt.info)\n\n out = Table(rows=rows)\n return out", "def expected_exposure_list(telescope_name, year=None, month=None, \n night=None):\n exposure_list = []\n if year is None:\n year_range = (2012, 2013)\n else:\n year_range = (year,)\n for year_int in year_range:\n if month is None:\n month_range = range(1, 13)\n else:\n month_range = (month,)\n for month_int in month_range:\n if night is None:\n night_range = (1, monthrange(year_int, month_int)[1])\n else:\n night_range = (night,)\n for night_int in night_range:\n for run_number in (1, 2):\n exposure_list.append(\n '<Exposure: %s %04i/%02i/%02i run %i>' %\n (telescope_name, year_int, month_int, night_int, \n run_number))\n return exposure_list", "def make_point_source_list(output_file, exp_time, fov, sky_center,\r\n area=40000.0, prng=None):\r\n ra0, dec0, fluxes, ind = generate_sources(exp_time, fov, sky_center,\r\n area=area, prng=prng)\r\n\r\n t = Table([ra0, dec0, fluxes, ind],\r\n names=('RA', 'Dec', 'flux_0.5_2.0_keV', 'index'))\r\n t[\"RA\"].unit = \"deg\"\r\n t[\"Dec\"].unit = \"deg\"\r\n t[\"flux_0.5_2.0_keV\"].unit = \"erg/(cm**2*s)\"\r\n t[\"index\"].unit = \"\"\r\n t.write(output_file, format='ascii.ecsv', overwrite=True)", "def make_doi_table(dataset: ObservatoryDataset) -> List[Dict]:\n\n records = []\n for paper in dataset.papers:\n # Doi, events and grids\n doi = paper.doi.upper()\n events = make_doi_events(doi, paper.events)\n\n # Affiliations: institutions, countries, regions, subregion, funders, journals, publishers\n institutions = make_doi_institutions(paper.authors)\n countries = make_doi_countries(paper.authors)\n regions = make_doi_regions(paper.authors)\n subregions = make_doi_subregions(paper.authors)\n funders = make_doi_funders(paper.funders)\n journals = make_doi_journals(paper.in_unpaywall, paper.journal)\n publishers = make_doi_publishers(paper.publisher)\n\n # Make final record\n records.append(\n {\n \"doi\": doi,\n \"crossref\": {\n \"type\": paper.type,\n \"title\": paper.title,\n \"published_year\": paper.published_date.year,\n \"published_month\": paper.published_date.month,\n \"published_year_month\": f\"{paper.published_date.year}-{paper.published_date.month}\",\n \"funder\": [{\"name\": funder.name, \"DOI\": funder.doi} for funder in paper.funders],\n },\n \"unpaywall\": {},\n \"unpaywall_history\": {},\n \"open_citations\": {},\n \"events\": events,\n \"affiliations\": {\n \"doi\": doi,\n \"institutions\": institutions,\n \"countries\": countries,\n \"subregions\": subregions,\n \"regions\": regions,\n \"groupings\": [],\n \"funders\": funders,\n \"authors\": [],\n \"journals\": journals,\n \"publishers\": publishers,\n },\n }\n )\n\n # Sort to match with sorted results\n records.sort(key=lambda r: r[\"doi\"])\n\n return records", "def 
Summary(row_dicts,domain=\"$(dom)\",vtype=\"temp\",aspectratio=0.65):\n screen = EdmObject(\"Screen\")\n if vtype == \"motor\" or vtype == \"eloss\":\n table = EdmTable(yborder=0, xjustify=\"c\", yjustify=\"c\")\n else:\n table = EdmTable()\n screen.addObject(table)\n if vtype == \"temp\":\n headerText = \"Temperature Summary\"\n elif vtype ==\"flow\":\n headerText = \"Water Flow Summary\"\n elif vtype == \"motor\":\n headerText = \"Motion Summary\"\n else:\n headerText = \"Eloss Summary\"\n done_devices = []\n if vtype == \"eloss\":\n nvtype = \"NMOTOR\"\n else:\n nvtype = \"N\"+vtype.upper()\n totalv = 0\n maxv = 0\n nvtypev = 0\n height = 0\n init_flag = True\n \n # find the table height in number of blocks\n for dict in row_dicts:\n totalv = totalv + int(dict[nvtype])\n maxv = max(maxv,int(dict[nvtype]))\n height = max(int(totalv**aspectratio)+2,maxv+2)\n\n for dict in row_dicts:\n # set the number of cells to be filled in\n nvtypev = int(dict[nvtype])\n if nvtypev > 0:\n p = dict[\"P\"]\n skip_list = []\n # a flow can be used for more than one device. Only show it for\n # the first device it appears in\n for i in range(int(dict[\"NFLOW\"])):\n wn = dict[\"W\"]+dict[\"W\"+str(i+1)]\n if wn in done_devices:\n skip_list.append(wn)\n else:\n done_devices.append(wn)\n # only add device text if there are more cells to write\n if vtype !=\"flow\" or not len(skip_list) == int(dict[\"NFLOW\"]):\n i = 1 \n if init_flag:\n # don't make an extra cell at the start\n init_flag = False\n else:\n # if there is no room in current column, force a new one\n table.nextCell(max_y=height-nvtypev-1)\n # write the device header\n dfilename = dict[\"FILE\"] \n if can_optimise(dfilename):\n dfilename=dict[\"P\"]+\"-device-screen-0.edl\"\n if vtype==\"motor\":\n xs = 110\n ob = shell_visible(0,0,xs,20,\"Home \"+dict[\"NAME\"],\n 'gnome-terminal --disable-factory --hide-menubar -t \"Home %s\" -e \"$(dom)-motorhome.py %s\"'\n %(dict[\"NAME\"],dict[\"NAME\"]) )\n table.addObject(ob,xoff=xs)\n xoff = -xs \n else:\n xoff = 0\n xs = 90\n table.addObject(rd(0,0,xs,20,dfilename,dict[\"EDM_MACROS\"]),xoff=xoff)\n table.addObject(tooltip(0,0,xs,20,dict[\"DESCRIPTION\"]),xoff=xoff)\n lab = label(0,0,xs,20,dict[\"NAME\"],\"center\")\n lab[\"font\"]=quoteString(\"arial-bold-r-14.0\")\n table.addObject(lab,xoff=xoff) \n table.nextCell()\n # write the cells\n while not i > nvtypev:\n if vtype==\"temp\":\n ob = embed(0,0,90,20,\"BLGui-temp-embed\",\"label=T\"+\\\n str(i)+\",temp=\"+p+dict[\"T\"+str(i)]+\",P=\"+\\\n dict[\"P\"])\n elif vtype==\"flow\":\n ob = embed(0,0,90,20,\"BLGui-flow-embed\",\"flow=\"+\\\n dict[\"W\"]+dict[\"W\"+str(i)]+\",label=Flow \"+\\\n str(i)+\",P=\"+dict[\"P\"])\n elif vtype==\"eloss\":\n # Strip off the colon from the motor name\n elossLabel = dict[\"M\"+str(i)]\n elossLabel = elossLabel[1:]\n ob = embed(0, 0, 149, 22, \"BLGui-elossSummary-embed\", \"motor=\"+\\\n dict[\"P\"]+dict[\"M\"+str(i)]+\",label=\"+\\\n elossLabel)\n else:\n # Strip off the colon from the motor name\n motorLabel = dict[\"M\"+str(i)]\n motorLabel = motorLabel[1:]\n ob = embed(0,0,223,22,\"BLGui-motorSummary-embed\",\"motor=\"+\\\n dict[\"P\"]+dict[\"M\"+str(i)]+\",label=\"+\\\n motorLabel)\n table.addObject(ob)\n table.nextCell()\n i += 1\n\n # create screen\n if vtype==\"motor\":\n ob = embed(0,0,223,22,\"BLGui-motor-key\",\"a=b\")\n table.addObject(ob)\n elif vtype==\"eloss\":\n ob = embed(0,0,156,22,\"BLGui-eloss-key\",\"a=b\")\n table.addObject(ob)\n elif vtype==\"temp\":\n bms_lines = 
open(\"/dls_sw/prod/etc/init/BMS_pvs.csv\").readlines()\n ids = {}\n for line in bms_lines:\n split = line.split(\"|\")\n # id, desc, ....., pv \n if len(split) > 3 and domain.replace(\"BL\", \"SV\") in split[-1]:\n ids[split[0].strip('\"')] = split[1].strip('\"')\n for i, (id, desc) in enumerate(ids.items()):\n if len(ids) > 1:\n txt = \"BMS%d\" % (i+1)\n else:\n txt = \"BMS\" \n ob = rd_visible(0,0,90,20,txt,\"DLS_dev%s.edl\" % id)\n ob[\"fgColor\"] = ob.Colour[\"Related display\"]\n table.addObject(ob)\n table.nextCell()\n else:\n interlock = rd_visible(0,0,90,20,\"Interlocks\",domain+\"-interlocks\")\n interlock[\"fgColor\"] = interlock.Colour[\"Related display\"]\n table.addObject(interlock)\n screen.autofitDimensions()\n table.ungroup()\n Titlebar(screen,button=\"text\",button_text=domain,header=\"text\",\\\n header_text=headerText,tooltip=\"generic-tooltip\",title=headerText)\n return screen", "def make_individual_agents_2016(self):\r\n for hh_row in agents: # agents is a list of ints 1-94 from excel_import\r\n individual_id_list = return_values(hh_row, 'name')\r\n hh_id = return_values(hh_row, 'hh_id')\r\n self.hh_id = hh_id\r\n agelist = return_values(hh_row, 'age') # find the ages of people in hh\r\n genderlist = return_values(hh_row, 'gender')\r\n marriagelist = return_values(hh_row, 'marriage')\r\n educationlist = return_values(hh_row, 'education')\r\n income_local_off_farm = float(return_values(hh_row, 'income_local_off_farm'))\r\n income_local_off_farm_list[hh_row - 1] = income_local_off_farm\r\n household_income_list[hh_row - 1] = household_income_list[hh_row - 1] + income_local_off_farm\r\n if individual_id_list is not None and individual_id_list is not []:\r\n for i in range(len(individual_id_list)):\r\n self.individual_id = str(self.hh_id) + str(individual_id_list[i]) # example: 2c\r\n self.age = int(agelist[i])\r\n # if genderlist is not None and genderlist is not []:\r\n self.gender = int(genderlist[i])\r\n try:\r\n self.education = educationlist[i]\r\n except:\r\n self.education = 0\r\n self.marriage = marriagelist[i]\r\n IndividualAgent.create_initial_migrant_list(self, hh_row)\r\n self.age_at_step_0 = self.age\r\n self.income_local_off_farm = return_values(self.hh_row, 'income_local_off_farm')\r\n ind = IndividualAgent(hh_row, self, self.hh_id, self.individual_id, self.age, self.gender,\r\n self.education, self.marriage, self.past_hh_id, self.non_gtgp_area,\r\n self.step_counter, self.age_at_step_0, self.income_local_off_farm)\r\n self.schedule.add(ind)", "def buildObservationSpace(self):\n self.observation_space = Dict({\n \"system_action\": Discrete(len(self.AGENT_TYPES)), \n \"user_action\": Discrete(len(self.USER_TYPES)), \n \"function_specified\": Discrete(2),\n \"dont_know\": Discrete(2),\n # \"command_ignored\": Discrete(2),\n \"turns\": Discrete(self.MAX_TURNS+1),\n \"results\": Box(low=np.zeros(self.dataset.getDatabaseSize()), high=np.ones(self.dataset.getDatabaseSize())),\n })\n self.observation_space.shape = (flatdim(self.observation_space),)", "def exposuretimes(self):\n all = self.allexposuretimes\n return [all[layer-1] for layer in self.__layers]", "def run(self):\r\n #print 'WriteFITS_IDI.run'\r\n\r\n # construct the name of the file\r\n readfits = self.previous_results['readfits']\r\n obs_date = readfits['obs date']\r\n idifitsfile = '%s.idi.fits' % obs_date\r\n\r\n configxml = 'firi.xml'\r\n\r\n # midnight on date to Julian day\r\n obs_date_midnight = astro_time.Time('%s-%s-%sT00:00:00' %\r\n (obs_date[:4], obs_date[4:6], obs_date[6:8]), 
format='isot')\r\n obs_date_midnight = obs_date_midnight.jd\r\n\r\n rdate = astro_time.Time(obs_date_midnight, format='jd',\r\n out_subfmt='date')\r\n rdate = rdate.iso\r\n\r\n # number of days after midnight at obs start\r\n obs_date_time = astro_time.Time('%s-%s-%s:%s:%s' %\r\n (obs_date[:4], obs_date[4:6], obs_date[6:11], obs_date[11:13],\r\n obs_date[13:]), format='isot')\r\n obs_date_time = obs_date_time.jd - obs_date_midnight\r\n\r\n # get specific items from the results that will be need in\r\n # the reduction\r\n reduce_interferogram = self.previous_results['reduceinterferogram']\r\n data_quality = reduce_interferogram['data_quality']\r\n scan_uvspectra = reduce_interferogram['scan_uvspectra']\r\n\r\n wavenumber = scan_uvspectra[0].wavenumber\r\n\r\n # construct lists of the values to be stored in each Table column\r\n n_uvspectra = max(scan_uvspectra.keys()) + 1\r\n mcomplex = 3\r\n mstokes = 1\r\n mfreq = len(wavenumber)\r\n mra = 1\r\n mdec = 1\r\n\r\n uv_data = np.zeros([n_uvspectra, mdec, mra, mfreq, mstokes, mcomplex])\r\n u = np.zeros([n_uvspectra])\r\n v = np.zeros([n_uvspectra])\r\n w = np.zeros([n_uvspectra])\r\n dates = np.zeros([n_uvspectra])\r\n times = np.zeros([n_uvspectra])\r\n baselines = np.zeros([n_uvspectra], dtype=np.int)\r\n freqid = np.ones([n_uvspectra], dtype=np.int)\r\n\r\n for k,val in scan_uvspectra.items():\r\n uv_data[k,0,0,:,0,0] = val.spectrum.real\r\n uv_data[k,0,0,:,0,1] = val.spectrum.imag\r\n uv_data[k,0,0,:,0,2] = np.ones(val.spectrum.real.shape)\r\n u[k] = np.mean(val.baseline_x)\r\n v[k] = np.mean(val.baseline_y)\r\n w[k] = np.mean(val.baseline_z)\r\n dates[k] = obs_date_midnight\r\n times[k] = obs_date_time + (np.mean(val.time) / (3600 * 24))\r\n baselines[k] = 258\r\n\r\n # external_params is referred to inside config.xml and can be\r\n # used to set parameters there\r\n light_speed = constants.c.to('m/s').value\r\n external_params = {'NCHAN':len(wavenumber),\r\n 'RDATE':rdate,\r\n 'REF_FREQ':0.0 * 100 * light_speed,\r\n 'CHAN_BW':np.abs(wavenumber[1] - wavenumber[0]) * \\\r\n 100 * light_speed}\r\n\r\n print \"Out: %s\\nConfig: %s\"%(idifitsfile, configxml)\r\n\r\n print('\\nConfiguring Array geography')\r\n print('--------------------------')\r\n # Meaningless numbers, hopefully not needed by any CASA method \r\n # that we want to use\r\n (latitude, longitude, elevation) = ('00:00:00.00', '00:00:00.00', 0)\r\n now = datetime.datetime.now()\r\n\r\n # Make ourselves an Array (pyEphem observer)\r\n array_geometry_m = np.array([\r\n [0.0, 0.0, 0.0],\r\n [0.0, 80.0, 0.0]], dtype = 'float32')\r\n beach = Array(lat=latitude, long=longitude, elev=elevation, date=now,\r\n antennas=array_geometry_m)\r\n\r\n print('\\nConfiguring phase source')\r\n print('--------------------------')\r\n # The source is our phase centre for UVW coordinates\r\n line = \"%s,f,%s,%s,%s,%d\" % ('Deep Space', '00:00:00',\r\n '00:00:00', '1', 2000)\r\n source = ephem.readdb(line)\r\n source.compute(beach)\r\n print \"Name: %s \\nRA: %s \\nDEC: %s\"%(source.name, source.ra, source.dec)\r\n\r\n # Make a new blank FITS HDU\r\n print('\\nCreating PRIMARY HDU')\r\n print('------------------------------------')\r\n hdu = make_primary(config=configxml, external_params=external_params)\r\n print repr(hdu.header)\r\n\r\n # Go through and generate required tables\r\n print('\\nCreating ARRAY_GEOMETRY')\r\n print('------------------------------------')\r\n tbl_array_geometry = make_array_geometry(config=configxml, num_rows=2,\r\n external_params=external_params)\r\n 
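        # Each FITS-IDI table below follows the same two-step pattern:
        # a make_* call builds an empty binary-table HDU from the
        # config.xml template (sized by num_rows), then a config_* call
        # fills in the actual row values. UV_DATA is the exception: its
        # rows are populated by hand in the time-sorted loop further down.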
tbl_array_geometry = config_array_geometry(tbl_array_geometry,\r\n array_geometry_m)\r\n print repr(tbl_array_geometry.header)\r\n\r\n print('\\nCreating FREQUENCY')\r\n print('------------------------------------')\r\n tbl_frequency = make_frequency(config=configxml, num_rows=1,\r\n external_params=external_params)\r\n tbl_frequency = config_frequency(tbl_frequency,\r\n external_params=external_params)\r\n print repr(tbl_frequency.header)\r\n\r\n print('\\nCreating SOURCE')\r\n print('------------------------------------')\r\n tbl_source = make_source(config=configxml, num_rows=1,\r\n external_params=external_params)\r\n tbl_source = config_source(tbl_source, source)\r\n print repr(tbl_source.header)\r\n\r\n print('\\nCreating ANTENNA')\r\n print('------------------------------------')\r\n tbl_antenna = make_antenna(config=configxml, num_rows=2,\r\n external_params=external_params)\r\n tbl_antenna = config_antenna(tbl_antenna)\r\n print repr(tbl_antenna.header)\r\n\r\n print('\\nCreating UV_DATA')\r\n print('------------------------------------')\r\n\r\n print 'Data dimensions: %i dumps, %i chans, %i pols, %i data' % (\r\n n_uvspectra, mfreq, mstokes, mcomplex)\r\n\r\n print('Generating blank UV_DATA rows...')\r\n tbl_uv_data = make_uv_data(config=configxml, num_rows=n_uvspectra,\r\n external_params=external_params)\r\n\r\n timesorted = np.argsort(times)\r\n\r\n for k in timesorted:\r\n tbl_uv_data.data[k]['FLUX'] = uv_data[k,0,0,:,0,:].ravel()\r\n tbl_uv_data.data[k]['UU'] = u[k] / light_speed\r\n tbl_uv_data.data[k]['VV'] = v[k] / light_speed\r\n tbl_uv_data.data[k]['WW'] = w[k] / light_speed\r\n tbl_uv_data.data[k]['BASELINE'] = baselines[k]\r\n tbl_uv_data.data[k]['DATE'] = dates[k]\r\n tbl_uv_data.data[k]['TIME'] = times[k]\r\n tbl_uv_data.data[k]['SOURCE'] = 1\r\n tbl_uv_data.data[k]['FREQID'] = 1\r\n tbl_uv_data.data[k]['INTTIM'] = 3\r\n\r\n print repr(tbl_uv_data.header)\r\n \r\n hdulist = pyfits.HDUList(hdus=\r\n [hdu,\r\n tbl_array_geometry,\r\n tbl_source, \r\n tbl_frequency,\r\n tbl_antenna,\r\n tbl_uv_data])\r\n\r\n print('Verifying integrity...') \r\n hdulist.verify()\r\n \r\n if(os.path.isfile(idifitsfile)):\r\n print('Removing existing file...')\r\n os.remove(idifitsfile)\r\n print('Writing to file...')\r\n hdulist.writeto(idifitsfile)\r\n\r\n print('Done.')\r\n\r\n self.result['idifitsfile'] = idifitsfile\r\n\r\n return self.result", "def make_obstab_era5fb_dic(self, dataset = '' , date_time = '', File = ''):\n index_offset = self.unique_dates[dataset][File]['index_offset']\n \n # Removing the index_offset, which is defined only if any slicing was done \n index = self.unique_dates[dataset][File]['indices'][date_time]['low'] - index_offset\n index_up = self.unique_dates[dataset][File]['indices'][date_time]['up'] - index_offset\n \n obs_dic = {} \n for v in self.observations_table_vars:\n obs_dic[v] = data[dataset][File]['observations_table'][v][index:index_up]\n #print('v is : ', v )\n\n \"\"\" Loop over the obs_tab to find duplicates.\n I fill a dictionary for each distinct pressure level, and I put inside\n the observed_variable number.\n If the list lready contains the combination pressure level - observed variable,\n then the record is skipped \"\"\"\n\n indices = [] # these are the only non-duplicates to be kept\n\n already_selected = { }\n \n #print('starting the loop: ' , date_time, ' ' , dataset, ' ', index, ' ' , index_up)\n for p,var,val,ind in zip ( obs_dic['z_coordinate'] , obs_dic['observed_variable'],obs_dic['observation_value'] 
,range(len(obs_dic['z_coordinate'])) ):\n            #print(p,var,val,ind)\n            #if date_time > 2354300000:\n            #    print('looping :::', var, ' ' , val, ' ' , ind , ' ', dataset, ' ' , index_up, ' ' , index, ' ', File)\n            \n            if self.only_std_plevels:\n                if p not in self.std_plevs:\n                    continue \n\n            \n            if p not in already_selected.keys():\n                already_selected[p] = []\n                \n            \n            if np.isfinite(val):\n                if var not in already_selected[p]:\n                    already_selected[p].append(var)\n                    indices.append(ind) # record to be kept\n                else:\n                    pass\n            else: # skipping nans\n                pass\n\n        #print('done with the loop')\n        red_obs_dic = {} # dictionary for the reduced (removed duplicates) obs_tab\n        for v in self.observations_table_vars:\n            red_obs_dic[v] = obs_dic[v][indices]\n\n        ''' Simply returns the proper format for 'null' value '''\n        def get_null( tipo = ''):\n            if tipo == np.int32 :\n                void = 0\n            elif tipo == np.float32 :\n                void = 0.0\n            elif tipo == np.bytes_ :\n                void = b'nan'\n            return void\n        \n        ''' Filling the feedback table. Only feedback for era5_1 and era5_2 is currently available. \n              Reads the total number of possible columns from the dic_type_attributes dictionary.\n              Era5_1 and era5_2 fb have different columns.\n              If data for a variable is not available, it fills with the appropriate null value '''\n        \n        #print('making the era5fb ', date_time, ' ' , dataset)\n        red_era5fb_dic = {}\n        for v in self.era5fb_columns:\n            tipo = self.dic_type_attributes['era5fb'][v]['type']  \n            if dataset == 'era5_1' or dataset == 'era5_2':\n                if v in data[dataset][File]['era5fb_tab'].keys(): \n                    red_era5fb_dic[v] = data[dataset][File]['era5fb_tab'][v][index:index_up][indices]\n                else:\n                    void = get_null(tipo = tipo)\n                    red_era5fb_dic[v]= np.full(len(indices), void)  \n            else:   # no feedback for non era5_1 or era5_2 datasets  \n                void = get_null(tipo = tipo)\n                red_era5fb_dic[v]= np.full(len(indices), void)\n        \n        #print('done making_obstab_era5fb')\n        \"\"\"\n        try:\n            if len(red_obs_dic['date_time']) > 2:\n                print('yes')\n            else:\n                print('check')  \n        except:\n            print('check')\n        \"\"\"  \n        return red_obs_dic , red_era5fb_dic", "def exper(self):\n        self.dbase.pop('time') # since we do not want the time data to be included in our calculation we drop it out.\n        ind=list(zip(self.start, self.stop)) # here I recommend to Google 'zip , list python' to understand what is going on :)\n        Experiments={} # assigning a local dictionary variable\n        for x in range(self.NrExperiments):\n            Experiments[\"Experiment{0}\".format(x)]=[] # these two lines create keys for each experiment. For each experiment we are going to collect mean and Std\n            \n        # the next passage is a little bit harsh to digest at once  \n        for i in range(self.NrExperiments): # we are looping n-times n=number of experiments\n            for key in sorted(self.dbase.keys()): # every time we are going through each key of the dictionary with the data\n                if len(key) == 2: # we check what the key looks like. If you remember, the first number of the key corresponds to the row at which the cell is, and \n                                  # the second part of the key corresponds to the column the cell is coming from. For example key = '32' tells you row = 3 , column = 2\n                    if int(key[0]) in list(range(ind[i][0],ind[i][1])) and key[1]==str(self.col): # here we check if the first number of the key (key[0]) is in the range of start-stop rows and at the same time \n                                                                                                  # at which column key[1].
If it is in the searched column and rows, we append it to the experiment of interest\n                        Experiments[\"Experiment{0}\".format(i)].append(self.dbase[key])\n                else:\n                    if int(key[0]) in list(range(ind[i][0],ind[i][1])) and key[1]+key[2] ==str(self.col): # We have columns 10, 11, 12 which have keys like, for example, key = '212' , which tells you row= 2, column = '12'\n                        Experiments[\"Experiment{0}\".format(i)].append(self.dbase[key]) # this is the same as above\n                        \n            self.ListExperiments.append(np.array(Experiments[\"Experiment{0}\".format(i)])) # at the end we collect all data for our experiments in a final list 'ListExperiments'\n        return self.ListExperiments", "def buildInstrumentTable(inst):\n    names = [\"\" for k in inst.devices]\n    for k,v in inst.devices.items():\n        names[v] = k\n    for n in names:\n        # If any of the device names is null, then the indices are screwed up\n        if n==\"\":\n            print 'ERROR: Device indices screwed up for instrument',inst.name\n            sys.exit(1)\n    # Make an array of zeros to build the [xy]Min/Max columns\n    z = [0. for n in names]\n    hdu = pf.BinTableHDU.from_columns(\\\n        pf.ColDefs( [pf.Column(name='NAME',format=py_to_fits(names),array=names),\n                     pf.Column(name='XMIN',format=py_to_fits(z),array=z),\n                     pf.Column(name='XMAX',format=py_to_fits(z),array=z),\n                     pf.Column(name='YMIN',format=py_to_fits(z),array=z),\n                     pf.Column(name='YMAX',format=py_to_fits(z),array=z)]),\n                     name='Instrument')\n    hdu.header['NAME'] = inst.name\n    if inst.band is None:\n        hdu.header['BAND'] = inst.name\n    else:\n        hdu.header['BAND'] = inst.band\n    hdu.header['NUMBER'] = inst.index\n    #hdu.header['EXTNAME'] = 'Instrument'\n    return hdu", "def observation_spec(self) -> Dict[str, Any]:", "def _get_observation(self):\n        return []", "def get_importance_dataframe(self,obslist_dict=None):\n        self.log(\"calculating importance of observations\")\n        if obslist_dict is None:\n            obs = self.pst.observation_data.loc[:,[\"obsnme\",\"weight\"]]\n            obslist_dict = {}\n            for o, w in zip(obs.obsnme,obs.weight):\n                if w > 0:\n                    obslist_dict[o] = [o]\n\n        results = {}\n        names = [\"base\"]\n        for forecast,pt in self.posterior_forecast.items():\n            results[forecast] = [pt]\n        for case_name,obs_list in obslist_dict.items():\n            names.append(case_name)\n            self.log(\"calculating contribution from: \" + str(obs_list) + '\\n')\n            case_post = self.__importance_of_observations(obs_list)\n            self.log(\"calculating contribution from: \" + str(obs_list) + '\\n')\n            for forecast,pt in case_post.items():\n                results[forecast].append(pt)\n        df = pd.DataFrame(results,index=names)\n        self.log(\"calculating importance of observations\")\n        return df", "def _table(self, images):\n        header = ['Name', 'Pulled', 'Saved']\n        data = []\n        for item in images:\n            if item not in self._missing:\n                data.append((item, True, True if self._save else 'N/A'))\n            else:\n                data.append((item, self._missing[item]['pulled'], self._missing[item]['saved']))\n        return self._check_table(header, {'Name': 'l'}, data)", "def add_environment_toTable(tab, in_ds, envvar_take=[],tabvar_skip=[], rainvar_name=None, env_tformat=\"%Y-%m-%d %H:%M:%S\", env_hour=12):\n\n    tab_tformat = \"%Y-%m-%d_%H:%M\"\n    dic = {}\n    for k in tab.keys():\n        if k in tabvar_skip: # option to add variables to be excluded from environment table\n            continue\n        dic[k] = tab[k]\n\n    ds = in_ds\n    envdates = pd.to_datetime(ds.time.dt.floor('T'), format=env_tformat)\n    ###### sample variables\n    for tlat, tlon, date, mask, tir in zip(dic['tminlat'], dic['tminlon'], dic['date'], dic['cloudMask'], dic['tir']):\n\n\n        #save cloud-wide rainfall stats, and rainfall maximum 
at ~0.15deg\n if rainvar_name is not None:\n tabdate = pd.to_datetime(date, format=tab_tformat) # rainfall sampling same time as TIR\n\n pos = envdates == tabdate\n rain = ds[rainvar_name].isel(time=pos).where(mask).squeeze() # to mm/h\n pmax_pos = np.nanargmax(rain.values)\n ppos_2d = np.unravel_index(pmax_pos, rain.shape)\n pmax_lon = rain.lon[ppos_2d[1]]\n pmax_lat = rain.lat[ppos_2d[0]]\n pmax = rain.sel(lon=slice(pmax_lon - 0.075, pmax_lon + 0.075), lat=slice(pmax_lat - 0.075, pmax_lat + 0.075))\n # ipdb.set_trace()\n pmax = pmax.mean().values\n if (rainvar_name + '_mean') not in dic.keys():\n for tag in ['_mean', '_max', '_p95', '_p99']:\n dic[rainvar_name + tag] = []\n dic[rainvar_name + '_mean'].append(float(rain.mean().values)) # full cloud mean\n dic[rainvar_name + '_max'].append(float(pmax)) # ~0.15deg rain max\n dic[rainvar_name + '_p95'].append(float(rain.quantile(0.95).values))\n dic[rainvar_name + '_p99'].append(float(rain.quantile(0.99).values))\n\n ## save mean environments at ~0.7deg centred on location of minimum storm temperature\n if len(envvar_take) > 0:\n tabdate = pd.to_datetime(date, format=tab_tformat).replace(hour=env_hour, minute=0) # hour of environment sampling\n pos = envdates == tabdate\n single = ds.isel(time=pos).sel(longitude=slice(tlon - 0.375, tlon + 0.375), latitude=slice(tlat - 0.375, tlat + 0.375))\n # ipdb.set_trace()\n single = single.mean()\n for vt in envvar_take:\n if vt in dic.keys():\n dic[vt].append(single[vt].values)\n else:\n dic[vt] = [single[vt].values]\n return dic", "def _get_observations(self):\n food = np.array(self.game.state.data.food.data)\n walls = np.array(self.game.state.data.layout.walls.data)\n map_shape = walls.shape\n capsules = self.game.state.data.capsules\n pacman_pos = self.game.state.data.agentStates[0].configuration.pos\n\n gosts_pos = list(map(lambda agent: agent.configuration.pos,\n self.game.state.data.agentStates[1:]))\n gosts_scared = list(\n map(lambda agent: agent.scaredTimer > 0, self.game.state.data.agentStates[1:]))\n\n \"\"\"\n 0: empty,\n 1: wall,\n 2: food,\n 3: capsules,\n 4: ghost,\n 5: scared ghost,\n 6: pacman\n \"\"\"\n\n view_slices = ((max(pacman_pos[0]-self.view_distance[0], 0), min(pacman_pos[0]+self.view_distance[0]+1, map_shape[0])),\n (max(pacman_pos[1]-self.view_distance[1], 0), min(pacman_pos[1]+self.view_distance[1]+1, map_shape[1])))\n\n def select(l):\n return l[view_slices[0][0]:view_slices[0][1], view_slices[1][0]:view_slices[1][1]]\n\n obs = np.vectorize(lambda v: 1 if v else 0)(select(walls))\n obs = obs + np.vectorize(lambda v: 2 if v else 0)(select(food))\n\n def pos_to_relative_pos(pos):\n if (pos[0] < view_slices[0][0] or view_slices[0][1] <= pos[0]\n or pos[1] < view_slices[1][0] or view_slices[1][1] <= pos[1]):\n return None\n else:\n return pos[0]-view_slices[0][0], pos[1]-view_slices[1][0]\n\n for c_relative_pos in filter(lambda x: x is not None, map(pos_to_relative_pos, capsules)):\n obs[c_relative_pos[0], c_relative_pos[1]] = 3\n\n for i, g_relative_pos in enumerate(map(pos_to_relative_pos, gosts_pos)):\n if (g_relative_pos is not None):\n obs[int(g_relative_pos[0]), int(g_relative_pos[1])\n ] = 5 if gosts_scared[i] else 4\n\n pacman_relative_pos = pos_to_relative_pos(pacman_pos)\n\n obs[pacman_relative_pos[0], pacman_relative_pos[1]] = 6\n\n obs[0, 0] = 2 if np.any(\n food[0:pacman_pos[0]+1, 0:pacman_pos[1]+1]) else 0\n obs[obs.shape[0]-1,\n 0] = 2 if np.any(food[pacman_pos[0]:map_shape[0], 0:pacman_pos[1]+1])else 0\n\n obs[0, obs.shape[1] -\n 1] = 2 if 
np.any(food[0:pacman_pos[0]+1, pacman_pos[1]:map_shape[0]]) else 0\n obs[obs.shape[0]-1, obs.shape[1]-1] = 2 if np.any(\n food[pacman_pos[0]:map_shape[0], pacman_pos[1]:map_shape[0]]) else 0\n\n # print(np.transpose(obs)[::-1, :])\n\n return obs", "def get_k2_data_from_exofop(epic, table=\"star\"):\n keys = {\n \"phot\": 1,\n \"mag\": 1,\n \"star\": 2,\n \"planet\": 3,\n \"spec\": 4,\n \"imaging\": 5,\n \"file\": 8,\n }\n errmsg = f\"table={table} not in\\n{list(keys.keys())}\"\n assert table in list(keys.keys()), errmsg\n key = keys[table]\n url = f\"https://exofop.ipac.caltech.edu/k2/edit_target.php?id={epic}\"\n data = pd.read_html(url, attrs={\"id\": f\"myTable{key}\"})[0]\n # remove multi-index column\n data = data.T.reset_index(level=0, drop=True).T\n data[\"epic\"] = epic\n return data", "def observationsTableName(self):\n return 'observations'", "def gen_obs(self):\n\n #grid, vis_mask = self.gen_obs_grid()\n\n # Encode the partially observable view into a numpy array\n image = self.grid.encode(self.agent_pos,self.drone_pos)\n\n #assert hasattr(self, 'mission'), \"environments must define a textual mission string\"\n\n # Observations are dictionaries containing:\n # - an image (partially observable view of the environment)\n # - the agent's direction/orientation (acting as a compass)\n # - a textual mission string (instructions for the agent)\n obs = {\n 'image': image,\n #'direction': self.agent_dir,\n 'mission': self.mission\n }\n obs=image\n #print(obs.shape)\n return self.render(mode='rgb_array')\n #return obs", "def gen_tab(cat):\n\n col = ['FLUX_APER2','FLUX_APER4','FLUX_APER5','FLUX_APER8','FLUX_APER10','FLUX_APER14',\n 'MAG_APER2','MAG_APER4','MAG_APER5','MAG_APER8','MAG_APER10','MAG_APER14',\n 'MAG_AUTO','MAG_PETRO','KRON_RADIUS',\n 'PETRO_RADIUS','FLUX_MAX','ISOAREAF_IMAGE','x',\n 'y','ra','dec','X2_IMAGE','Y2_IMAGE','XY_IMAGE',\n 'THETA_IMAGE','X2WIN_IMAGE','Y2WIN_IMAGE','XYWIN_IMAGE','AWIN_IMAGE','BWIN_IMAGE',\n 'THETAWIN_IMAGE','AWIN_WORLD','BWIN_WORLD','THETAWIN_WORLD',\n 'MU_MAX','FLAGS','FWHM_IMAGE','ELONGATION','SEX_CLASS','FLUX_RADIUS25',\n 'FLUX_RADIUS50','FLUX_RADIUS85','FLUX_RADIUS95','FLUX_RADIUS99']\n print('generating features table: {}'.format(cat))\n tab = pd.read_table(cat,skiprows=41,sep=r'\\s+',header=None, names=col)\n\n # crop the image for just using the central part of the image\n tab = crop(tab)\n\n # add concentration column by subtracting mag10 by mag5, rejecting the detections with negative concentration\n tab['CONCENT'] = tab.MAG_APER5 - tab.MAG_APER10\n tab = tab[tab.CONCENT > 0]\n\n # normalizing the columns\n print('normalizing features...')\n seesq_norm = ['X2_IMAGE','Y2_IMAGE','X2WIN_IMAGE',\n 'Y2WIN_IMAGE','XY_IMAGE','XYWIN_IMAGE',\n 'ISOAREAF_IMAGE']\n see_norm = ['AWIN_WORLD','AWIN_WORLD','FWHM_IMAGE',\n 'KRON_RADIUS','PETRO_RADIUS','FLUX_RADIUS25',\n 'FLUX_RADIUS50','FLUX_RADIUS85',\n 'FLUX_RADIUS95','FLUX_RADIUS99']\n mag_norm = ['MAG_APER4','MAG_APER5','MAG_APER8',\n 'MAG_APER10','MAG_APER14','MAG_AUTO',\n 'MAG_PETRO','MU_MAX','CONCENT']\n flux_norm = ['FLUX_APER2','FLUX_APER4','FLUX_APER5',\n 'FLUX_APER8','FLUX_APER10','FLUX_APER14']\n fwhm_mean = tab.FWHM_IMAGE.mean()\n for seesq_col in seesq_norm:\n tab[seesq_col] = tab[seesq_col] / (fwhm_mean**2)\n for see_col in see_norm:\n tab[see_col] = tab[see_col] / fwhm_mean\n for mag_col in mag_norm:\n tab[mag_col] = tab[mag_col] * tab['MAG_APER2']\n for flux_col in flux_norm:\n tab[flux_col] = tab[flux_col] * tab['FLUX_MAX']\n tab['CONCENT'] = -1 * tab['CONCENT']\n\n # add column for 
galactic latitude\n print('calculating galactic latitude...')\n ra = np.array(tab['ra'].values)\n dec = np.array(tab['dec'].values)\n pos = SkyCoord(ra=ra*u.degree, dec=dec*u.degree, frame='icrs')\n tab['b'] = list(pos.galactic.b.deg)\n\n tab.drop(['MAG_APER2','FLUX_MAX','x','y'], axis=1, inplace=True)\n tab.to_csv(cat[:-4]+'.csv', index=False, header=True)", "def get_histos(fout, fileList, variable, rebin=None):\n\n hists = {}\n os_id_iso, ss_id_iso, os_antiid_antiiso, ss_antiid_antiiso = {}, {}, {}, {}\n os_antiid_iso, ss_antiid_iso, os_id_antiiso, ss_id_antiiso = {}, {}, {}, {}\n os_id_ctr, ss_id_ctr, os_antiid_ctr, ss_antiid_ctr = {}, {}, {}, {}\n for ifile in fileList:\n fin = ROOT.TFile(ifile, 'READ')\n fout.cd()\n\n name = ifile.split('/')[-1].replace('.root', '')\n if name == 'Data_output':\n name = 'Data'\n print name\n hists[ifile.split('/')[-1].replace('.root', '')] = grab(fin, 'OS_pass', variable, rebin)\n os_id_iso[name] = grab(fin, 'OS_pass', variable, rebin)\n ss_id_iso[name] = grab(fin, 'SS_pass', variable, rebin)\n os_antiid_antiiso[name] = grab(fin, 'OS_anti_fail', variable, rebin)\n ss_antiid_antiiso[name] = grab(fin, 'SS_anti_fail', variable, rebin)\n os_antiid_iso[name] = grab(fin, 'OS_anti_pass', variable, rebin)\n ss_antiid_iso[name] = grab(fin, 'SS_anti_pass', variable, rebin)\n os_id_antiiso[name] = grab(fin, 'OS_fail', variable, rebin)\n ss_id_antiiso[name] = grab(fin, 'SS_fail', variable, rebin)\n os_id_ctr[name] = grab(fin, 'OS_control', variable, rebin)\n ss_id_ctr[name] = grab(fin, 'SS_control', variable, rebin)\n os_antiid_ctr[name] = grab(fin, 'OS_anti_control', variable, rebin)\n ss_antiid_ctr[name] = grab(fin, 'SS_anti_control', variable, rebin)\n fin.Close()\n return hists, {\n 'os_id_iso': os_id_iso,\n 'ss_id_iso': ss_id_iso,\n 'os_antiid_antiiso': os_antiid_antiiso,\n 'ss_antiid_antiiso': ss_antiid_antiiso,\n 'os_antiid_iso': os_antiid_iso,\n 'ss_antiid_iso': ss_antiid_iso,\n 'os_id_antiiso': os_id_antiiso,\n 'ss_id_antiiso': ss_id_antiiso,\n 'os_id_ctr': os_id_ctr,\n 'ss_id_ctr': ss_id_ctr,\n 'os_antiid_ctr': os_antiid_ctr,\n 'ss_antiid_ctr': ss_antiid_ctr,\n\n }", "def _prepare_data(self):\n self._factors = []\n if isinstance(self._heatmap_data, pandas.DataFrame):\n # Make it look multifactored if it is a single experiment\n self._heatmap_data = [ ( [('',''),('','')] , self._heatmap_data) ]\n for facts,data in self._heatmap_data:\n if self._num_frames is None:\n self._num_frames = len(data.iloc[:,self._ndx_t].unique())\n if self._categories is None:\n self._categories = np.unique(data.iloc[:,self._ndx_cat])\n if len(self._categories) > 1:\n self._is_multi = True\n if len(self._categories) > 3:\n raise ValueError('Animations are currently limited to three categories')\n self._factors.append(facts)\n abundances = data.iloc[:,self._ndx_cell:].astype('float')\n d_min = abundances.min().min()\n d_max = abundances.max().max()\n if self._vmin is None or d_min < self._vmin:\n self._vmin = d_min\n if self._vmax is None or d_max > self._vmax:\n self._vmax = d_max", "def impute_dates(tables, dates):\n new_fights = []\n for idx, date in enumerate(dates):\n if date == 'FUTURE EVENTS':\n break\n tables[idx]['Date'] = date\n for table in tables[:-1]:\n fights = [table[x:x+2] for x in range(0, len(table), 2)] \n for idxf, fight in enumerate(fights):\n fight.reset_index(drop=True, inplace=True)\n fight['Time'] = fight['Time'][0]\n new_fights.append(fight) \n return new_fights", "def iex_equities(symbols):\n # strict this in memory so that we can reiterate over it\n 
symbols = tuple(symbols)\n\n def ingest(environ,\n asset_db_writer,\n minute_bar_writer, # ignored\n daily_bar_writer,\n adjustment_writer,\n calendar,\n start_session, # ignored\n end_session, # ignored\n cache,\n show_progress,\n output_dir):\n\n metadata = pd.DataFrame(np.empty(len(symbols), dtype=[\n ('start_date', 'datetime64[ns]'),\n ('end_date', 'datetime64[ns]'),\n ('auto_close_date', 'datetime64[ns]'),\n ('symbol', 'object'),\n ]))\n\n today = datetime.today()\n start = datetime(today.year-5,today.month,today.day)\n \n def _pricing_iter():\n sid = 0\n with maybe_show_progress(\n symbols,\n show_progress,\n label='Downloading IEX pricing data: ') as it, \\\n requests.Session() as session:\n for symbol in it:\n path = _cachpath(symbol, 'ohlcv')\n try:\n df = cache[path]\n except KeyError:\n df = cache[path] = get_historical_data(symbol, start=start, end=None, output_format='pandas').sort_index()\n df.index = pd.to_datetime(df.index)\n # the start date is the date of the first trade and\n # the end date is the date of the last trade\n start_date = df.index[0]\n end_date = df.index[-1]\n # The auto_close date is the day after the last trade.\n ac_date = end_date + pd.Timedelta(days=1)\n metadata.iloc[sid] = start_date, end_date, ac_date, symbol\n\n df.rename(\n columns={\n 'Open': 'open',\n 'High': 'high',\n 'Low': 'low',\n 'Close': 'close',\n 'Volume': 'volume',\n },\n inplace=True,\n )\n yield sid, df\n sid += 1\n\n daily_bar_writer.write(_pricing_iter(), show_progress=True)\n\n metadata['exchange'] = \"NYSE\"\n \n symbol_map = pd.Series(metadata.symbol.index, metadata.symbol)\n asset_db_writer.write(equities=metadata)\n\n adjustment_writer.write()\n\n return ingest", "def observation(self, observation):\n final_obs = []\n for obs in observation:\n o = []\n o.extend(obs['left_team'].flatten())\n o.extend(obs['left_team_direction'].flatten())\n o.extend(obs['right_team'].flatten())\n o.extend(obs['right_team_direction'].flatten())\n\n # If there were less than 11vs11 players we backfill missing values with\n # -1.\n # 88 = 11 (players) * 2 (teams) * 2 (positions & directions) * 2 (x & y)\n if len(o) < 88:\n o.extend([-1] * (88 - len(o)))\n\n # ball position\n o.extend(obs['ball'])\n # ball direction\n o.extend(obs['ball_direction'])\n # one hot encoding of which team owns the ball\n if obs['ball_owned_team'] == -1:\n o.extend([1, 0, 0])\n if obs['ball_owned_team'] == 0:\n o.extend([0, 1, 0])\n if obs['ball_owned_team'] == 1:\n o.extend([0, 0, 1])\n\n active = [0] * 11\n if obs['active'] != -1:\n active[obs['active']] = 1\n o.extend(active)\n\n game_mode = [0] * 7\n game_mode[obs['game_mode']] = 1\n o.extend(game_mode)\n final_obs.append(o)\n return np.array(final_obs, dtype=np.float32)", "def create_observations_dict(instances, unprocessed_map_data):\n # print(instances)\n results = {}\n for idx in instances:\n results[idx] = {}\n if idx == instances[0]:\n tag_filter_pixel_corners = B.compute_corner_pixels(\n idx, unprocessed_map_data\n ).tolist()\n # prettified_corner_pixels = [tag_filter_pixel_corners[:2,0],tag_filter_pixel_corners[:2,1],tag_filter_pixel_corners[:2,2],tag_filter_pixel_corners[:2,3]]\n # results[idx][\"corner_pixels\"] = [pixel_pair.tolist() for pixel_pair in prettified_corner_pixels]\n results[idx][\"corner_pixels\"] = tag_filter_pixel_corners\n results[idx][\"tag_pose\"] = B.compute_tag_pose(\n idx, unprocessed_map_data\n ).tolist()\n results[idx][\"camera_pose\"] = B.compute_camera_pose(\n idx, unprocessed_map_data\n ).tolist()\n\n return results", "def 
plot_all_indicies(resolutions, temporal_resolution, temporal_decomposition, detrend, imagefolder = 'images/timeseries/INDICIES/', indicies = ['SAM', 'IPO', 'DMI', 'ENSO']):\n for temp_res, temp_decomp, dt, indexname in itertools.product(temporal_resolution, temporal_decomposition, detrend, indicies):\n plot_index_timeseries(anomlous = 'anomalous' == temp_decomp, temporal_resolution = temp_res, detrend = dt == 'detrended', indexname = indexname)", "def process_observation(self, observation):\n #print(\"start_process_obs\")\n processed_observation = np.zeros((NB_AGENTS, OBSERVATION_SIZE))\n\n goliath_type = getattr(env, 'Terran_Goliath')\n battlecruiser_type = getattr(env, 'Terran_Battlecruiser')\n '''\n goliath and battlecruiser type:\n hp_max: 125\n armor: 1\n cooldown_max: 22\n acceleration: 1\n top_speed: 4.57\n damage_amount: 12\n damage_factor: 1\n weapon_range: 192\n sight_range: 256\n seek_range: 160\n\n hp_max: 500\n energy_max: 200\n armor: 3\n cooldown_max: 30\n acceleration: 27\n top_speed: 2.5\n damage_amount: 25\n damage_factor: 1\n weapon_range: 192\n sight_range: 352\n '''\n #print(\"goliath and battlecruiser type:\")\n #print(goliath_type)\n #print(battlecruiser_type)\n\n for i, agent in enumerate(observation.my_unit):\n if agent.hp <= 0:\n continue\n my_x = agent.pos_x\n my_y = agent.pos_y\n my_type_str = agent.unit_type\n my_type = goliath_type if my_type_str == 'Terran_Goliath' else print(\"error in the my_type\")\n t1 = [agent.hp + agent.shield, agent.cooldown, math.atan2(agent.velocity_y, agent.velocity_x),\n math.sqrt((agent.velocity_x) ** 2 + (agent.velocity_y) ** 2), agent.angle,\n 1 if agent.accelerating else -1 if agent.braking else 0, agent.attacking, agent.is_attack_frame]\n t2 = [self.last_action[i] / (env.action_space[1] - 1)]\n t3 = [i.nearest_obstacle_dist for i in agent.pos_info]\n t4 = []\n t5 = []\n t4_max = []\n t5_max = []\n for idx, enemy in enumerate(observation.en_unit):\n en_type_str = enemy.unit_type\n if en_type_str == 'Terran_Battlecruiser':\n en_type = battlecruiser_type\n else:\n continue \n if enemy.hp <= 0:\n t4.extend([0,0,0,0,0,0,0,0,0,0])\n else:\n t4.extend([math.atan2(enemy.pos_y - my_y, enemy.pos_x - my_x), math.sqrt((enemy.pos_x - my_x) ** 2 + (enemy.pos_y - my_y) ** 2),\n math.atan2(enemy.velocity_y, enemy.velocity_x), math.sqrt((enemy.velocity_x) ** 2 + (enemy.velocity_y) ** 2),\n enemy.cooldown, enemy.hp + enemy.shield, enemy.angle, 1 if agent.accelerating else -1 if agent.braking else 0, agent.attacking, agent.is_attack_frame])\n t4_max.extend([math.pi, 320, math.pi, en_type.top_speed, en_type.cooldown_max, en_type.hp_max + en_type.shield_max, math.pi, 1, 1, 1])\n for idx, ally in enumerate(observation.my_unit):\n if i == idx:\n continue\n if ally.hp <= 0:\n t5.extend([0,0,0,0,0])\n else:\n t5.extend([math.atan2(ally.pos_y - my_y, ally.pos_x - my_x), math.sqrt((ally.pos_x - my_x) ** 2 + (ally.pos_y - my_y) ** 2),\n math.atan2(ally.velocity_y, ally.velocity_x), math.sqrt((ally.velocity_x) ** 2 + (ally.velocity_y) ** 2), ally.hp + ally.shield])\n ally_type = goliath_type\n t5_max.extend([math.pi, 320, math.pi, ally_type.top_speed, ally_type.hp_max + ally_type.shield_max])\n if my_type_str == 'Terran_Goliath':\n t1_max = [my_type.hp_max + my_type.shield_max, 1, math.pi, my_type.top_speed, math.pi, 1, 1, 1]\n else:\n t1_max = [my_type.hp_max + my_type.shield_max, my_type.cooldown_max, math.pi, my_type.top_speed, math.pi, 1, 1, 1]\n #t4_max = [math.pi, 320, math.pi, en_type.top_speed, en_type.cooldown_max, en_type.hp_max + 
en_type.shield_max, math.pi, 1, 1, 1]\n #t5_max = [math.pi, 320, math.pi, ally_type.top_speed, ally_type.hp_max + ally_type.shield_max]\n\n #t5_max = [32, 32, type.hp_max + type.shield_max, type.cooldown_max,\n #32, 32, type.hp_max + type.shield_max, type.cooldown_max,\n #32, 32, type.hp_max + type.shield_max, type.cooldown_max,\n #32, 32, type.hp_max + type.shield_max, math.pi,\n #32, 32, type.hp_max + type.shield_max, math.pi,\n #32, 32, type.hp_max + type.shield_max, math.pi]\n\n t1 = np.divide(t1, t1_max) # runtime warning\n t2 = np.array(t2) / 320\n t3 = np.array(t3) / 320\n t4 = np.divide(t4, t4_max)\n t5 = np.divide(t5, t5_max)\n\n processed_observation[i] = np.concatenate([t1, t2, t3, t4, t5])\n\n self.last_my_unit_cnt.append(np.sum(np.array([u.hp+u.shield for u in observation.my_unit]) > 0))\n self.last_enemy_unit_cnt.append(np.sum(np.array([u.hp+u.shield for u in observation.en_unit]) > 0))\n self.last_enemy_unit_hp.append(sum([u.hp + u.shield for u in observation.en_unit]))\n self.accumulated_observation.append(processed_observation)\n\n\n return processed_observation", "def add_expl_data(i_exp, expl_id, prj):\n exp_remarks = tbl_exploration['mem_Remarks'][i_exp] \\\n if not pd.isna(tbl_exploration['mem_Remarks'][i_exp]) \\\n else None\n exp_ewt = tbl_exploration['dbl_DepthToWaterStatic'][i_exp] * 0.00328084 \\\n if not pd.isna(\n tbl_exploration['dbl_DepthToWaterStatic'][i_exp]) else None\n exp_predom = tbl_exploration[\n 'txt_USCSCodePredominant'][i_exp] if not pd.isna(\n tbl_exploration['txt_USCSCodePredominant'][i_exp]) else 'NA'\n exp_predom = uscs_predom[exp_predom]\n exp_type = tbl_exploration['txt_KeyExplorationType'][i_exp]\n exp_elev = tbl_exploration['dbl_GroundElevation'][i_exp] * 0.00328084 \\\n if not pd.isna(\n tbl_exploration['dbl_GroundElevation'][i_exp]) else None\n exploration = Borings(\n name=expl_id,\n project=prj,\n remarks=exp_remarks,\n ewt=exp_ewt,\n predom_soil=exp_predom,\n type=exp_type,\n elevation=exp_elev)\n\n return exploration", "def set_data(self):\n # take care of samples\n patients = self.samples.iloc[:,1].tolist()\n samples = self.samples.iloc[:,0].tolist()\n self.samples = pd.DataFrame(patients,index = samples,columns = ['patient']) # indexed by sample\n #\n # take care of expression data\n cols = self.expression.SYMBOL.tolist() # set new column names to transposed expression_data \n \n new_exp = self.expression.T.ix[1:,:] # transpose\n new_exp.columns = cols\n self.expression = new_exp # add columns\n self.data = pd.merge(self.expression,self.samples,left_index = True,right_index=True) # merged data sets\n #pd.merge(df1,df2,how = 'left',left_index=True,right_index=True) # do a left join", "def loop_observations ( self, start_date, end_date, step=1, fmt=\"%Y-%m-%d\" ):\n\n start_date = datetime.datetime.strptime( start_date, fmt )\n end_date = datetime.datetime.strptime( end_date, fmt )\n if start_date < self.date[0]:\n print \"No observations until %s, starting from there\" % self.date[0]\n start_date = self.date[0]\n\n if end_date > self.date[-1]:\n print \"No observations after %s, stopping there\" % self.date[-1]\n end_date = self.date[-1]\n\n delta = datetime.timedelta ( days=step )\n this_date = start_date.date()\n end_date = end_date.date() + delta\n obs_dates = [ x.date() for x in self.date ]\n while this_date < end_date:\n if this_date in obs_dates:\n iloc = obs_dates.index ( this_date )\n have_obs = True\n the_data = self._data_pntr[iloc].ReadAsArray()\n try:\n the_mask = map ( *self.masks[iloc] )\n except:\n the_mask = 
self.get_mask ( iloc )\n the_emulator = self.emulator[ iloc ]\n the_sza = self.sza[ iloc ]\n the_saa = self.saa[ iloc ]\n the_vza = self.vza[ iloc ]\n the_vaa = self.vaa[ iloc ]\n the_fname = self._data_pntr[iloc].GetDescription()\n try:\n the_sensor = self.sensor[iloc]\n except:\n the_sensor = self.sensor\n try:\n the_spectrum = self.spectral[iloc]\n except:\n the_spectrum = self.spectral\n\n else:\n have_obs = False\n the_data = None\n the_mask = None\n the_emulator = None\n the_sza = None\n the_saa = None\n the_vza = None\n the_vaa = None\n the_fname = None\n the_spectrum = None\n the_sensor = None\n this_date += delta\n retval = namedtuple ( \"retval\", [\"have_obs\", \"sensor\", \"date\", \"image\", \"mask\", \"emulator\",\n \"sza\", \"saa\", \"vza\", \"vaa\", \"fname\", \"spectrum\"] )\n retvals = retval ( have_obs=have_obs, sensor=the_sensor, \n date=this_date - delta, image=the_data, mask=the_mask, emulator=the_emulator, sza=the_sza,\n saa=the_saa, vza=the_vza, vaa=the_vaa, fname=the_fname, spectrum=the_spectrum )\n yield retvals", "def sequence_ingest(self,sequence):\n\t\tdata=self.data\n\t\t\n\t\tcounter=0\n\n\t\tfor item in data[sequence]:\n\t\t\tdatestring=item['specimenDate']\n\t\t\tdate=fetchdate(datestring)\n\t\t\trow,created=DailyCases.objects.get_or_create(specimenDate=date,areacode=item['areaCode'])\n\t\t\trow.areaname=item['areaName']\n\t\t\trow.dailyLabConfirmedCases=item['dailyLabConfirmedCases']\n\t\t\trow.totalLabConfirmedCases=item['totalLabConfirmedCases']\n\t\t\trow.changeInDailyCases=item['changeInDailyCases']\n\t\t\trow.dailyTotalLabConfirmedCasesRate=item['dailyTotalLabConfirmedCasesRate']\n\t\t\trow.previouslyReportedDailyCases=item['previouslyReportedDailyCases']\n\t\t\trow.previouslyReportedTotalCases=item['previouslyReportedTotalCases']\n\t\t\trow.changeInTotalCases=item['changeInTotalCases']\n\t\t\trow.save()\n\t\t\tcounter+=1\n\t\tlog.info(f'Processed: {counter} rows')", "def summary_info_events(filename):\n # filename = self.out_filename('events')\n print('Reading {}'.format(filename))\n table = Table.read(str(filename), hdu='EVENTS')\n data = dict()\n\n # Copy over header info to the summary table\n data['RA_PNT'] = np.float32(table.meta['RA_PNT'])\n data['DEC_PNT'] = np.float32(table.meta['DEC_PNT'])\n # data['GLON_PNT'] = np.float32(table.meta['GLON_PNT'])\n # data['GLAT_PNT'] = np.float32(table.meta['GLAT_PNT'])\n data['ALT_PNT'] = np.float32(table.meta['ALT_PNT'])\n data['AZ_PNT'] = np.float32(table.meta['AZ_PNT'])\n #data['ZEN_PNT'] = np.float32(90. - table.meta['ALT_PNT'])\n data['ZEN_PNT'] = np.float32(90. - table['ALT'].mean())\n data['ONTIME'] = np.float32(table.meta['ONTIME'])\n data['LIVETIME'] = np.float32(table.meta['LIVETIME'])\n data['DEADC'] = np.float32(table.meta['DEADC'])\n\n MJDREFI = table.meta['MJDREFI']\n MJDREFF = table.meta['MJDREFF']\n MJDREF = MJDREFI + MJDREFF\n\n TSTART_MET = table.meta['TSTART'] / 3600. / 24.\n TSTOP_MET = table.meta['TSTOP'] / 3600. 
/ 24.\n\n start_time = Time(MJDREF + TSTART_MET, scale='tt', format='mjd')\n stop_time = Time(MJDREF + TSTOP_MET, scale='tt', format='mjd')\n\n data['TSTART'] = np.float32(start_time.utc.mjd)\n data['TSTOP'] = np.float32(stop_time.utc.mjd)\n data['TSTART_STR'] = str(start_time.utc.iso[:-4])\n data['TSTOP_STR'] = str(stop_time.utc.iso[:-4])\n\n data['N_TELS'] = table.meta['N_TELS']\n data['TELLIST'] = table.meta['TELLIST']\n try:\n data['OBJECT'] = table.meta['OBJECT']\n except KeyError:\n data['OBJECT'] = \"\"\n data['RA_OBJ'] = np.float32(table.meta['RA_OBJ'])\n data['DEC_OBJ'] = np.float32(table.meta['DEC_OBJ'])\n\n # data['OBS_MODE'] = table.meta['OBS_MODE']\n\n try:\n data['MUONEFF'] = np.float32(table.meta['MUONEFF'])\n except KeyError:\n data['MUONEFF'] = np.float32(-1)\n\n # Calculate some summary statistics for important event columns\n data['EVENT_COUNT'] = len(table)\n data['EVENT_TIME_MIN'] = table['TIME'].min()\n data['EVENT_TIME_MAX'] = table['TIME'].max()\n data['EVENT_ENERGY_MEDIAN'] = np.float32(np.median(table['ENERGY']))\n data['EVENT_RA_MEDIAN'] = np.float32(np.median(table['RA']))\n data['EVENT_DEC_MEDIAN'] = np.float32(np.median(table['DEC']))\n\n return data", "def set_list_of_expenses(self):\n fix_exp = DB.get_fixed_expenses(self.customer.email)\n var_exp = DB.get_variable_expenses(self.customer.email)\n self.listOfExpensesSEK.item(2).setText(str(fix_exp[\"subscription\"]))\n self.listOfExpensesSEK.item(3).setText(str(fix_exp[\"insurance\"]))\n self.listOfExpensesSEK.item(4).setText(str(fix_exp[\"rent\"]))\n self.listOfExpensesSEK.item(5).setText(str(fix_exp[\"others\"]))\n\n self.listOfExpensesSEK.item(11).setText(str(var_exp[\"food\"]))\n self.listOfExpensesSEK.item(12).setText(str(var_exp[\"bills\"]))\n self.listOfExpensesSEK.item(13).setText(str(var_exp[\"transportation\"]))\n self.listOfExpensesSEK.item(14).setText(str(var_exp[\"hygien\"]))\n self.listOfExpensesSEK.item(15).setText(str(var_exp[\"clothes\"]))\n self.listOfExpensesSEK.item(16).setText(str(var_exp[\"entertainment\"]))\n self.listOfExpensesSEK.item(17).setText(str(var_exp[\"others\"]))", "def set_list_of_expenses(self):\n fix_exp = DB.get_fixed_expenses(self.customer.email)\n var_exp = DB.get_variable_expenses(self.customer.email)\n self.listOfExpensesSEK.item(2).setText(str(fix_exp[\"subscription\"]))\n self.listOfExpensesSEK.item(3).setText(str(fix_exp[\"insurance\"]))\n self.listOfExpensesSEK.item(4).setText(str(fix_exp[\"rent\"]))\n self.listOfExpensesSEK.item(5).setText(str(fix_exp[\"others\"]))\n\n self.listOfExpensesSEK.item(11).setText(str(var_exp[\"food\"]))\n self.listOfExpensesSEK.item(12).setText(str(var_exp[\"bills\"]))\n self.listOfExpensesSEK.item(13).setText(str(var_exp[\"transportation\"]))\n self.listOfExpensesSEK.item(14).setText(str(var_exp[\"hygien\"]))\n self.listOfExpensesSEK.item(15).setText(str(var_exp[\"clothes\"]))\n self.listOfExpensesSEK.item(16).setText(str(var_exp[\"entertainment\"]))\n self.listOfExpensesSEK.item(17).setText(str(var_exp[\"others\"]))", "def construct(cls, obs_lists, platform_id):\n step = 0\n LookupTable = []\n while step < obs_lists.shape[0]:\n K = str(int(obs_lists[step, 0]))\n LookupTable.append(BaseCreateFactory(K, platform_id).create_object())\n step += 1\n return LookupTable", "def summary_info_events(filename):\n # filename = self.out_filename('events')\n print('Reading {}'.format(filename))\n table = Table.read(str(filename), hdu='EVENTS')\n data = dict()\n \n # Copy over header info to the summary table\n data['RA_PNT'] = 
np.float32(table.meta['RA_PNT'])\n data['DEC_PNT'] = np.float32(table.meta['DEC_PNT'])\n #data['GLON_PNT'] = np.float32(table.meta['GLON_PNT'])\n #data['GLAT_PNT'] = np.float32(table.meta['GLAT_PNT'])\n data['ALT_PNT'] = np.float32(table.meta['ALT_PNT'])\n data['AZ_PNT'] = np.float32(table.meta['AZ_PNT'])\n data['ZEN_PNT'] = np.float32(90. - table.meta['ALT_PNT'])\n data['ONTIME'] = np.float32(table.meta['ONTIME'])\n data['LIVETIME'] = np.float32(table.meta['LIVETIME'])\n data['DEADC'] = np.float32(table.meta['DEADC'])\n\n MJDREFI = table.meta['MJDREFI']\n MJDREFF = table.meta['MJDREFF']\n MJDREF = MJDREFI + MJDREFF\n\n TSTART_MET = table.meta['TSTART'] / 3600. / 24.\n TSTOP_MET = table.meta['TSTOP'] / 3600. / 24.\n\n start_time = Time(MJDREF + TSTART_MET, scale='tt', format='mjd')\n stop_time = Time(MJDREF + TSTOP_MET, scale='tt', format='mjd')\n\n data['TSTART'] = np.float32(start_time.utc.mjd)\n data['TSTOP'] = np.float32(stop_time.utc.mjd)\n data['TSTART_STR'] = str(start_time.utc.iso[:-4])\n data['TSTOP_STR'] = str(stop_time.utc.iso[:-4])\n\n data['N_TELS'] = table.meta['N_TELS']\n data['TELLIST'] = table.meta['TELLIST']\n try:\n data['OBJECT'] = table.meta['OBJECT']\n except KeyError:\n data['OBJECT'] = \"\"\n data['RA_OBJ'] = np.float32(table.meta['RA_OBJ'])\n data['DEC_OBJ'] = np.float32(table.meta['DEC_OBJ'])\n\n # data['OBS_MODE'] = table.meta['OBS_MODE']\n\n try:\n data['MUONEFF'] = np.float32(table.meta['MUONEFF'])\n except KeyError:\n data['MUONEFF'] = np.float32(-1)\n\n # Calculate some summary statistics for important event columns\n data['EVENT_COUNT'] = len(table)\n data['EVENT_TIME_MIN'] = table['TIME'].min()\n data['EVENT_TIME_MAX'] = table['TIME'].max()\n data['EVENT_ENERGY_MEDIAN'] = np.float32(np.median(table['ENERGY']))\n data['EVENT_RA_MEDIAN'] = np.float32(np.median(table['RA']))\n data['EVENT_DEC_MEDIAN'] = np.float32(np.median(table['DEC']))\n\n return data", "def exposure_plots(self, energy=1000.):\n cfg = configuration.Configuration(os.path.expandvars('.'), quiet=True);\n exp = cfg.irfs.exposure(0, energy) \n hf = hpm.HPskyfun('front-1000 exp', exp, 64);\n expf = hf.getcol()\n emeanf = expf.mean()\n euw=hpm.HParray('FRONT exposure @ {} MeV / {:.2e}'.format(energy, emeanf), expf/emeanf)\n fig,ax=plt.subplots(figsize=(12,6))\n euw.plot(axes=ax,vmin=0.80,vmax=1.20, title=euw.name, \n cmap=plt.get_cmap('coolwarm')).grid(color='grey');\n\n return fig", "def summary_info_aeff(filename):\n # filename = self.out_filename('aeff')\n print('Reading {}'.format(filename))\n table = Table.read(str(filename), hdu='AEFF_2D')\n\n data = dict()\n\n # Copy over header info to the summary table\n data['LO_THRES'] = table.meta['LO_THRES']\n data['HI_THRES'] = table.meta['HI_THRES']\n\n # Summary stats on IRF file content\n data['EFFAREA_MAX'] = table['EFFAREA'].max()\n data['EFFAREA_RECO_MAX'] = table['EFFAREA_RECO'].max() \n return data", "def iter_input_annotation_output_df_df(inspection_count, inspection_index, input_data, input_annotations, output,\n appends_col=False):\n # pylint: disable=too-many-locals, too-many-arguments\n # Performance tips:\n # https://stackoverflow.com/questions/16476924/how-to-iterate-over-rows-in-a-dataframe-in-pandas\n data_before_with_annotations = pandas.merge(input_data, input_annotations, left_on=\"mlinspect_index\",\n right_on=\"mlinspect_index\")\n joined_df = pandas.merge(data_before_with_annotations, output, left_on=\"mlinspect_index\",\n right_on=\"mlinspect_index\")\n\n column_index_input_end = len(input_data.columns)\n 
column_annotation_current_inspection = column_index_input_end + inspection_index\n column_index_annotation_end = column_index_input_end + inspection_count\n\n input_df_view = joined_df.iloc[:, 0:column_index_input_end - 1]\n input_df_view.columns = input_data.columns[0:-1]\n\n annotation_df_view = joined_df.iloc[:,\n column_annotation_current_inspection:column_annotation_current_inspection + 1]\n\n output_df_view = joined_df.iloc[:, column_index_annotation_end:]\n if not appends_col:\n output_df_view.columns = output.columns[0:-1]\n else: # e.g., __setkey__ can add columns, but they are then behind mlinspect_index\n output_df_view.columns = list(output.columns[0:-2]) + list(output.columns[-1:])\n\n input_rows = get_df_row_iterator(input_df_view)\n annotation_rows = get_df_row_iterator(annotation_df_view)\n output_rows = get_df_row_iterator(output_df_view)\n\n return map(lambda input_tuple: InspectionInputUnaryOperator(*input_tuple),\n zip(input_rows, annotation_rows, output_rows))", "def getUserHistFeatures(transaction_list, coupon_dict, model_start_date, purchase_date):\n feat_header = [\"NoOfPurchases\", \"DaysSinceLastPurchase\", \"NoOfPurchasesLastweek\", \"NoOfPurchasesLast15Days\", \"NoOfPurchasesLast30Days\", \"NoOfPurchasesLast60Days\", \"NoOfPurchasesLast90Days\", \"NoOfPurchasesLast180Days\", \"DaysSincePrevPurchase\", \"NoOfPurchasesPrevweek\", \"NoOfPurchasesPrev15Days\", \"NoOfPurchasesPrev30Days\", \"NoOfPurchasesPrev60Days\", \"NoOfPurchasesPrev90Days\", \"NoOfPurchasesPrev180Days\"]\n\n # getting number of purchases #\n feat_list = [len(transaction_list)]\n\n # initializing variables #\n purchase_small_area_name_dict = {}\n puchase_date_list = []\n capsule_text_dict = {}\n genre_name_dict = {}\n price_rate_list = []\n catalog_price_list = []\n discount_price_list = []\n dispperiod_list = []\n valid_period_list = []\n usable_date_mon_list = {}\n usable_date_tue_list = {}\n usable_date_wed_list = {}\n usable_date_thu_list = {}\n usable_date_fri_list = {}\n usable_date_sat_list = {}\n usable_date_sun_list = {}\n usable_date_hol_list = {}\n usable_date_before_hol_list = {}\n coupon_large_area_name_dict = {}\n coupon_small_area_name_dict = {}\n coupon_ken_name_dict = {}\n days_since_last_purchase = 9999\n last_week_purchase = 0\n last_fifteendays_purchase = 0\n last_thirtydays_purchase = 0\n last_sixtydays_purchase = 0\n last_nintydays_purchase = 0\n\tlast_oneeightydays_purchase = 0\n\tdays_since_prev_purchase = 9999\n\tprev_week_purchase = 0\n prev_fifteendays_purchase = 0\n prev_thirtydays_purchase = 0\n prev_sixtydays_purchase = 0\n prev_nintydays_purchase = 0\n prev_oneeightydays_purchase = 0\n for transaction in transaction_list:\n diff_days = (model_start_date - datetime.datetime.strptime(transaction['I_DATE'], \"%Y-%m-%d %H:%M:%S\").date()).days\n if diff_days < days_since_last_purchase:\n days_since_last_purchase = diff_days\n if diff_days <= 7:\n last_week_purchase += 1\n if diff_days <= 15:\n last_fifteendays_purchase += 1\n if diff_days <= 30:\n last_thirtydays_purchase += 1\n if diff_days <= 60:\n last_sixtydays_purchase += 1\n if diff_days <= 90:\n last_nintydays_purchase += 1\n\t\tif diff_days <= 180:\n last_oneeightydays_purchase += 1\n\t\t\n\t\tdiff_days = (purchase_date - datetime.datetime.strptime(transaction['I_DATE'], \"%Y-%m-%d %H:%M:%S\").date()).days\n if diff_days < days_since_last_purchase:\n days_since_prev_purchase = diff_days\n if diff_days <= 7:\n prev_week_purchase += 1\n if diff_days <= 15:\n prev_fifteendays_purchase += 1\n if diff_days <= 30:\n 
prev_thirtydays_purchase += 1\n if diff_days <= 60:\n prev_sixtydays_purchase += 1\n if diff_days <= 90:\n prev_nintydays_purchase += 1\n if diff_days <= 180:\n prev_oneeightydays_purchase += 1\n\n coupon_id_dict = coupon_dict[ transaction['COUPON_ID_hash'] ]\n purchase_small_area_name_dict[transaction['SMALL_AREA_NAME']] = purchase_small_area_name_dict.get( transaction['SMALL_AREA_NAME'],0) + 1\n capsule_text_dict[ coupon_id_dict['CAPSULE_TEXT'] ] = capsule_text_dict.get( coupon_id_dict['CAPSULE_TEXT'], 0) + 1\n genre_name_dict[ coupon_id_dict['GENRE_NAME'] ] = genre_name_dict.get( coupon_id_dict['GENRE_NAME'],0 ) + 1\n coupon_large_area_name_dict[ coupon_id_dict['large_area_name'] ] = coupon_large_area_name_dict.get( coupon_id_dict['large_area_name'],0 ) + 1\n coupon_small_area_name_dict[ coupon_id_dict['small_area_name'] ] = coupon_small_area_name_dict.get( coupon_id_dict['small_area_name'],0 ) + 1\n coupon_ken_name_dict[ coupon_id_dict['ken_name'] ] = coupon_ken_name_dict.get( coupon_id_dict['ken_name'],0 ) + 1\n price_rate_list.append( float(coupon_id_dict['PRICE_RATE']) )\n catalog_price_list.append( float(coupon_id_dict['CATALOG_PRICE']) )\n discount_price_list.append( float(coupon_id_dict['DISCOUNT_PRICE']) )\n dispperiod_list.append( float(coupon_id_dict['DISPPERIOD']) )\n if coupon_id_dict['VALIDPERIOD'] not in ('','NA'):\n valid_period_list.append( float(coupon_id_dict['VALIDPERIOD']) )\n if coupon_id_dict['USABLE_DATE_MON'] not in ('','NA'):\n usable_date_mon_list[ float(coupon_id_dict['USABLE_DATE_MON']) ] = usable_date_mon_list.get( float(coupon_id_dict['USABLE_DATE_MON']),0 ) + 1\n usable_date_tue_list[ float(coupon_id_dict['USABLE_DATE_TUE']) ] = usable_date_tue_list.get( float(coupon_id_dict['USABLE_DATE_TUE']),0 ) + 1\n usable_date_wed_list[ float(coupon_id_dict['USABLE_DATE_WED']) ] = usable_date_wed_list.get( float(coupon_id_dict['USABLE_DATE_WED']),0 ) + 1\n usable_date_thu_list[ float(coupon_id_dict['USABLE_DATE_THU']) ] = usable_date_thu_list.get( float(coupon_id_dict['USABLE_DATE_THU']),0 ) + 1\n usable_date_fri_list[ float(coupon_id_dict['USABLE_DATE_FRI']) ] = usable_date_fri_list.get( float(coupon_id_dict['USABLE_DATE_FRI']),0 ) + 1\n usable_date_sat_list[ float(coupon_id_dict['USABLE_DATE_SAT']) ] = usable_date_sat_list.get( float(coupon_id_dict['USABLE_DATE_SAT']),0 ) + 1\n usable_date_sun_list[ float(coupon_id_dict['USABLE_DATE_SUN']) ] = usable_date_sun_list.get( float(coupon_id_dict['USABLE_DATE_SUN']),0 ) + 1\n usable_date_hol_list[ float(coupon_id_dict['USABLE_DATE_HOLIDAY']) ] = usable_date_hol_list.get( float(coupon_id_dict['USABLE_DATE_HOLIDAY']),0 ) + 1\n usable_date_before_hol_list[ float(coupon_id_dict['USABLE_DATE_BEFORE_HOLIDAY']) ] = usable_date_before_hol_list.get( float(coupon_id_dict['USABLE_DATE_BEFORE_HOLIDAY']),0 )+1\n else:\n usable_date_mon_list[3.0] = usable_date_mon_list.get( 3.0,0 ) + 1\n usable_date_tue_list[3.0] = usable_date_tue_list.get( 3.0,0 ) + 1\n usable_date_wed_list[3.0] = usable_date_wed_list.get( 3.0,0 ) + 1\n usable_date_thu_list[3.0] = usable_date_thu_list.get( 3.0,0 ) + 1\n usable_date_fri_list[3.0] = usable_date_fri_list.get( 3.0,0 ) + 1\n usable_date_sat_list[3.0] = usable_date_sat_list.get( 3.0,0 ) + 1\n usable_date_sun_list[3.0] = usable_date_sun_list.get( 3.0,0 ) + 1\n usable_date_hol_list[3.0] = usable_date_hol_list.get( 3.0,0 ) + 1\n usable_date_before_hol_list[3.0] = usable_date_before_hol_list.get( 3.0,0 ) + 1\n\n feat_list.extend([days_since_last_purchase, last_week_purchase, last_fifteendays_purchase, 
last_thirtydays_purchase, last_sixtydays_purchase, last_nintydays_purchase, last_oneeightydays_purchase, days_since_prev_purchase, prev_week_purchase, prev_fifteendays_purchase, prev_thirtydays_purchase, prev_sixtydays_purchase, prev_nintydays_purchase, prev_oneeightydays_purchase])\n return feat_list, feat_header, [purchase_small_area_name_dict, capsule_text_dict, genre_name_dict, coupon_large_area_name_dict, coupon_small_area_name_dict, coupon_ken_name_dict, price_rate_list, catalog_price_list, discount_price_list, dispperiod_list, valid_period_list, usable_date_mon_list, usable_date_tue_list, usable_date_wed_list, usable_date_thu_list, usable_date_fri_list, usable_date_sat_list, usable_date_sun_list, usable_date_hol_list, usable_date_before_hol_list]", "def iter_input_annotation_output_df_pair_df(inspection_count, inspection_index, x_data, x_annotations, y_data,\n y_annotations, output):\n # pylint: disable=too-many-locals, too-many-arguments\n # Performance tips:\n # https://stackoverflow.com/questions/16476924/how-to-iterate-over-rows-in-a-dataframe-in-pandas\n x_before_with_annotations = pandas.merge(x_data, x_annotations, left_on=\"mlinspect_index_x\",\n right_on=\"mlinspect_index\", suffixes=[\"_x_data\", \"_x_annot\"])\n y_before_with_annotations = pandas.merge(y_data, y_annotations, left_on=\"mlinspect_index_y\",\n right_on=\"mlinspect_index\", suffixes=[\"_y_data\", \"_y_annot\"])\n df_x_output = pandas.merge(x_before_with_annotations, output, left_on=\"mlinspect_index_x\",\n right_on=\"mlinspect_index_x\", suffixes=[\"_x\", \"_output\"])\n df_x_output_y = pandas.merge(df_x_output, y_before_with_annotations, left_on=\"mlinspect_index_y\",\n right_on=\"mlinspect_index_y\", suffixes=[\"_x_output\", \"_y_output\"])\n\n column_index_x_end = len(x_data.columns)\n column_annotation_x_current_inspection = column_index_x_end + inspection_index\n column_index_output_start = column_index_x_end + inspection_count\n column_index_y_start = column_index_output_start + len(output.columns) - 2\n column_index_y_end = column_index_y_start + len(y_data.columns) - 1\n column_annotation_y_current_inspection = column_index_y_end + inspection_index\n\n df_x_output_y = df_x_output_y.drop(['mlinspect_index_x_output', 'mlinspect_index_y'], axis=1)\n\n input_x_view = df_x_output_y.iloc[:, 0:column_index_x_end - 1]\n input_x_view.columns = x_data.columns[0:-1]\n annotation_x_view = df_x_output_y.iloc[:, column_annotation_x_current_inspection:\n column_annotation_x_current_inspection + 1]\n annotation_x_view.columns = [annotation_x_view.columns[0].replace(\"_x_output\", \"\")]\n\n output_df_view = df_x_output_y.iloc[:, column_index_output_start:column_index_y_start]\n output_df_view.columns = [column for column in output.columns if\n (column not in (\"mlinspect_index_x\", \"mlinspect_index_y\"))]\n\n input_y_view = df_x_output_y.iloc[:, column_index_y_start:column_index_y_end]\n input_y_view.columns = y_data.columns[0:-1]\n annotation_y_view = df_x_output_y.iloc[:, column_annotation_y_current_inspection:\n column_annotation_y_current_inspection + 1]\n annotation_y_view.columns = [annotation_y_view.columns[0].replace(\"_y_output\", \"\")]\n\n input_iterators = []\n annotation_iterators = []\n\n input_iterators.append(get_df_row_iterator(input_x_view))\n annotation_iterators.append(get_df_row_iterator(annotation_x_view))\n\n input_iterators.append(get_df_row_iterator(input_y_view))\n annotation_iterators.append(get_df_row_iterator(annotation_y_view))\n\n input_rows = map(list, zip(*input_iterators))\n 
annotation_rows = map(list, zip(*annotation_iterators))\n\n output_rows = get_df_row_iterator(output_df_view)\n\n return map(lambda input_tuple: InspectionInputNAryOperator(*input_tuple),\n zip(input_rows, annotation_rows, output_rows))" ]
[ "0.70176095", "0.6014823", "0.5961176", "0.5897251", "0.5735278", "0.57064164", "0.5697232", "0.5609351", "0.5595745", "0.5515109", "0.5511498", "0.5509595", "0.5412777", "0.5358326", "0.52642626", "0.5259989", "0.52551216", "0.5245346", "0.5239172", "0.52262944", "0.52035564", "0.52017057", "0.519556", "0.517978", "0.5164753", "0.5161697", "0.5160446", "0.5156876", "0.5156876", "0.5142819", "0.5137116", "0.51286376", "0.51281196", "0.5088688", "0.5080511", "0.5056763", "0.50532967", "0.50447434", "0.50441253", "0.5033711", "0.50151056", "0.49982694", "0.499482", "0.49822322", "0.49685988", "0.49663112", "0.49563715", "0.49406803", "0.4937258", "0.49332097", "0.4931526", "0.49089047", "0.48913977", "0.48830086", "0.48811662", "0.4880393", "0.4876707", "0.48685318", "0.4866723", "0.48638877", "0.48422793", "0.48352847", "0.48339847", "0.48292506", "0.4822682", "0.48147786", "0.48041615", "0.47986794", "0.47723192", "0.47641143", "0.47597665", "0.47511917", "0.47466773", "0.4741142", "0.47378972", "0.47373322", "0.4737157", "0.47306624", "0.4721988", "0.4719652", "0.47190282", "0.47176144", "0.47154498", "0.47152162", "0.47140148", "0.47040933", "0.47012487", "0.4698831", "0.4692495", "0.46915233", "0.46906456", "0.4689461", "0.4689461", "0.46872947", "0.4687009", "0.468056", "0.46795246", "0.46770784", "0.46752012", "0.46679327" ]
0.7040096
0
Save exposure table to a FITS file.
def save(self, filename=None, overwrite=True):\n        if filename is None:\n            filename = self._output\n        self._exposure_table.write(filename, overwrite=overwrite)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def save_as_fits(self, filename):", "def write(self, filename, **kwargs):\n self.to_table().write(filename, format='fits', **kwargs)", "def save_fits(df, fname):\n df = df.reset_index()\n outtable = Table.from_pandas(df)\n Path(fname).parent.mkdir(parents=True, exist_ok=True)\n outtable.write(fname, format='fits', overwrite=True)", "def save_fits(data, fname):\n\tcols = fits.ColDefs(np.copy(data)) # This is somehow necessary.\n\ttbhdu = fits.BinTableHDU.from_columns(cols)\n\ttbhdu.writeto(fname, clobber=True)\n\t\n\treturn", "def write_fits(self, name=None, output_path=None):\n pass", "def saveFits(self, filename):\n \n if isinstance(self.res, type(None)):\n raise Exception('Result is not yet aviable.')\n \n header = fits.Header()\n header['NAXIS1'] = self.naxis\n header['NAXIS2'] = self.naxis\n header['CTYPE1'] = 'RA---SIN'\n header['CTYPE2'] = 'DEC--SIN'\n header['CDELT1'] = - self.fov/(np.pi/180 * self.naxis)\n header['CDELT2'] = self.fov/(np.pi/180 * self.naxis)\n header['BUNIT'] = 'JY/PIXEL'\n \n hdu = fits.PrimaryHDU(self.res, header=header)\n hdulist = fits.HDUList([hdu])\n hdulist.writeto(filename, overwrite=True)\n \n print(\"Saved as '%s'.\" %(filename))", "def save_fits(self, name: str, hdu):\r\n hdu.writeto(self._path_for_fits(name), overwrite=True)", "def export_fits(self, mask=None, **kwargs):\n \n ## Check key word arguments\n save_file = kwargs.pop('save_file', 'image.fits')\n fill_value = kwargs.pop('fill_value',0.)\n \n ## Check if mask provided matches data shape\n if self.is_valid_mask(mask):\n masked_data = np.ma.MasedArray()", "def save_fits(self):\n hdu = fits.PrimaryHDU()\n\n hdu.data = self.original_image.astype('float32')\n hdr = hdu.header\n\n if not self.metadata_filename:\n # Let the user choose a JSON file containing fits header.\n self.metadata_filename = self.get_fits_metadata()\n\n if self.metadata_filename == None:\n # The user didn't select a file so they must create one.\n self.metadata_filename = self.user_create_fits_header()\n \n if self.metadata_filename:\n # A metadata file exists so load it into a dict that will become the header.\n with open(self.metadata_filename, 'r') as f:\n d = json.load(f)\n\n for k, v in d.items():\n hdr[k] = v\n\n if not self.save_directory:\n self.save_directory = QtWidgets.QFileDialog.getExistingDirectory(\n self,\n \"Select a directory\",\n options=QtWidgets.QFileDialog.DontUseNativeDialog\n )\n\n directory = os.path.join(self.save_directory, self.starting_time)\n\n if not os.path.exists(directory):\n os.makedirs(directory)\n\n path = os.path.join(directory, f'{self._image_counter}.fits')\n hdu.writeto(path)\n self.statusBar().showMessage(f'Saved to {path}.')", "def write(self, filename, *args, **kwargs):\n self.to_fits().writeto(filename, *args, **kwargs)", "def write_fits(self):\n \n import time\n import getpass\n \n formats = {}\n formats['bool'] = 'L'\n formats['int16'] = 'I'\n formats['int32'] = 'J'\n formats['int64'] = 'K'\n formats['float32'] = 'E'\n formats['float64'] = 'D'\n \n formats['>i8'] = 'K'\n formats['>f8'] = 'D'\n \n #### Make the table columns, translating numpy data types to \"TFORM\"\n coldefs = []\n dt = str(np.array(self.images).dtype)\n if 'S' in dt:\n TFORM = 'A'+dt.split('S')[1]\n elif 'U' in dt:\n TFORM = 'A'+dt.split('U')[1]\n \n print(TFORM)\n \n coldefs.append(pyfits.Column(name='images', array=np.array(self.images), format=TFORM))\n \n for column in self.params.keys():\n if column == 'comment':\n coldata = np.array(self.params['comment'])\n else:\n coldata = self.params[column]\n #\n 
dtype = str(coldata.dtype)\n #print column, dtype\n if dtype in formats.keys():\n TFORM=formats[dtype]\n else:\n if ('S' not in dtype) & ('U' not in dtype):\n print('Unrecognized data type in: %s' %(dtype))\n return False\n #\n if 'S' in dtype:\n TFORM = 'A'+dtype.split('S')[1]\n elif 'U' in dtype:\n TFORM = 'A'+dtype.split('U')[1]\n #\n #data = self.params[column]\n if '>' in dtype:\n cast_types = {'>i8':np.int64, '>f8':np.float64}\n coldata = np.cast[cast_types[dtype]](coldata)\n #\n coldefs.append(pyfits.Column(name=column, array=coldata, format=TFORM))\n \n #### Done, now make the binary table\n tbhdu = pyfits.BinTableHDU().from_columns(coldefs)\n \n linehdu = pyfits.ImageHDU(data=self.marked_reads, name='FLAGGED')\n \n #### Primary HDU\n hdu = pyfits.PrimaryHDU()\n thdulist = pyfits.HDUList([hdu, tbhdu, linehdu])\n\n #### Add modification time of \"infile\" to FITS header\n infile_mod_time = time.strftime(\"%m/%d/%Y %I:%M:%S %p\",\n time.localtime()) # os.path.getmtime(self.filename)))\n \n thdulist[0].header['MODTIME'] = infile_mod_time\n thdulist[0].header['USER'] = getpass.getuser()\n \n thdulist.writeto(self.logfile, clobber=True)\n \n print('Log to file %s' %(self.logfile))", "def write_to_fits(self, filename=None, gain=GAIN):\n\n hdu_new = self.hdu_ideal\n hdu_new[1].data = (self.data/gain).astype('uint16') # Convert to ADU in 16 bit integers.\n\n if filename is None:\n filename = self.ima_path[:-5] + self.modif_str + '.fits'\n hdu_new.writeto(filename, overwrite=True)\n\n print('Writing to file: ' + filename)", "def to_fits(self, imagename, fitsname=None, script='to_fits', del_script=True, overwrite=False):\n input_image = imagename\n ct.exportfits(input_image, fitsname=fitsname, script=script, overwrite=overwrite)", "def save(file, table):\n pq.write_table(pa.Table.from_pandas(table), file)", "def fitswrite(img, imgname, **kwargs):\n try:\n if kwargs.has_key('header'):\n hdu = pyfits.PrimaryHDU(img, header = kwargs['header'])\n else:\n hdu = pyfits.PrimaryHDU(img)\n hdu.writeto(imgname)\n except IOError:\n print \"FITSWRITE: Unable to write FITS image %s. 
Stopping.\" %imgname\n \n return", "def wfits(self, filename=None):\n with self.lock:\n dark = self.dark\n if not filename:\n if dark != 0:\n filename = self.getNextFilename(\"dark\")\n else:\n filename = self.getNextFilename(\"object\")\n with self.lock:\n if(self.data.size == 0):\n raise FliError(\"No image available\")\n hdu = pyfits.PrimaryHDU(self.data)\n hdr = hdu.header\n with self.lock:\n hdr.set('DATE', self.timestamp, 'exposure begin date')\n hdr.set('INSTRUME', self.devname, 'this instrument')\n hdr.set('SERIAL', self.devsn, 'serial number')\n hdr.set('EXPTIME', self.exptime, 'exposure time (ms)')\n hdr.set('VBIN', self.vbin, 'vertical binning')\n hdr.set('HBIN', self.hbin, 'horizontal binning')\n hdr.set('CCD-TEMP', self.temp, 'CCD temperature')\n if dark != 0:\n hdr.set('SHUTTER', 'CLOSE', 'shutter status')\n else:\n hdr.set('SHUTTER', 'OPEN', 'shutter status')\n hdr.set('CCDAREA', '[%d:%d,%d:%d]' % self.expArea, 'image area')\n hdu.writeto(filename, overwrite=True, checksum=True)\n with self.lock:\n self.filename = filename", "def export(stars, filepath):\n\timport inspect\n\ttry:\n\t\timport pyfits\n\texcept:\n\t\tprint \"pyfits could not be imported, no problem if you don't need it.\"\n\t\n\tallattributes = set([])\n\tfor s in stars:\n\t\ts.good = s.isgood()\n\t\t\n\t\t\n\t\tattributes = inspect.getmembers(s, lambda a:not(inspect.isroutine(a)))\n\t\tattributes = set([a[0] for a in attributes if not(a[0].startswith('__') and a[0].endswith('__')) and not 'committee' in a[0]])\n\t\t\n\t\tallattributes = attributes.union(allattributes)\n\t\t\n\tcols = [pyfits.Column(name=a, format=\"D\", array=np.array([convert(s, a) for s in stars])) for a in sorted(list(allattributes))]\n\t\n\tcoldefs = pyfits.ColDefs(cols)\n\ttbhdu = pyfits.new_table(coldefs)\n\ttbhdu.writeto(filepath, clobber=True)\n\tprint \"Wrote %s\" % (filepath)", "def save_phot_fits(df, fname):\n keep_col_phot = [\"MJD\", \"FLUXCAL\", \"FLUXCALERR\", \"FLT\"]\n # eliminate repeated rows (-777.0)\n df_phot = df.copy()\n df_phot = df_phot.loc[df_phot[\"FLUXCAL\"].shift() != df_phot[\"FLUXCAL\"]]\n\n if df_phot.MJD.values[-1] == -777.0:\n df_phot = df_phot.drop(df_phot.index[-1])\n if df_phot.MJD.values[0] == -777.0:\n df_phot = df_phot.drop(df_phot.index[0])\n\n mask_seven = df_phot['MJD'] == -777.0\n df_phot.loc[mask_seven, 'SNID'] = 0\n\n df_phot = df_phot.reset_index()\n df_phot_saved = df_phot[keep_col_phot]\n save_fits(df_phot_saved, fname)\n return df_phot", "def write(self, filename, overwrite=True):\n if not overwrite and os.path.exists(filename):\n raise OSError(\"File '%s' already exists.\" % filename)\n\n if self._filename is None:\n # create and write the FITS file from scratch\n prihdu = pyfits.PrimaryHDU(header=self.header)\n prihdu.header['DATE'] = (str(datetime.datetime.now()),\n 'Creation date')\n prihdu.header['AUTHOR'] = ('MPDAF', 'Origin of the file')\n prihdu.header['FORMAT'] = (TABLES_SCHEMA['version'],\n 'Version of the Source format')\n hdulist = pyfits.HDUList([prihdu])\n\n _write_table(self.lines, 'LINES', hdulist)\n _write_table(self.mag, 'MAG', hdulist)\n _write_table(self.z, 'Z', hdulist)\n\n for typ in ('spectra', 'images', 'cubes'):\n for key, obj in getattr(self, typ).items():\n _write_mpdaf_obj(obj, _ATTRIBUTES_TO_EXTNAME[typ], key,\n hdulist)\n\n # tables\n for key, tab in self.tables.items():\n _write_table(tab, 'TAB_%s' % key, hdulist)\n\n # save to disk\n hdulist.writeto(filename, overwrite=True, output_verify='fix')\n else:\n # update the existing FITS file\n if 
os.path.abspath(filename) != self._filename:\n shutil.copy(self._filename, filename)\n\n with pyfits.open(filename, mode='update') as hdulist:\n hdulist[0].header = self.header\n\n _write_table(self.lines, 'LINES', hdulist)\n _write_table(self.mag, 'MAG', hdulist)\n _write_table(self.z, 'Z', hdulist)\n\n for typ in ('spectra', 'images', 'cubes'):\n obj = getattr(self, typ)\n extname = _ATTRIBUTES_TO_EXTNAME[typ]\n for key in obj.loaded_ext:\n _write_mpdaf_obj(obj[key], extname, key, hdulist)\n for key in obj.deleted_ext:\n _remove_hdu(hdulist, '{}_{}_DATA'.format(extname, key))\n _remove_hdu(hdulist, '{}_{}_STAT'.format(extname, key))\n\n # tables\n for key in self.tables.loaded_ext:\n _write_table(self.tables[key], 'TAB_%s' % key, hdulist)\n for key in self.tables.deleted_ext:\n _remove_hdu(hdulist, 'TAB_%s' % key)\n\n hdulist.flush()", "def create_fits_file(fits, cols, cdata):\n dlist = []\n for k in range(0, len(cols)):\n aent = numpy.array(cdata[k])\n dcol = pyfits.Column(name=cols[k], format='F', array=aent)\n dlist.append(dcol)\n\n dcols = pyfits.ColDefs(dlist)\n tbhdu = pyfits.BinTableHDU.from_columns(dcols)\n\n mcf.rm_files(fits)\n tbhdu.writeto(fits)", "def dump(self, ipoint, isave):\n\n for key, vals in self.resfi.items():\n outName = '{}/{}_{}_{}.hdf5'.format(self.outDir,\n self.dbName, key, self.num)\n if vals is not None:\n # transform to astropy table to dump in hdf5 file\n tab = Table.from_pandas(vals)\n keyhdf = 'metric_{}_{}_{}'.format(self.num, ipoint, isave)\n tab.write(outName, keyhdf, append=True, compression=True)\n\n # reset the metric after dumping\n for metric in self.metricList:\n self.resfi[metric.name] = pd.DataFrame()", "def write_to_file(data, filename):\n fimg = fits.HDUList()\n fimghdu = fits.PrimaryHDU()\n fimghdu.data = data\n fimg.append(fimghdu)\n fimg.writeto(filename, overwrite=True)\n print(' wrote output data to: ', filename)", "def save(self, labpath: str) -> None:\n self._table.to_csv(labpath, index=False)\n print(\"# Save experimental data into {0}\".format(labpath))", "def save_as_hdf5(self, filename):", "def save_to_fits(self, filename, comment=None, overwrite = False):\n\n\n hdu = fits.PrimaryHDU(self.flux)\n hdu.header = self.header\n\n # Update header information\n crval = self.dispersion[0]\n cd = self.dispersion[1]-self.dispersion[0]\n crpix = 1\n\n hdu.header['CRVAL1'] = crval\n hdu.header['CD1_1'] = cd\n hdu.header['CDELT1'] = cd\n hdu.header['CRPIX1'] = crpix\n\n hdu.header['HISTORY'] = '1D spectrum generated with SpecOneD'\n\n if comment:\n hdu.header['HISTORY'] = comment\n\n hdul = fits.HDUList([hdu])\n\n try:\n hdul.writeto(filename, overwrite = overwrite)\n except:\n raise ValueError(\"Spectrum could not be saved. 
Maybe a file with the same name already exists and overwrite is False\")", "def save(self):\n if os.path.isfile(self.filename): os.remove(self.filename)\n fits.HDUList([self.primary_hdu, self.energs_hdu, self.params_hdu, self.spectra_hdu]).writeto(self.filename)", "def tofits(outfilename, pixelarray, hdr=None, verbose=True):\n # print \"LOGX:: Entering `tofits` method/function in %(__file__)s\" %\n # globals()\n pixelarrayshape = pixelarray.shape\n if verbose:\n print(\"FITS export shape : (%i, %i)\" % (pixelarrayshape[0], pixelarrayshape[1]))\n\n if pixelarray.dtype.name == \"bool\":\n pixelarray = np.cast[\"uint8\"](pixelarray)\n\n if os.path.isfile(outfilename):\n os.remove(outfilename)\n\n if hdr == None: # then a minimal header will be created\n hdu = pyfits.PrimaryHDU(pixelarray.transpose())\n else: # this if else is probably not needed but anyway ...\n hdu = pyfits.PrimaryHDU(pixelarray.transpose(), hdr)\n\n hdu.writeto(outfilename, output_verify='ignore')\n\n if verbose:\n print(\"Wrote %s\" % outfilename)", "def write_file(self, i, path, fout):\n\n test_file = path + '/' + self.output[i]\n # Write file name\n print(test_file, file=fout, end='\\n\\n')\n\n extension = os.path.splitext(test_file)[1]\n if extension == '.fits' or extension == 'FITS':\n import subprocess\n prog = self.bindir + '/fits2ascii.py -i ' + test_file\n output = subprocess.check_output(prog.split(), shell=False)\n data = output.decode()\n else:\n fin = open(test_file, 'r')\n data = fin.read()\n fin.close()\n #fout.write(data)\n print(data, file=fout)\n print(file=fout, end='\\n')", "def write(self, filename=None):\n if filename == None:\n filename = self.ofilename\n\n ofile = open(filename, 'w')\n\n ofile.write('# Susceptibility: %E d(susc): %E Coercivity: %E d(coer): %E\\n' % (self.susceptibility_mean, self.susceptibility_std, self.coercivity_mean, self.coercivity_std) )\n ofile.write('# H[] M[] Mfit[]\\n')\n\n #for i in range(len(self.h)):\n # ofile.write(\" %12.10f %12.10f %12.10f\\n\" % ( self.h[i], self.m[i], self.m_fit[i] ) )\n\n ofile.close()", "def to_fits(self, header=None, energy_unit='TeV', effarea_unit='m2', **kwargs):\n\n self.energy_lo = self.energy_lo.to(energy_unit)\n self.energy_hi = self.energy_hi.to(energy_unit)\n self.effective_area = self.effective_area.to(effarea_unit)\n\n hdu = table_to_fits_table(self.to_table())\n\n if header is None:\n header = hdu.header\n\n # Write FITS extension header\n header['EXTNAME'] = 'SPECRESP', 'Name of this binary table extension'\n header['TELESCOP'] = 'DUMMY', 'Mission/satellite name'\n header['INSTRUME'] = 'DUMMY', 'Instrument/detector'\n header['FILTER'] = 'NONE', 'Filter information'\n header['HDUCLASS'] = 'OGIP', 'Organisation devising file format'\n header['HDUCLAS1'] = 'RESPONSE', 'File relates to response of instrument'\n header['HDUCLAS2'] = 'SPECRESP', 'Effective area data is stored'\n header['HDUVERS '] = '1.1.0', 'Version of file format'\n\n header['PHAFILE'] = ('', 'PHA file for which ARF was produced')\n\n # Obsolete ARF headers, included for the benefit of old software\n header['ARFVERSN'] = '1992a', 'Obsolete'\n header['HDUVERS1'] = '1.0.0', 'Obsolete'\n header['HDUVERS2'] = '1.1.0', 'Obsolete'\n\n for key, value in kwargs.items():\n header[key] = value\n\n header['LO_THRES'] = self.energy_thresh_lo.value\n header['HI_THRES'] = self.energy_thresh_hi.value\n\n prim_hdu = fits.PrimaryHDU()\n hdu.header = header\n return fits.HDUList([prim_hdu, hdu])", "def save_scatter_table(self, fn, description=\"\"):\n data = {\n \"description\": 
description,\n \"time\": datetime.now(),\n \"psd_scatter\": (self.num_points, self.D_max, self._psd_D, \n self._S_table, self._Z_table, self._angular_table, \n self._m_table, self.geometries),\n \"version\": tmatrix_aux.VERSION\n }\n with open(fn, 'wb') as f:\n pickle.dump(data, f, pickle.HIGHEST_PROTOCOL)", "def write_file(self):\n rl_df, lift_df = self.create_df()\n\n number = re.findall('\\d+', self.url)[0]\n\n if self.write is True:\n with open('house_{}.csv'.format(number), 'w',\n encoding='utf-8-sig') as file:\n rl_df.to_csv(file, sep=';')\n with open('house_lifts_{}.csv'.format(number), 'w',\n encoding='utf-8-sig') as file2:\n lift_df.to_csv(file2, sep=';')", "def write(filename, data, extname=None, extver=None, header=None,\n clobber=False, ignore_empty=False, units=None, table_type='binary',\n names=None, write_bitcols=False, compress=None, tile_dims=None,\n **keys):\n if keys:\n import warnings\n warnings.warn(\n \"The keyword arguments '%s' are being ignored! This warning \"\n \"will be an error in a future version of `fitsio`!\" % keys,\n DeprecationWarning, stacklevel=2)\n\n kwargs = {\n 'clobber': clobber,\n 'ignore_empty': ignore_empty\n }\n with FITS(filename, 'rw', **kwargs) as fits:\n fits.write(data,\n table_type=table_type,\n units=units,\n extname=extname,\n extver=extver,\n compress=compress,\n header=header,\n names=names,\n write_bitcols=write_bitcols,\n tile_dims=tile_dims)", "def write(self, filename, energy_unit='TeV', effarea_unit='m2',\n *args, **kwargs):\n self.to_fits(energy_unit=energy_unit, effarea_unit=effarea_unit).writeto(\n filename, *args, **kwargs)", "def save_table(date, table):\n if os.path.isfile(date+\".table\"):\n file_using = open(date+\".table\", \"w\")\n else:\n return False\n file_using.seek(0)\n file_using.truncate()\n for line in table:\n file_using.write(\"{},{},{},{},{}\\n\".format(line[0], line[1], line[2], line[3], line[4]))\n file_using.close()", "def tofits(self, filename=None):\n robot_array = self.robot_array()\n target_array = self.target_array()\n fitsio.write(filename, robot_array, clobber=True)\n fitsio.write(filename, target_array, clobber=False)\n return", "def write_table(table, file_path):\n\n\twith open(file_path, 'w') as file:\n\t\tfile.write(table)", "def save_tiff(self, to_file=None):\n self.tif_file.clear() # Empty the array first\n\n # Header\n byteo = 'II'\n if self.byteOrder != 'little':\n byteo = 'MM'\n self.tif_file.insert_bytes(list(byteo.encode())) # byte order\n self.tif_file.insert_int(42, 2) # Magic number\n self.tif_file.insert_int(8, 4) # first IFD always at 0x08\n\n for ifd in self.ifds:\n # self.calculateIFDSpace(ifd) # Readjusts counts because of changes to image data\n endpos = self.save_ifd(ifd)\n self.save_image(ifd, endpos)\n\n self.tif_file.write(to_file) # lastly, write to file", "def save_inventory(file_name, table):\r\n with open(file_name, 'w') as objFile:\r\n for cd in table:\r\n objFile.write(cd.saveFormat())\r\n return table", "def writeto(self, output):\n\n hdu = pyfits.PrimaryHDU(data=self.integrated_psf)\n (year, month, day, hour, minute, second, weekday, DOY, DST) = \\\n time.gmtime()\n hdu.header.update(\"DATE\", \"%4d-%02d-%02dT%02d:%02d:%02d\" %\n (year, month, day, hour, minute, second))\n hdu.header.update(\"FILENAME\", os.path.basename(output),\n comment=\"Name of this file\")\n hdu.header.update(\"INSTRUME\", self.instrument, \"Instrument name\")\n\n # Copy some specific keywords from the input header.\n ihdr = self.header\n if \"BUNIT\" in ihdr:\n hdu.header.update(\"BUNIT\", 
ihdr.get(\"BUNIT\"))\n if \"ERR_BUDG\" in ihdr:\n hdu.header.update(\"ERR_BUDG\", ihdr.get(\"ERR_BUDG\"),\n comment=\"Optical error budget version number\")\n if \"SI_FP\" in ihdr:\n hdu.header.update(\"SI_FP\", ihdr.get(\"SI_FP\"),\n comment=\"Focal plane for OPD calculation\")\n if \"OPD_WFE\" in ihdr:\n hdu.header.update(\"OPD_WFE\", ihdr.get(\"OPD_WFE\"),\n comment=\"OPD wavefront error (nm)\")\n if \"W\" in ihdr:\n hdu.header.update(\"W\", ihdr.get(\"W\"),\n comment=\"Flat width of hex segment (m)\")\n if \"GAP\" in ihdr:\n hdu.header.update(\"GAP\", ihdr.get(\"GAP\"),\n comment=\"Gap width between hex segments (m)\")\n if \"EDGE\" in ihdr:\n hdu.header.update(\"EDGE\", ihdr.get(\"EDGE\"),\n comment=\"Edge roll off (m)\")\n if \"SW\" in ihdr:\n hdu.header.update(\"SW\", ihdr.get(\"SW\"),\n comment=\"Obscuring strut width (m)\")\n if \"HTS\" in ihdr:\n hdu.header.update(\"HTS\", ihdr.get(\"HTS\"),\n comment=\"Height of segment isogrid\")\n if \"HT2\" in ihdr:\n hdu.header.update(\"HT2\", ihdr.get(\"HT2\"),\n comment=\"Height of secondary isogrid\")\n if \"HT3\" in ihdr:\n hdu.header.update(\"HT3\", ihdr.get(\"HT3\"),\n comment=\"Height of tertiary isogrid\")\n if \"FL\" in ihdr:\n hdu.header.update(\"FL\", ihdr.get(\"FL\"),\n comment=\"Focal length (m)\")\n\n # Add some keywords.\n if self.phase_file is not None:\n hdu.header.update(\"PHASE\", os.path.basename(self.phase_file),\n \"Name of phase image file\")\n if self.pupil_file is not None:\n hdu.header.update(\"PUPIL\", os.path.basename(self.pupil_file),\n \"Name of pupil image file\")\n hdu.header.update(\"OVERSAMP\", self.oversample, \"Oversampling factor\")\n hdu.header.update(\"CALCTYPE\", self.type,\n \"32 = single precision, 64 = double precision\")\n hdu.header.update(\"DIAMETER\", self.D, \"pupil diameter (meters)\")\n hdu.header.update(\"ORIG_NX\", self.header[\"naxis1\"],\n \"NAXIS1 in input image\")\n hdu.header.update(\"ORIG_NY\", self.header[\"naxis2\"],\n \"NAXIS2 in input image\")\n\n self.putCoordInfo(hdu)\n\n (wavelengths, weights) = self.filter\n if len(wavelengths) >= 99:\n root_wln = \"WAV\"\n root_wgt = \"WGT\"\n else:\n root_wln = \"WAVELN\"\n root_wgt = \"WEIGHT\"\n for i in range(len(wavelengths)):\n keyword = \"%s%d\" % (root_wln, i + 1)\n hdu.header.update(keyword, wavelengths[i],\n \"wavelength in microns\")\n keyword = \"%s%d\" % (root_wgt, i + 1)\n hdu.header.update(keyword, weights[i], \"weight\")\n\n ofd = pyfits.HDUList(hdu)\n try:\n ofd.writeto(output)\n except IOError as message:\n print(\"ERROR: Output file has NOT been written; \" \\\n \"use <psf>.writeto(output)\")\n print(message)\n return\n self.output_written = True", "def writeto(self, filename, overwrite=True, **kwargs):\n from astropy.io import fits\n return fits.writeto( filename, self.data, header=self.header,\n overwrite=overwrite, **kwargs)", "def file_table(list_observations, indir, informat, outfile):\n print('Creating file summary table ...')\n\n # We gather all infos in a list of dicts and write this\n # as a FITS table at the end.\n # for documentation see http://gamma-astro-data-formats.readthedocs.org/en/latest/data_storage/hdu_index/index.html\n\n HDU_CLASS_TAGS = dict(\n events='events',\n aeff='aeff_2d',\n edisp='edisp_2d',\n psf_3gauss='psf_3gauss',\n psf_king='psf_king',\n psf_table='psf_table',\n gti='gti'\n )\n\n rows = []\n for obs in list_observations.observations:\n testfile=obs.out_filename(\"events\", format=informat, dir=indir)\n try:\n table = Table.read(str(testfile), hdu='EVENTS')\n except Exception:\n print 
\"fits corrupted for file \"+str(filename)\n continue\n #for filetype in ['events', 'aeff', 'edisp', 'psf_3gauss']:\n #for filetype in ['events']:\n for filetype in ['events', 'aeff', 'edisp', 'psf_3gauss']:\n filename = obs.out_filename(filetype, format=informat, dir=indir)\n\n if filename.is_file():\n print('Processing {}'.format(filename))\n\n data = dict()\n\n # OBS_ID\n data['OBS_ID'] = obs.obs_id\n\n # HDU_TYPE\n if filetype in ('psf_3gauss'):\n data['HDU_TYPE'] = 'psf'\n else:\n data['HDU_TYPE'] = str(filetype)\n\n # HDU_CLASS\n data['HDU_CLASS'] = HDU_CLASS_TAGS[filetype]\n\n # FILE_DIR (relative path)\n data['FILE_DIR'] = str(os.path.relpath(str(obs.out_filename(filetype).parent), str(Path(outfile).parent)))\n\n # FILE_NAME\n data['FILE_NAME'] = str(obs.filename(filetype, format=informat).parts[-1])\n\n # HDU-INFOS\n hdu_list = fits.open(str(filename))\n hdu = hdu_list[1]\n header = hdu.header\n data['HDU_NAME'] = hdu.name\n\n # FILE-INFOS\n stat = filename.stat()\n data['SIZE'] = stat.st_size\n data['MTIME'] = stat.st_mtime\n data['MD5'] = hashlib.md5(filename.open('rb').read()).hexdigest()\n\n # if 'HDUCLAS2' in header:\n # data['HDUCLASS'] = header['HDUCLAS2']\n # else:\n # data['HDUCLASS'] = 'EVENTS'\n\n # if its the events-file, use a second dict for the gti-hdu\n if filetype == 'events':\n data_gti = dict()\n data_gti['OBS_ID'] = obs.obs_id\n data_gti['HDU_TYPE'] = 'gti'\n data_gti['HDU_CLASS'] = 'gti'\n data_gti['FILE_DIR'] = data['FILE_DIR']\n data_gti['FILE_NAME'] = data['FILE_NAME']\n data_gti['HDU_NAME'] = hdu_list[2].name\n data_gti['SIZE'] = data['SIZE']\n data_gti['MTIME'] = data['MTIME']\n data_gti['MD5'] = data['MD5']\n\n rows.append(data_gti)\n\n rows.append(data)\n hdu_list.close()\n\n else:\n print('File not found: {}'.format(filename))\n\n names = [\n 'OBS_ID', 'HDU_TYPE', 'HDU_CLASS',\n 'FILE_DIR', 'FILE_NAME', 'HDU_NAME',\n 'SIZE', 'MTIME', 'MD5'\n ]\n table = Table(rows=rows, names=names)\n\n print('Writing {}'.format(outfile))\n table.write(str(outfile), overwrite=True)\n # add hdu name\n hdulist = fits.open(str(outfile), mode='update')\n hdulist[1].name = 'HDU_INDEX'\n hdulist.close()", "def save_to_fits(self, fits_file_name, data='image', overwrite=True):\n if data == 'image':\n data_use = self.image\n elif data == 'mask':\n data_use = self.mask\n else:\n raise ValueError('Data can only be \"image\" or \"mask\".')\n img_hdu = fits.PrimaryHDU(data_use)\n\n if self.header is not None:\n img_hdu.header = self.header\n if self.wcs is not None:\n wcs_header = self.wcs.to_header()\n import fnmatch\n for i in wcs_header:\n if i in self.header:\n self.header[i] = wcs_header[i]\n if fnmatch.fnmatch(i, 'PC?_?'):\n self.header['CD' + i.lstrip(\"PC\")] = wcs_header[i]\n img_hdu.header = self.header\n elif self.wcs is not None:\n wcs_header = self.wcs.to_header()\n img_hdu.header = wcs_header\n else:\n img_hdu = fits.PrimaryHDU(data_use)\n\n if os.path.islink(fits_file_name):\n os.unlink(fits_file_name)\n\n img_hdu.writeto(fits_file_name, overwrite=overwrite)\n return img_hdu", "def write(self, tofile=None):\n if tofile is None:\n tofile = self._filename[:-4]+\"_tifinity.tiff\"\n\n with open(tofile, 'wb') as out_file:\n self._tiff.tofile(out_file) # numpy.tofile()", "def save_filter(self, filename, overwrite=False):\n hdu = fits.PrimaryHDU(self.filter, self.header)\n hdu.writeto(filename, clobber=overwrite)\n fits.append(filename, self.approx, self.header)\n fits.append(filename, self.filter + self.approx, self.header)\n fits.append(filename, self.max_scale_image(), 
self.header)", "def save(self, name='stats.fits', comment='', overwrite=True):\n hdus = astropy.io.fits.HDUList()\n header = astropy.io.fits.Header()\n header['TILES'] = self.tiles.tiles_file\n header['START'] = self.start_date.isoformat()\n header['STOP'] = self.stop_date.isoformat()\n header['COMMENT'] = comment\n header['EXTNAME'] = 'STATS'\n hdus.append(astropy.io.fits.PrimaryHDU())\n hdus.append(astropy.io.fits.BinTableHDU(self._data, header=header, name='STATS'))\n config = desisurvey.config.Configuration()\n fullname = config.get_path(name)\n hdus.writeto(fullname, overwrite=overwrite)\n log = desiutil.log.get_logger()\n log.info('Saved stats to {}'.format(fullname))\n if comment:\n log.info('Saved with comment \"{}\".'.format(header['COMMENT']))", "def save_calib_data(self):\r\n \r\n #get data to save\r\n x0 = self.ui.x0.value()\r\n x1 = self.ui.x1.value()\r\n y0 = self.ui.y0.value()\r\n y1 = self.ui.y1.value()\r\n \r\n directory, fileName = os.path.split(self.ui.imagePath.text())\r\n nofpixels = (max(x0,x1)-min(x0,x1))*(max(y0,y1)-min(y0,y1))\r\n\r\n #create a list with everything to be saved \r\n #(fast way in Python to build a string)\r\n strList = [self.ui.filmNumber.text(),#indentifier\r\n fileName, #file name\r\n \"{:d}\".format(x0),#coordinates\r\n \"{:d}\".format(y0),\r\n \"{:d}\".format(x1),\r\n \"{:d}\".format(y1),\r\n \"{:d}\".format(nofpixels)] \r\n \r\n \r\n #save the channel data\r\n for channel in [0,1,2]:\r\n avg = np.average(self.npImg[y0:y1,x0:x1,channel])\r\n std = np.std(self.npImg[y0:y1,x0:x1,channel])\r\n strList.append(\"{:.3f}\".format(avg))\r\n strList.append(\"{:.3f}\".format(std))\r\n\r\n #concatenate the list, using tab as a seperator\r\n saveStr = '\\t'.join(strList)+\"\\n\"\r\n \r\n self.saveTablePath = self.check_save_table_path(self.ui.saveTablePath.text())\r\n \r\n if self.saveTablePath == \"\":\r\n logging.error(\"no valid file selected, nothing written\")\r\n else:\r\n with open(self.saveTablePath,\"a\") as saveTable:\r\n saveTable.write(saveStr)\r\n logging.info((\"info for \"+self.ui.filmNumber.text()+\" written to file\"))", "def save_indicator(table, target_path, var_name, geo):\n table.to_csv(f\"{target_path}/{var_name}.{geo}.csv\", index=False)", "def save(self, inst):\n n = inst.dimensions[\"n\"]\n with open(self.location, \"wt\") as f:\n f.write(f\"measurements: {n}\\n\")\n f.write(f\"time temperature\\n\")\n for time, temp in zip(inst.time, inst.temperature):\n f.write(f\"{time:4} {temp:12}\\n\")", "def save(self, outputfile, normalize_output=False):\n filelib.make_folders([os.path.dirname(outputfile)])\n if not outputfile.endswith('.tif'):\n outputfile += '.tif'\n with warnings.catch_warnings():\n warnings.simplefilter(\"ignore\")\n if normalize_output:\n io.imsave(outputfile, rescale_intensity(self.image, out_range=(0, 255)).astype(np.uint8))\n else:\n io.imsave(outputfile, self.image.astype(np.uint32))\n self.metadata.to_csv(outputfile[:-4] + '.csv', sep='\\t', header=False)\n self.filename = outputfile", "def write_tab(gdf, tab_name, crs_wkt=WKT_SWISS, index=None):\n\n\t\tgdf.crs = WKT_SWISS\n\t\t\n\t\t# fiona can't write integer columns, convert columns to float\n\t\tfor col in gdf.columns:\n\t\t\tstype = str(gdf[col].dtype)\n\t\t\tif stype.startswith('int'):\n\t\t\t\tgdf[col] = gdf[col].astype(float)\n\t\t\t\t\n\t\t# fiona can't write integer columns, convert index to float\n\t\tstype = str(gdf.index.dtype)\n\t\tif stype.startswith('int'):\n\t\t\tgdf.index = gdf.index.astype(float)\n\n\t\tgdf.to_file(tab_name,driver='MapInfo File', 
index=index) \n\t\treturn print(len(gdf), 'row(s) written to mapinfo file.')", "def save(self, fn):\n plt.imsave(fn, self.image)", "def write_to_file(self, overwrite=True):\n t0 = time.time()\n self.hdus.verify()\n if BACKEND == 'astropy':\n self.hdus.writeto(self.filename, overwrite=overwrite)\n elif BACKEND == 'pyfits':\n self.hdus.writeto(self.filename, clobber=overwrite)\n self.logger.debug(\"Took {:.4f} seconds to write to disk\".format(time.time() - t0))", "def save_file(self):\n # paginate over deputies and senators getting their fields\n fieldnames = set([])\n congressmen = self.deputies + self.senators\n for data in congressmen:\n fieldnames = fieldnames.union(data.dump().keys())\n\n\n with open(IDENTITY_FILE_UPDATED, 'a') as csvfile:\n writer = csv.DictWriter(csvfile, fieldnames=list(fieldnames), delimiter=';')\n writer.writeheader()\n\n for data in congressmen:\n writer.writerow(data.dump())", "def file_table(list_observations, indir, informat, outfile):\n print('Creating file summary table ...')\n\n # We gather all infos in a list of dicts and write this\n # as a FITS table at the end.\n # for documentation see http://gamma-astro-data-formats.readthedocs.org/en/latest/data_storage/hdu_index/index.html\n\n HDU_CLASS_TAGS = dict(\n events='events',\n aeff='aeff_2d',\n edisp='edisp_2d',\n psf_3gauss='psf_3gauss',\n psf_king='psf_king',\n psf_table='psf_table',\n gti='gti'\n )\n\n rows = []\n for obs in list_observations.observations:\n events_filename = Path(indir) / obs.filename('events', format=informat)\n try:\n table = Table.read(str(events_filename), hdu='EVENTS')\n except Exception:\n print \"fits corrupted for file \" + str(events_filename)\n continue\n if table.meta[\"OBS_ID\"]!=obs.obs_id:\n continue\n # for filetype in ['events', 'aeff', 'edisp', 'psf_3gauss']:\n # for filetype in ['events']:\n #for filetype in ['events', 'aeff', 'edisp', 'psf_3gauss']:\n for filetype in ['events', 'aeff', 'edisp', 'psf_table']:\n filename = Path(indir) / obs.filename(filetype, format=informat)\n\n if filename.is_file():\n print('Processing {}'.format(filename))\n\n data = dict()\n\n # OBS_ID\n data['OBS_ID'] = obs.obs_id\n\n # HDU_TYPE\n if filetype in ('psf_3gauss'):\n data['HDU_TYPE'] = 'psf'\n elif filetype in ('psf_table'):\n data['HDU_TYPE'] = 'psf'\n else:\n data['HDU_TYPE'] = str(filetype)\n\n # HDU_CLASS\n data['HDU_CLASS'] = HDU_CLASS_TAGS[filetype]\n\n # FILE_DIR (relative path)\n data['FILE_DIR'] = str(\n os.path.relpath(str(obs.out_filename(filetype).parent), str(Path(outfile).parent)))\n\n # FILE_NAME\n data['FILE_NAME'] = str(obs.filename(filetype, format=informat).parts[-1])\n\n # HDU-INFOS\n hdu_list = fits.open(str(filename))\n hdu = hdu_list[1]\n header = hdu.header\n data['HDU_NAME'] = hdu.name\n\n # FILE-INFOS\n stat = filename.stat()\n data['SIZE'] = stat.st_size\n data['MTIME'] = stat.st_mtime\n data['MD5'] = hashlib.md5(filename.open('rb').read()).hexdigest()\n\n # if 'HDUCLAS2' in header:\n # data['HDUCLASS'] = header['HDUCLAS2']\n # else:\n # data['HDUCLASS'] = 'EVENTS'\n\n # if its the events-file, use a second dict for the gti-hdu\n if filetype == 'events':\n data_gti = dict()\n data_gti['OBS_ID'] = obs.obs_id\n data_gti['HDU_TYPE'] = 'gti'\n data_gti['HDU_CLASS'] = 'gti'\n data_gti['FILE_DIR'] = data['FILE_DIR']\n data_gti['FILE_NAME'] = data['FILE_NAME']\n data_gti['HDU_NAME'] = hdu_list[2].name\n data_gti['SIZE'] = data['SIZE']\n data_gti['MTIME'] = data['MTIME']\n data_gti['MD5'] = data['MD5']\n\n rows.append(data_gti)\n\n rows.append(data)\n 
hdu_list.close()\n\n else:\n print('File not found: {}'.format(filename))\n\n names = [\n 'OBS_ID', 'HDU_TYPE', 'HDU_CLASS',\n 'FILE_DIR', 'FILE_NAME', 'HDU_NAME',\n 'SIZE', 'MTIME', 'MD5'\n ]\n\n table = Table(rows=rows, names=names)\n\n print('Writing {}'.format(indir + \"/\" + str(outfile)))\n table.write(indir + \"/\" + str(outfile), overwrite=True)\n # add hdu name\n hdulist = fits.open(indir + \"/\" + str(outfile), mode='update')\n hdulist[1].name = 'HDU_INDEX'\n hdulist.close()", "def save_spi3d(self):\n lut = self.generate_lut()\n file_path = os.path.join(self.output, self.name)\n file_io.save_file(lut, file_path)", "def save(self, path, filename=None, overwrite=False):\n \n if filename is None and self.metadata is None:\n raise ValueError(\"If the image has no 'metadata', you must specify a filename\")\n elif filename is not None:\n pass\n elif filename is None and self.metadata is not None:\n filename = os.path.basename(self.metadata[\"pfilename\"])\n \n full_image_path = os.path.join(path, filename)\n \n if overwrite and os.path.exists(full_image_path):\n os.remove(full_image_path)\n \n self.fits.writeto(full_image_path)", "def save_to_file(self, file_name):\n from ligo.skymap.io.fits import write_sky_map\n\n check_file_exists_and_rename(file_name)\n kwargs = {}\n if self.meta_data is not None:\n kwargs = self.meta_data\n write_sky_map(file_name, self, **kwargs)", "def write_features(self):\r\n def pack_keypoint(keypoints, descriptors):\r\n kpts = np.array([[kp.pt[0], kp.pt[1], kp.size,\r\n kp.angle, kp.response, kp.octave,\r\n kp.class_id]\r\n for kp in keypoints])\r\n desc = np.array(descriptors)\r\n return kpts, desc\r\n\r\n filename = self.features_path + self.id\r\n kpts, desc = pack_keypoint(self.keypoints, self.descriptors)\r\n logging.info(f'Writing features of image {self.name} to file...')\r\n np.savez(filename, keypoints=kpts, descriptors=desc)\r\n logging.info('Features saved.')", "def write_map(self, file_name):\n\n if self.pixel == \"HEALPIX\":\n hp.fitsfunc.write_map(file_name, self.data, overwrite=True)\n if self.pixel == \"CAR\":\n enmap.write_map(file_name, self.data)", "def run(self):\r\n #print 'WriteFITS.run'\r\n\r\n # construct the name of the file\r\n runtime = self.previous_results['runtime']\r\n fitsname = '%s.fits' % runtime\r\n\r\n # get list of instrument observations\r\n observe = self.previous_results['observe']\r\n obs_timeline = observe['observed_timeline']\r\n observed_times = obs_timeline.keys()\r\n observed_times.sort()\r\n\r\n # construct lists of the values to be stored in each Table column\r\n for t in observed_times:\r\n timelist = []\r\n smec_position = []\r\n smec_nominal_position = []\r\n flag = []\r\n data = []\r\n pointing1_x = []\r\n pointing1_y = []\r\n pointing2_x = []\r\n pointing2_y = []\r\n\r\n config = obs_timeline[t]\r\n\r\n timelist.append(config.time)\r\n smec_position.append(config.smec_position)\r\n smec_nominal_position.append(config.smec_nominal_position)\r\n flag.append(config.flag)\r\n data.append(config.data)\r\n pointing1_x.append(config.pointing1_x)\r\n pointing1_y.append(config.pointing1_y)\r\n pointing2_x.append(config.pointing2_x)\r\n pointing2_y.append(config.pointing2_y)\r\n\r\n # create a Header object and primary HDU - this just contains\r\n # some very basic, general information\r\n prihdr = pyfits.Header()\r\n prihdr['COMMENT'] = 'This FITS file was created by pyfiins at %s' % \\\r\n runtime\r\n prihdu = pyfits.PrimaryHDU(header=prihdr)\r\n\r\n # create list of Header Data Unit objects, include the primary 
HDU\r\n hdulist = pyfits.HDUList([prihdu])\r\n\r\n # create an HDU to contain the Table and append it to the list\r\n hdulist.append(pyfits.BinTableHDU.from_columns(\r\n pyfits.ColDefs([\r\n pyfits.Column(name='Time', format='D',\r\n array=np.array(timelist)),\r\n pyfits.Column(name='SMEC Position', format='E',\r\n array=np.array(smec_position)),\r\n pyfits.Column(name='SMEC Nominal Position', format='E',\r\n array=np.array(smec_nominal_position)),\r\n pyfits.Column(name='Flag', format='L',\r\n array=np.array(flag)),\r\n pyfits.Column(name='Data', format='E',\r\n array=np.array(data)),\r\n pyfits.Column(name='Pointing1 X', format='E',\r\n array=np.array(pointing1_x)),\r\n pyfits.Column(name='Pointing1 Y', format='E',\r\n array=np.array(pointing1_y)),\r\n pyfits.Column(name='Pointing2 X', format='E',\r\n array=np.array(pointing2_x)),\r\n pyfits.Column(name='Pointing2 Y', format='E',\r\n array=np.array(pointing2_y))])))\r\n\r\n # write the HDU list to a file\r\n hdulist.writeto(fitsname, clobber=True)\r\n self.result['fitsfile'] = fitsname\r\n\r\n return self.result", "def save(self, filename):\n try:\n import PIL\n except ImportError:\n raise RuntimeError('Could not import PIL. PIL (pillow) is required to save fresnel images.')\n else:\n if self._output is None:\n self.render()\n image = PIL.Image.fromarray(self._output[:], mode='RGBA')\n image.save(filename)", "def store(self, dataFrame, filename):\n columns = [\"longitude\", \"latitude\", \"elevation\", \"noise_mean_day\", \"noise_mean_evening\", \"noise_mean_night\", \"noise_weighted_24h\", \"noise_mean_24h\"]\n self.store_in_csv(dataFrame, filename=filename, columns=columns)\n\n columns.insert(0, \"id\") # pandas adds a id in the front\n self.store_in_database(filename=filename, columns=columns)", "def save_spi3d(self):\n for filename, colormap in colors.colormaps.items():\n if self.test:\n self.print_colormap(filename, colormap)\n lut = self.generate_spi3d_from_colormap(colormap)\n file_path = os.path.join(self.output, filename)\n file_io.save_file(lut, file_path)\n\n for filename, ev_colormap in colors.ev_colormaps.items():\n if self.test:\n self.print_colormap(filename, ev_colormap)\n lut = self.generate_spi3d_from_evs(ev_colormap)\n file_path = os.path.join(self.output, filename)\n file_io.save_file(lut, file_path)", "def run(self):\r\n #print 'WriteFITS_IDI.run'\r\n\r\n # construct the name of the file\r\n readfits = self.previous_results['readfits']\r\n obs_date = readfits['obs date']\r\n idifitsfile = '%s.idi.fits' % obs_date\r\n\r\n configxml = 'firi.xml'\r\n\r\n # midnight on date to Julian day\r\n obs_date_midnight = astro_time.Time('%s-%s-%sT00:00:00' %\r\n (obs_date[:4], obs_date[4:6], obs_date[6:8]), format='isot')\r\n obs_date_midnight = obs_date_midnight.jd\r\n\r\n rdate = astro_time.Time(obs_date_midnight, format='jd',\r\n out_subfmt='date')\r\n rdate = rdate.iso\r\n\r\n # number of days after midnight at obs start\r\n obs_date_time = astro_time.Time('%s-%s-%s:%s:%s' %\r\n (obs_date[:4], obs_date[4:6], obs_date[6:11], obs_date[11:13],\r\n obs_date[13:]), format='isot')\r\n obs_date_time = obs_date_time.jd - obs_date_midnight\r\n\r\n # get specific items from the results that will be need in\r\n # the reduction\r\n reduce_interferogram = self.previous_results['reduceinterferogram']\r\n data_quality = reduce_interferogram['data_quality']\r\n scan_uvspectra = reduce_interferogram['scan_uvspectra']\r\n\r\n wavenumber = scan_uvspectra[0].wavenumber\r\n\r\n # construct lists of the values to be stored in each Table column\r\n 
n_uvspectra = max(scan_uvspectra.keys()) + 1\r\n mcomplex = 3\r\n mstokes = 1\r\n mfreq = len(wavenumber)\r\n mra = 1\r\n mdec = 1\r\n\r\n uv_data = np.zeros([n_uvspectra, mdec, mra, mfreq, mstokes, mcomplex])\r\n u = np.zeros([n_uvspectra])\r\n v = np.zeros([n_uvspectra])\r\n w = np.zeros([n_uvspectra])\r\n dates = np.zeros([n_uvspectra])\r\n times = np.zeros([n_uvspectra])\r\n baselines = np.zeros([n_uvspectra], dtype=np.int)\r\n freqid = np.ones([n_uvspectra], dtype=np.int)\r\n\r\n for k,val in scan_uvspectra.items():\r\n uv_data[k,0,0,:,0,0] = val.spectrum.real\r\n uv_data[k,0,0,:,0,1] = val.spectrum.imag\r\n uv_data[k,0,0,:,0,2] = np.ones(val.spectrum.real.shape)\r\n u[k] = np.mean(val.baseline_x)\r\n v[k] = np.mean(val.baseline_y)\r\n w[k] = np.mean(val.baseline_z)\r\n dates[k] = obs_date_midnight\r\n times[k] = obs_date_time + (np.mean(val.time) / (3600 * 24))\r\n baselines[k] = 258\r\n\r\n # external_params is referred to inside config.xml and can be\r\n # used to set parameters there\r\n light_speed = constants.c.to('m/s').value\r\n external_params = {'NCHAN':len(wavenumber),\r\n 'RDATE':rdate,\r\n 'REF_FREQ':0.0 * 100 * light_speed,\r\n 'CHAN_BW':np.abs(wavenumber[1] - wavenumber[0]) * \\\r\n 100 * light_speed}\r\n\r\n print \"Out: %s\\nConfig: %s\"%(idifitsfile, configxml)\r\n\r\n print('\\nConfiguring Array geography')\r\n print('--------------------------')\r\n # Meaningless numbers, hopefully not needed by any CASA method \r\n # that we want to use\r\n (latitude, longitude, elevation) = ('00:00:00.00', '00:00:00.00', 0)\r\n now = datetime.datetime.now()\r\n\r\n # Make ourselves an Array (pyEphem observer)\r\n array_geometry_m = np.array([\r\n [0.0, 0.0, 0.0],\r\n [0.0, 80.0, 0.0]], dtype = 'float32')\r\n beach = Array(lat=latitude, long=longitude, elev=elevation, date=now,\r\n antennas=array_geometry_m)\r\n\r\n print('\\nConfiguring phase source')\r\n print('--------------------------')\r\n # The source is our phase centre for UVW coordinates\r\n line = \"%s,f,%s,%s,%s,%d\" % ('Deep Space', '00:00:00',\r\n '00:00:00', '1', 2000)\r\n source = ephem.readdb(line)\r\n source.compute(beach)\r\n print \"Name: %s \\nRA: %s \\nDEC: %s\"%(source.name, source.ra, source.dec)\r\n\r\n # Make a new blank FITS HDU\r\n print('\\nCreating PRIMARY HDU')\r\n print('------------------------------------')\r\n hdu = make_primary(config=configxml, external_params=external_params)\r\n print repr(hdu.header)\r\n\r\n # Go through and generate required tables\r\n print('\\nCreating ARRAY_GEOMETRY')\r\n print('------------------------------------')\r\n tbl_array_geometry = make_array_geometry(config=configxml, num_rows=2,\r\n external_params=external_params)\r\n tbl_array_geometry = config_array_geometry(tbl_array_geometry,\r\n array_geometry_m)\r\n print repr(tbl_array_geometry.header)\r\n\r\n print('\\nCreating FREQUENCY')\r\n print('------------------------------------')\r\n tbl_frequency = make_frequency(config=configxml, num_rows=1,\r\n external_params=external_params)\r\n tbl_frequency = config_frequency(tbl_frequency,\r\n external_params=external_params)\r\n print repr(tbl_frequency.header)\r\n\r\n print('\\nCreating SOURCE')\r\n print('------------------------------------')\r\n tbl_source = make_source(config=configxml, num_rows=1,\r\n external_params=external_params)\r\n tbl_source = config_source(tbl_source, source)\r\n print repr(tbl_source.header)\r\n\r\n print('\\nCreating ANTENNA')\r\n print('------------------------------------')\r\n tbl_antenna = make_antenna(config=configxml, 
num_rows=2,\r\n external_params=external_params)\r\n tbl_antenna = config_antenna(tbl_antenna)\r\n print repr(tbl_antenna.header)\r\n\r\n print('\\nCreating UV_DATA')\r\n print('------------------------------------')\r\n\r\n print 'Data dimensions: %i dumps, %i chans, %i pols, %i data' % (\r\n n_uvspectra, mfreq, mstokes, mcomplex)\r\n\r\n print('Generating blank UV_DATA rows...')\r\n tbl_uv_data = make_uv_data(config=configxml, num_rows=n_uvspectra,\r\n external_params=external_params)\r\n\r\n timesorted = np.argsort(times)\r\n\r\n for k in timesorted:\r\n tbl_uv_data.data[k]['FLUX'] = uv_data[k,0,0,:,0,:].ravel()\r\n tbl_uv_data.data[k]['UU'] = u[k] / light_speed\r\n tbl_uv_data.data[k]['VV'] = v[k] / light_speed\r\n tbl_uv_data.data[k]['WW'] = w[k] / light_speed\r\n tbl_uv_data.data[k]['BASELINE'] = baselines[k]\r\n tbl_uv_data.data[k]['DATE'] = dates[k]\r\n tbl_uv_data.data[k]['TIME'] = times[k]\r\n tbl_uv_data.data[k]['SOURCE'] = 1\r\n tbl_uv_data.data[k]['FREQID'] = 1\r\n tbl_uv_data.data[k]['INTTIM'] = 3\r\n\r\n print repr(tbl_uv_data.header)\r\n \r\n hdulist = pyfits.HDUList(hdus=\r\n [hdu,\r\n tbl_array_geometry,\r\n tbl_source, \r\n tbl_frequency,\r\n tbl_antenna,\r\n tbl_uv_data])\r\n\r\n print('Verifying integrity...') \r\n hdulist.verify()\r\n \r\n if(os.path.isfile(idifitsfile)):\r\n print('Removing existing file...')\r\n os.remove(idifitsfile)\r\n print('Writing to file...')\r\n hdulist.writeto(idifitsfile)\r\n\r\n print('Done.')\r\n\r\n self.result['idifitsfile'] = idifitsfile\r\n\r\n return self.result", "def write(data: orm.Data, filename: str) -> None:\n save(to_bands_inspect(data), hdf5_file=filename)", "def save(self, ofilename, oname, noisy_only = True):\n ofile = ROOT.TFile(ofilename, 'recreate')\n\n outhists = [h.Clone(oname % (i + 1)) for i, h in enumerate(self.modules)]\n for h, cells in zip(outhists, self.cells):\n if noisy_only: h.Reset()\n for cell in cells: h.SetBinContent(cell[0], cell[1], noisy_only * 1.)\n # h.Write()\n\n ofile.Write()\n ofile.Close()", "def save(self, filename, theory, substrate=None, surface=None):\n fresnel_calculator = self.fresnel(substrate, surface)\n Q, FQ = self.apply_beam(self.calc_Q, fresnel_calculator(self.calc_Q))\n Q, R = theory\n if len(Q) != len(self.Q):\n # Saving interpolated data\n A = np.array((Q, R, np.interp(Q, self.Q, FQ)))\n header = (\"# %17s %20s %20s\\n\"\n % (\"Q (1/A)\", \"theory\", \"fresnel\"))\n elif getattr(self, 'R', None) is not None:\n A = np.array((self.Q, self.dQ, self.R, self.dR,\n R, FQ))\n header = (\"# %17s %20s %20s %20s %20s %20s\\n\"\n % (\"Q (1/A)\", \"dQ (1/A)\", \"R\", \"dR\", \"theory\", \"fresnel\"))\n else:\n A = np.array((self.Q, self.dQ, R, FQ))\n header = (\"# %17s %20s %20s %20s\\n\"\n % (\"Q (1/A)\", \"dQ (1/A)\", \"theory\", \"fresnel\"))\n\n header = (\"# intensity: %.15g\\n# background: %.15g\\n\"\n % (self.intensity.value, self.background.value)) + header\n\n with open(filename, \"wb\") as fid:\n #print(\"saving\", A)\n fid.write(asbytes(header))\n np.savetxt(fid, A.T, fmt=\"%20.15g\")", "def write(self, data, units=None, extname=None, extver=None,\n compress=None, tile_dims=None, header=None, names=None,\n table_type='binary', write_bitcols=False, **keys):\n\n if keys:\n import warnings\n warnings.warn(\n \"The keyword arguments '%s' are being ignored! 
This warning \"\n \"will be an error in a future version of `fitsio`!\" % keys,\n DeprecationWarning, stacklevel=2)\n\n isimage = False\n if data is None:\n isimage = True\n elif isinstance(data, numpy.ndarray):\n if data.dtype.fields == None: # noqa - probably should be is None\n isimage = True\n\n if isimage:\n self.write_image(data, extname=extname, extver=extver,\n compress=compress, tile_dims=tile_dims,\n header=header)\n else:\n self.write_table(data, units=units,\n extname=extname, extver=extver, header=header,\n names=names,\n table_type=table_type,\n write_bitcols=write_bitcols)", "def save_table(data, out_file):\n logging.info(\"Saving table\")\n #header, data = data\n #out = pd.DataFrame(data=data, columns = header.keys())\n joblib.dump(data, out_file)", "def write_to(self, fname, **kwargs):\n data = self.to_Table()\n data.write(fname, **kwargs)", "def tiffwrite(filename, im):\n tf.imwrite(filename, im)", "def testPersistence(self):\n exposure = lsst.afw.image.ExposureF(1, 1)\n exposure.setPsf(self.psf)\n self.assertIsNotNone(exposure.getPsf())\n with lsst.utils.tests.getTempFilePath(\".fits\") as filename:\n exposure.writeFits(filename)\n copy = lsst.afw.image.ExposureF(filename).getPsf()\n self.assertIsNotNone(copy)\n self.assertIsInstance(copy, GaussianOversampledPsf)\n self.assertGaussianOversampledPsfEqual(copy, self.psf)", "def to_fits(self):\n # Set up data\n names = [\n \"ENERG_LO\",\n \"ENERG_HI\",\n \"THETA_LO\",\n \"THETA_HI\",\n \"RAD_LO\",\n \"RAD_HI\",\n \"RPSF\",\n ]\n units = [\"TeV\", \"TeV\", \"deg\", \"deg\", \"deg\", \"deg\", \"sr^-1\"]\n data = [\n self.energy_lo,\n self.energy_hi,\n self.offset,\n self.offset,\n self.rad_lo,\n self.rad_hi,\n self.psf_value,\n ]\n\n table = Table()\n for name_, data_, unit_ in zip(names, data, units):\n table[name_] = [data_]\n table[name_].unit = unit_\n\n hdu = fits.BinTableHDU(table)\n hdu.header[\"LO_THRES\"] = self.energy_thresh_lo.value\n hdu.header[\"HI_THRES\"] = self.energy_thresh_hi.value\n\n return fits.HDUList([fits.PrimaryHDU(), hdu])", "def save_image(self, filename):\n raster.save_image(filename, self.image, self.metadata)", "def save_annotations_table(annotations: dict, outfile: str):\n\n vals = [a.table_data() for a in annotations.values()]\n\n df = pd.DataFrame(vals)\n\n df.to_csv(outfile)", "def export_hdf(dataset_id, df):\n\n df.to_hdf(\n f\"{PROCESSED_DIR}/{dataset_id}.h5\",\n key=dataset_id,\n complevel=COMPLEVEL,\n complib=COMPLIB,\n mode=\"w\",\n )", "def save_features_to_file(self):\n if not os.path.exists(self.features_save_path):\n os.makedirs(self.features_save_path)\n for s in self.sets:\n self.save_features_to_file_by_set(s)", "def save(self, fname):\n fmt = \" %7.1f %7.1f %8.2f %8.2f %8.2f %8.3f %8.3f %8.3f %8.3f %6.1f\"\n with open(fname, 'wb') as fid:\n fid.write('# CoordinateSystem \"Head\"\\n'.encode('utf-8'))\n fid.write('# begin end X (mm) Y (mm) Z (mm)'\n ' Q(nAm) Qx(nAm) Qy(nAm) Qz(nAm) g/%\\n'\n .encode('utf-8'))\n t = self.times[:, np.newaxis] * 1000.\n gof = self.gof[:, np.newaxis]\n amp = 1e9 * self.amplitude[:, np.newaxis]\n out = np.concatenate((t, t, self.pos / 1e-3, amp,\n self.ori * amp, gof), axis=-1)\n np.savetxt(fid, out, fmt=fmt)\n if self.name is not None:\n fid.write(('## Name \"%s dipoles\" Style \"Dipoles\"'\n % self.name).encode('utf-8'))", "def tofits(self, *args, **kwargs):\n return _image.image_tofits(self, *args, **kwargs)", "def save_image(self):\n self.table_to_image.img.save(self.file_name)\n aws.AWSHandler().upload_image(self.file_name)", "def save(self, 
exp_file, gat_file):\n\t\tto_save = np.stack((self.b, self.sigma)) #(2, K)\n\t\tto_save = np.concatenate((self.W,to_save) , axis = 0) #(D+2,K)\n\t\tnp.savetxt(exp_file, to_save)\n\t\tself.gating.save(gat_file)\n\t\treturn", "def save_tf_export(self, session):\n raise NotImplementedError(\"Implement save_tf_export() method\")", "def to_hdf5(self, filepath, **kwargs):\n hdf = pd.HDFStore(filepath, **kwargs)\n hdf.put(self.INDEXDATAFRAME, self.df, format='fixed', data_columns=True)\n hdf.close()", "def export(self, fname):\n\n # discard any data with null feature values\n self.discard()\n\n # set target as last column\n self.target = self.getFeatureData('Weather Type')\n\n # remove non-exportable features\n for n in ['Station ID', 'Station Name', 'Date', 'Weather Type']:\n if self._isFIdx(n):\n self.delete(n)\n\n # convert all data to float\n self.data = self.data.astype(float)\n\n # export to file\n pickle.dump(self, open(fname, 'wb'))\n\n return 0", "def save(self, filename):\n if len(self.interpolated_points) > 0:\n red, green, blue = zip(*self.interpolated_points)\n red = [np.asscalar(x) for x in red]\n green = [np.asscalar(x) for x in green]\n blue = [np.asscalar(x) for x in blue]\n output_type = \"interpolated\"\n print(\"Exporting interpolated points\")\n elif len(self.main_cluster) > 0:\n red, green, blue = self.get_color_lookup_table_points(self.main_cluster)\n output_type = \"clustered\"\n print(\"Exporting cluster points\")\n else:\n red = self.color_lookup_table_points[0]\n green = self.color_lookup_table_points[1]\n blue = self.color_lookup_table_points[2]\n output_type = \"resized\"\n print(\"Exporting resized points\")\n\n data = dict(\n red=red,\n green=green,\n blue=blue\n )\n\n filename = f'{filename}_{output_type}.pickle'\n with open(filename, 'wb') as outfile:\n pickle.dump(data, outfile, protocol=2)\n # stores data of color lookup table in file as pickle for efficient loading (yaml is too slow)\n\n print(f\"Output saved to '{filename}'.\")", "def saveFeatures(self, filename):\n print(\"Saving features info, spikeset hash\",)\n f = open(filename, 'wb')\n # compute a hash for the spikeset\n b = self.spikes.view(np.uint8)\n hashkey = hashlib.sha1(b).hexdigest()\n print(hashkey, \"to file\", filename, \".\")\n pickle.dump(hashkey, f)\n pickle.dump(self.feature_special, f)", "def expose(self):\n if self.camera is None: # test mode -- immediately return test image\n print(\"NO SPECTRAL CAMERA FOUND -- USING TEST DATA\")\n self.filename = \"example_fits_files/Mooi\"\n return\n\n exposure_time = self.time.get()\n try:\n self.exposure_time = float(exposure_time)\n except:\n message = \"Exposure time \\\"{0}\\\" cannot be converted to floating point number\".format(exposure_time)\n messagebox.showerror(\"Error\", message)\n raise ValueError(message)\n filename = \"spectra/{0}\".format(timestamp())\n self.camera.spectrum(self.exposure_time, filename)\n self.filename = filename", "def store_hdf_pytables(self, filters=None):\n class Acceleration(IsDescription):\n counter = UInt8Col()\n timestamp = UInt64Col()\n acceleration = UInt16Col()\n\n compression_name = (\"No Compression\"\n if filters is None else filters.complib)\n filepath = self.filepath.with_name(\n f\"{self.filepath.stem} PyTables {compression_name}\").with_suffix(\n \".hdf5\")\n with open_file(filepath, 'w', filters=filters) as hdf:\n data = hdf.create_table(hdf.root, \"acceleration\", Acceleration)\n row = data.row\n for value in self.values:\n row['counter'] = value['counter']\n row['timestamp'] = value['timestamp']\n 
row['acceleration'] = value['acceleration']\n row.append()", "def save_GRID( self , filename ):\n self._fwrite_GRID( filename )", "def save_as(self, filename):\n opencv.imwrite(filename, self.img)", "def save_values(self):\n # TODO: Add self.prefix and extension\n NetworkTables.saveEntries(self.file.get_filename(), prefix='/vision/' + self.name + '_')", "def convert_to_fits(image_file, clobber=True):\n img = load_image(image_file)\n\n fits.writeto(image_file.replace('pgm', 'fits'), img, clobber=clobber)\n\n return", "def reopen(self):\n self._FITS.close()\n del self._FITS\n self._FITS = _fitsio_wrap.FITS(self._filename, self.intmode, 0)\n self.update_hdu_list()", "def to_file(self, filename):\n self.header['n'] = self.n\n save_gyre(filename, self.header, self.data)", "def save_to_hdf(df, fname, output_subdir=None):\n path = Path(fname)\n newfname = path.with_suffix('.h5').name\n folderpath = HOME / 'output'\n if output_subdir:\n folderpath = folderpath / output_subdir\n path = folderpath / newfname\n df.to_hdf(str(path), 'df', format='t')\n return str(path)", "def save_enu(self, filename):\n x, y, z = self.get_coords_enu()\n coords = np.vstack([x, y, z]).T\n np.savetxt(filename, coords, fmt=b'%.12e')", "def fits_summary(self):\n t = pyfits.open(self.fits_file)[1].data\n # remove columns that have multiple dimensions\n for j in range(3):#??? why\n for i,col in enumerate(t.columns):\n if len(col.array.shape)>1:\n t.columns.del_col(i)\n\n tt=pyfits.BinTableHDU.from_columns(t.columns)\n df = pd.DataFrame(tt.data)\n df['flux13*'] = df['Flux_Density']*1e13\n df['unc_flux13*'] = df['Unc_Flux_Density']*1e13\n summary = html_table(df.describe().T, float_format=FloatFormat(3),\n heading='', href=False, maxlines=50)\n self.fits_summary_table = summary.replace('%', '%%')\n # creates error??\n #print ('Check: %s' % df)", "def save(self, filename, precision='%15.07e', encoding='utf-8'):\n\n # when creating empty st object, cols is not set yet\n if not hasattr(self, 'cols'):\n if self.main_data_sets[1][1].shape[1]==19:\n self.cols = stc.split()\n elif self.main_data_sets[1][1].shape[1]==30:\n self.cols = fpm.split()\n else:\n c = self.main_data_sets[1][1].shape[1]\n raise TypeError(f'St input needs 19 (iso) or 30 (aniso/fpm) cols, not {c}')\n\n colwidth = len(precision % 1)\n sep = '=' * colwidth * len(self.cols) + '\\n'\n colhead = ''.join([k.center(colwidth) for k in self.cols]) + '\\n'\n\n nsets = len(self.main_data_sets)\n # fails if dirname is empty string\n if len(os.path.dirname(filename)) > 0:\n os.makedirs(os.path.dirname(filename), exist_ok=True)\n with open(filename, 'w', encoding=encoding) as fid:\n fid.write('%i ; number of sets, Nset\\n' % nsets)\n for mset, set_data_dict in self.main_data_sets.items():\n fid.write('#%i ; set number\\n' % mset)\n for set, set_array in set_data_dict.items():\n dstr = self.to_str(mset=mset, set=set, precision=precision)\n npoints = self.main_data_sets[mset][set].shape[0]\n fid.write(sep + colhead + sep)\n fid.write('$%i %i\\n' % (set, npoints))\n fid.write(dstr + '\\n')", "def output_isochrone(self, file_to_name):\n N = len(self.color) # length of data points\n\n color = self.color\n abs_mag = self.abs_mag\n metallicity = self.metallicity\n best_fit = int(self.best_fit)*np.ones(N)\n age = self.age*np.ones(N)\n\n df_out = pd.DataFrame({'color' : color, 'abs_mag' : abs_mag, 'metallicity': metallicity, 'best_fit' : best_fit, 'age' : age})\n\n # TODO: Allow user to set their own directory\n 
df_out.to_csv('/Users/cam/Desktop/astro_research/orion/orion_populations/best_fit_isochrones/' + file_to_name)" ]
[ "0.7418882", "0.73383343", "0.72863376", "0.70334315", "0.6764785", "0.6726204", "0.640871", "0.6325488", "0.63196474", "0.629019", "0.62887764", "0.62156135", "0.61818725", "0.613622", "0.61161315", "0.6057704", "0.6053205", "0.6028134", "0.6027796", "0.59942424", "0.5977483", "0.59543246", "0.59526503", "0.59320354", "0.59293157", "0.5881647", "0.5870417", "0.5852343", "0.5735721", "0.5722934", "0.5714079", "0.5696028", "0.5682829", "0.5679494", "0.56565714", "0.56393147", "0.56327426", "0.5629765", "0.5598554", "0.5571364", "0.55634934", "0.5562081", "0.5557427", "0.5553295", "0.5549778", "0.5546262", "0.5540697", "0.5528361", "0.5524472", "0.5515968", "0.5509408", "0.5503033", "0.54991144", "0.5493894", "0.54890925", "0.54638976", "0.54577774", "0.5456901", "0.5440885", "0.5436037", "0.5431615", "0.54231036", "0.5412769", "0.5407654", "0.5403308", "0.539394", "0.5384199", "0.5375424", "0.5373458", "0.53689736", "0.53664064", "0.53656375", "0.53567743", "0.5351575", "0.53381133", "0.53298396", "0.53295857", "0.53261966", "0.53144276", "0.5309426", "0.5308048", "0.52968657", "0.52938604", "0.5290921", "0.5289951", "0.5283685", "0.5277743", "0.5269533", "0.5265369", "0.5265098", "0.52570444", "0.5253691", "0.52496946", "0.5249321", "0.524779", "0.524105", "0.52406746", "0.5240173", "0.52293175", "0.52202517" ]
0.7322082
2
String representation of the exposure sequence.
def __str__(self): output = ['Tile ID {}'.format(self._tileid)] for ex, files in self._exposure_files.items(): filenames = '- exposure {:08d}\n'.format(ex) for f in files: filenames = '{} + {}\n'.format(filenames, f) output.append(filenames) return '\n'.join(output)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def to_string(self):\n return self.sequence", "def __str__(self):\n string = 'input dim: {} \\noutput dim: {} \\n'.format(\n self.dim_inputs, self.dim_outputs\n )\n string += 'sequence length: {} \\n'.format(\n self.tensors[0].shape[1]\n )\n key = 'train' if self.train else 'test'\n string += '{}_samples: {} \\n{}_sequences: {} \\n'.format(\n key, self.experiment_length, key, self.tensors[0].shape[0]\n )\n return string", "def __str__(self):\n ret_val = 'Trace sequence number within line: %i\\n' % \\\n self.header.trace_sequence_number_within_line\n ret_val += '%i samples, dtype=%s, %.2f Hz' % (\n len(self.data),\n self.data.dtype, 1.0 /\n (self.header.sample_interval_in_ms_for_this_trace /\n float(1E6)))\n return ret_val", "def __str__(self):\n return self._seq", "def dump(self):\n outputs = [\"Sequence : %s\" % self.name]\n if self.curr_value:\n outputs.append(\" start : %d\" % self.curr_value)\n outputs.append(\" minimum : %d\" % self.min_value)\n outputs.append(\" maximum : %d\" % self.max_value)\n if self.increment_by > 1:\n outputs.append(\" increment : %d\" % self.increment_by)\n return \"\\n\".join(outputs)", "def __str__(self):\n\n nframes = len(self.frames)\n if nframes == 0:\n return \"\"\n elif nframes == 1:\n frame, = self.frames\n return str(frame)\n else:\n frames = sorted(self.frames)\n start = prev = frames[0] # First frame.\n step = None\n subranges = []\n for end in frames[1:]: # Frame starting from the second in the list.\n\n if step is None: # Step is still none.\n step = end - prev # Find and set step.\n\n if prev + step != end: # If the sequence is broken.\n subranges.append((start, prev, step)) # Create a subrange.\n step = None # Reset step.\n start = end # Re-start start.\n prev = end # The next previous.\n\n else:\n subranges.append((start, end, step))\n\n return \", \".join(format_subrange(start, end, step) for (start, end, step) in subranges)", "def excitation_seq(self) -> str:\n return self.frame_selector.excitation_seq", "def __str__(self):\n return \"{}\\n{}\\n{}\\n{}\".format(self.header,self.sequence,self.line3,self.quality)", "def __str__(self):\n return self.sequence", "def __str__(self):\r\n\r\n data = [self.seq_f,\r\n self.seq_r,\r\n self.tf,\r\n self.df,\r\n len(self.taxons),\r\n len(self.families),\r\n ]\r\n\r\n return \"%s\\n\" % \"\\t\".join([str(x) for x in data])", "def __repr__(self):\n return '%s([%s])' % (self.__class__.__name__, ', '.join(\n ['(%r, %r)' % (key, self[key]) for key in self._sequence]))", "def __repr__(self):\n msg = str(self.__class__.__name__)\n msg += \" (\\n\\tpath={}\\n\\tobs_keys={}\\n\\tseq_length={}\\n\\tfilter_key={}\\n\\tframe_stack={}\\n\"\n msg += \"\\tpad_seq_length={}\\n\\tpad_frame_stack={}\\n\\tgoal_mode={}\\n\"\n msg += \"\\tcache_mode={}\\n\"\n msg += \"\\tnum_demos={}\\n\\tnum_sequences={}\\n)\"\n filter_key_str = self.filter_by_attribute if self.filter_by_attribute is not None else \"none\"\n goal_mode_str = self.goal_mode if self.goal_mode is not None else \"none\"\n cache_mode_str = self.hdf5_cache_mode if self.hdf5_cache_mode is not None else \"none\"\n msg = msg.format(self.hdf5_path, self.obs_keys, self.seq_length, filter_key_str, self.n_frame_stack,\n self.pad_seq_length, self.pad_frame_stack, goal_mode_str, cache_mode_str,\n self.n_demos, self.total_num_sequences)\n return msg", "def snapshot(self):\n text = \"\"\n text += \"{}:\\n{}\\n\".format('chi', np.array2string(self.chi))\n return text", "def __str__(self):\n s = 'Gene: ' + self._dna[:6] + '...' 
+ self._dna[-6:] + \\\n ', length=%d' % len(self._dna)\n if self._exons is not None:\n s += ', %d exon regions' % len(self._exons)\n return s", "def __str__(self):\n return str(self._num_rotations)", "def __str__(self):\n #Get an ordered list of the elements strings so it outputs always the same\n #string given a mass function.\n elements = []\n for element in self.focals:\n elements.append((element, str(element)))\n sortedList = sorted(elements, key=lambda x:x[1])\n \n result = \"\"\n first = True\n for t in sortedList:\n if first:\n result += t[1] + \":\" + \"{:.4f}\".format(self.focals[t[0]])\n first = False\n else:\n result += \", \" + t[1] + \":\" + \"{:.4f}\".format(self.focals[t[0]])\n return \"{\" + result + \"}\"", "def __str__(self):\n # TODO also show relative abundance\n s = \"{} ion species\\n\".format(len(self.ions))\n for ion in self.ions:\n s += \" {:2s} (Z = {:3d}) {:.3e} particles\\n\".format(ion.getName(), ion.getCharge(), ion.getParticleNumber())\n \n return s", "def __str__(self):\n s = \"\"\n for v in self.vectors:\n s += str(v) + \"\\n\"\n return s", "def __str__(self):\n out_str = \"\\n\".join(`\"%.5f, %.5f, %.1f, %s, %s\" % (point[0], point[1], point[2], point[3], point[4])` for point in self.__traectory_list)\n return \"\\'x, y, altitude, capture time, capture date'\\n\"+out_str", "def to_string(self):\n string = (\n f\"r{self.num_repeat}_k{self.kernel_size}_s{self.stride}{self.stride}\"\n f\"_e{self.expand_ratio}_i{self.input_filters}_o{self.output_filters}\"\n f\"_se{self.se_ratio}\"\n )\n\n if not self.id_skip:\n string += \"_noskip\"\n return string", "def as_str(self) -> str:\n return dumps(self.as_dict(), cls=NumpyEncoder)", "def __str__(self):\n if len(self.label) > 0:\n descr = [\"'%s', target='%s' [%s]\" % (self.label, self.target.name, self.target.body_type)]\n else:\n descr = [\"target='%s' [%s]\" % (self.target.name, self.target.body_type)]\n if self.baseline:\n descr[0] += ', initial baseline offset=%f' % (self.baseline.poly[-1],)\n if self.beam:\n descr[0] += ', beam height=%f' % (self.beam.height,)\n for scan_ind, scan in enumerate(self.scans):\n descr.append('%4d: %s' % (scan_ind, str(scan)))\n return '\\n'.join(descr)", "def to_str(self):\n return u\"Superellipse[{:.4g},{:.4g}]\".format(self.alpha0.l, self.alpha0.r)", "def sequence(self):\n\n\t\tseq = \"\"\n\t\tfor chain in self.chain:\n\t\t\tfor res in chain.residue:\n\t\t\t\tseq += res.aa1()\n\n\t\treturn seq", "def __str__(self):\n # newline-delimited values of all the attributes\n return \">%s\\n%s\" % (self.Label, self.Sequence)", "def __str__(self):\n st=\"\"\n for g in self:\n st+=g.fasta()\n st+=\"\\n\"\n return st", "def __str__(self):\n output = \"\"\n for i in self.values:\n st = []\n output += \"[\"\n for j in i:\n st.append(str(j))\n output += \",\".join(st)+\"]\"\n return str(self.m)+\"x\"+str(self.n)+\" [\" + output + \"]\"", "def __str__(self):\n string = ''\n for degree, coef in enumerate(self.coefs, 1):\n degree = degree - 1\n string += str(coef)+'x^' + str(degree) + ' + '\n string = string[0:-3] # remove the last ' + '\n return string", "def __str__( self ) :\n\n return( ' '.join( [ \"%g\" % c_l for c_l in self.coefficients ] ) )", "def __str__(self):\n return str((self.code, self.fitness,))", "def to_string(self):\n\n return '[[%s], [%s]], [%d, %d], [%s], %s, %s, [%s]' % \\\n (', '.join(INT2STRING_CARD[h] for h in self.hand[0]),\n ', '.join(INT2STRING_CARD[h] for h in self.hand[1]),\n self.pot[0], self.pot[1],\n ', '.join(INT2STRING_CARD[p] for p in self.pub),\n 
INT2STRING_PHASE[self.phase],\n INT2STRING_PLAYER[self.player],\n ', '.join(INT2STRING_STATUS[s] for s in self.status))", "def _to_str(self):\n\t\tprint(\"predictors: {}, types: {} \\n method: {}, preprocessing: {}\\\n\t\t\t \\n partition_rate: {}, metric: {}, file name: {}\".format(\n\t\t\t self.predictors, self.predictors_types, self.method_name,\n\t\t\t self.preprocessing_methods, self.data_split, self.metric,\n\t\t\t self.plotting_file_name))", "def __str__(self):\n\n string = \"values:\\n\\t\"\n string += \" x \".join(map(str, self.shape))\n\n string += \" {} ({})\\n\".format(type(self.values).__name__, self.values.dtype)\n\n if self.print_values is True:\n string += str(self.values) + \"\\n\"\n\n string += \"dims:\\n\\t\"\n\n string += \"{}\\n\".format(self.dims)\n\n string += \"coords:\\n\\t\"\n string += \"\\n\\t\".join(map(repr, self.coords))\n\n string += \"\\n\"\n\n string += \"attrs:\\n\"\n\n for ix, key in enumerate(self.attrs.keys()):\n if ix == self.max_print_attrs:\n string += \"\\t+%i attrs\" % (len(self.attrs) - self.max_print_attrs)\n break\n string += \"\\t{!r}: {!r}\\n\".format(key, self.attrs[key])\n\n return string", "def __str__(self):\n return '%i traces in the SEG Y structure.' % len(self.traces)", "def __str__(self):\n return str(self.asMatrix())", "def __str__(self):\n return (str(self.chromosome_id) + '. Chromosome: Genes: ' + str(\n self.genes) + '; Fitness: ' + str(self.fitness_value))", "def __str__(self):\n return (\">%s\\n\" % self.name) + \\\n wrap(self.sequence, self.COLUMNS)", "def __str__(self):\n\n result = \"n: \" + str(self.n) + \"\\n\"\n result += \"m: \" + str(self.m) + \"\\n\"\n result += \"ns: \" + str(self.ns) + \"\\n\"\n result += \"s0: \" + str(self.s0) + \"\\n\"\n result += \"goals: \" + str([self.goals[i] for i in range(self.ng)]) + \"\\n\"\n result += \"horizon: \" + str(self.horizon) + \"\\n\"\n result += \"gamma: \" + str(self.gamma) + \"\\n\\n\"\n\n result += \"S(s, a, s'):\\n%s\" % (str(np.array([self.S[i] \\\n for i in range(self.n * self.m * self.ns)]).reshape((self.n, self.m, self.ns)))) + \"\\n\\n\"\n\n result += \"T(s, a, s'):\\n%s\" % (str(np.array([self.T[i] \\\n for i in range(self.n * self.m * self.ns)]).reshape((self.n, self.m, self.ns)))) + \"\\n\\n\"\n\n result += \"R(s, a):\\n%s\" % (str(np.array([self.R[i] \\\n for i in range(self.n * self.m)]).reshape((self.n, self.m)))) + \"\\n\\n\"\n\n return result", "def __str__(self):\n \n s = \"(R: \" + str(self.r) + \", G: \" + str(self.g) + \", B: \" + str(self.b) + \")\"\n return s", "def str(self):\n out = \"{0}:\".format(self.gtype) if self.gtype else \"\"\n out += \"{0}\".format(repr(self.coords))\n out += \"[{0}]\".format(str(self.goalPtr)) if self.goalPtr else \"\"\n return out", "def __str__(self):\n return str(self.array)", "def __repr__(self):\n res = \"[%d] \" % self._idy\n if self._pulse_state:\n res += str(self._pulse_state)\n else:\n res += \"-\"\n return res", "def __str__(self):\n return ''.join(\n f'Chromosome - {index} {chromosome} / Fitness = {chromosome.fitness}\\n'\n for index, chromosome\n in enumerate(self)\n )", "def __str__(self):\n result=\"curv %f d0 %f z0 %f ctheta %f phi %f barcode %d\"%(self.curv,self.d0,self.z0,self.ctheta,self.phi,self.barcode)\n return result", "async def str(self, sequences=None):\n if sequences is None: \n sequences = self.sequences\n \n lines = []\n def str_k(k):\n if k is NoDim: return \"NoDim\"\n elif k is None: return \"None\"\n else: return str(k)\n\n max_len_key = max([len(str_k(k)) for k in sequences.keys()])\n for k in 
sorted(sequences.keys()):\n dimension_lines = [await s.str() for s in self.sequences[k]]\n for i,l in enumerate(dimension_lines):\n if i == 0:\n lines.append(str_k(k) + (max_len_key - len(str_k(k)) + 1) * \" \" + l)\n else:\n lines.append(\" \" * max_len_key + \" \" + l)\n return \"\\n\".join(lines)", "def __str__(self):\n return 'encoded value {} for {}\\nvalue = {}'.format(self.i_category, self.harmonized_trait, self.i_value)", "def __str__(self):\n rep = \"\"\n for row in self._marker:\n for pegs in row:\n rep += pegs + \" \"\n rep = rep[:-1]\n rep += \"\\n\"\n rep = rep[:-1]\n return rep", "def __str__(self):\n # First obtain a string describing the underlying ramp\n # model.\n strg = super(MiriExposureModel, self).__str__()\n \n # Also display the averaged data.\n if self.grpavg > 1 or self.intavg > 1:\n title = self.get_data_title('data')\n title += \" (%d integrations and %d groups averaged)\" % \\\n (self.intavg, self.grpavg)\n len2 = len(title)\n title += \"\\n\" + (len2 * '~')\n strg += title + \"\\n\" + str(self.data_averaged) + \"\\n\"\n \n \n return strg", "def __str__(self):\n return self._str_hsp_header() + \"\\n\" + self._str_aln()", "def __repr__(self):\n info = ('class {}\\n'.format(self.__class__.__name__) +\n 'TFReccord {}\\n'.format(self.tffile) +\n 'Embeddings [{}, {}]\\n'.format(self.embeddings.shape[0], self.embeddings.shape[1]))\n return info", "def __repr__(self):\n ct = shortest_string_in_list(self.cause_texts)\n et = shortest_string_in_list(self.effect_texts)\n ev = ','.join(self.evidence_texts)\n return '%s -> %s [%s, %s, %s]' % (ct, et, ev,\n repr(self.cause_polarity),\n repr(self.effect_polarity))", "def __repr__(self):\n\n repme = \"x0= {!r}, kf= {!r}, n_upd= {!r}\"\\\n .format(self.x0, self.kf, self.n_upd)\n\n return \"UmbrellaSampling({!s})\".format(repme)", "def __repr__(self) -> str:\n return (f'{self.__class__.__name__}({self.in_channels}, {self.out_channels}, kernel_size=({self.kernel_size[0]}, {self.kernel_size[1]}), '\n f'stride=({self.stride[0]}, {self.stride[1]}), padding=({self.padding[0]}, {self.padding[1]}), dilation=({self.dilation[0], self.dilation[1]}), '\n f'groups={self.groups}, bias={self.bias}, reduce_ratio={self.reduce_ratio}, sigma_mapping={str(self.sigma_mapping)}'\n )", "def __str__(self):\n return '{}\\t{}\\t{}'.format(self.sequence_name, self.first, self.last)", "def toString(self):\n\t\ts = \"A %s titled '%s':\\n\\n\" % (self.getSpecString(), self.getName())\n\t\ts += \"It's summary reads: %s\\n\\n\" % (self.getDescription())\n\t\ts += \"~~\\n%s\\n~~\" % (self.getAllItemsStr())\n\t\treturn s", "def __str__(self):\n\n strme = \"fed method {} {} {} {}\"\\\n .format(ExpandedEnsemble.key, self.eta0, self.c_upd, self.n_upd)\n if self.smooth:\n strme = \"{!s} {!s}\".format(strme, self.smooth)\n\n return strme", "def __str__(self):\n s = \"Projection info:\\n\"\n s += \" #instances: \" + str(self.data_ninstances) + \"\\n\"\n s += \" data dimension: \" + str(self.data_dim) + \"\\n\"\n s += \" projection dimension: \" + str(self.projection_dim) + \"\\n\"\n s += \" data: \" + str(self.data[0]) + \"\\n\"\n s += \" \" + str(self.data[1]) + \"...\\n\"\n s += \" projection: \" + str(self.projection[0]) + \"\\n\"\n s += \" \" + str(self.projection[1]) + \"...\"\n return s", "def output_line(self):\n \n if self.genotype_probabilities is None:\n return \"{s.name}=None\".format(s=self)\n\n return \"{0}=AA[{1}],Aa[{2}],aa[{3}]\".format(self.name, *self.genotype_probabilities)", "def __str__(self):\n if len(self.movies) == 1:\n return \"[ %s\" % ( 
self.movies.head )\n else:\n i, l, s = 0, len(self.movies), \"\"\n ch = chr(201)\n for movie in self.movies:\n s += \"%s %s\" % ( ch, movie )\n i += 1\n if i == l - 1:\n ch = \"\\n\" + chr(200)\n else:\n ch = \"\\n\" + chr(204)\n return s", "def __str__(self):\n \n result = '<' + self._name + ', ' + str(self._id) + ', ' + self._age + ', ' \n for elem in self._direct:\n result += elem.getID() + ', '\n result += self._fitness + ', ' + self._immune + '>'\n\n return result", "def __repr__(self: object) -> str:\n measstring: str = \"Tatort - {:04d} - {} - {} - {} - {}\".format(self.episode_id, self.episode_name, self.episode_inspectors, self.episode_sequence, self.episode_broadcast)\n return measstring", "def __repr__(self):\n s = \"s = $%.2f, x = $%.2f, t = %.2f (years), sigma = %.3f, rf = %.3f\" %(self.s, self.x, self.t, self.sigma, self.rf)\n return s", "def as_string(self):\n return self.__repr__()", "def __str__(self) -> str:\n return self.__repr__() + \"\\n\" + \"\\n\".join(self.regimes())", "def __str__(self):\n return (\"UUID: \" + str(self.uuid) + \"\\n\"\n \"Data: \" + str(self.data) + \"\\n\" +\n \"Tex: \" + str(self.texOutput) + \"\\n\")", "def __str__(self):\n return_string = self.name + \"\\n\" + str(self.traits)\n\n return return_string", "def bytes(self):\n return e(self._seq)", "def to_string(self):\r\n return '\\n'.join([' '.join([trans.start_state, trans.end_state, trans.symbol])\r\n for trans in self.transitions]) + '\\n' + self.start_state + ' ' + ' '.join(self.final_states)", "def __str__(self):\r\n return \"Input event %s[%d], %d -- %f: 0x%x(0x%x) = 0x%x\" % (\r\n self.stream.deviceType, self.stream.deviceIndex, self.stream.grabbed,\r\n self.time, self.eventType, self.eventCode, self.eventValue)", "def __repr__(self):\n return self._metadata.__str__()", "def __str__(self):\n\n strme = \"fed method {} {} {} {}\"\\\n .format(TransitionMatrix.key, self.nout, self.n_upd, self.mode)\n\n return strme", "def __str__(self):\r\n information = self.get_dna_fragment()\r\n\r\n return \"protein indices = \" + str(self._indices) + \": \\n\" + \\\r\n \"t~ strand = \" + str(information[0][0]) + \"\\n\" + \\\r\n \" \" + str(information[0][1]) + \"\\n\" + \\\r\n \"c~ strand = \" + str(information[1][0]) + \"\\n\" + \\\r\n \" \" + str(information[1][1]) + \"\\n\"", "def __str__(self):\n astr = '[\\n name: [ ' + self.name + ' ]\\n'\n astr += ' variables: [ '\n for var, init in self.variables:\n astr += '(' + var + ' := ' + init + '), '\n astr = astr[:-2] + ' ]\\n assumptions: [ '\n for assumption in self.assumptions:\n astr += assumption + ', '\n astr = astr[:-2] + ' ]\\n guarantees: [ '\n for guarantee in self.guarantees:\n astr += guarantee + ', '\n return astr[:-2] + ' ]\\n]'", "def toGenomeRepresentation(self):\n s = \"\"\n s += str(self.axiom)\n s += \"||\"+str(self.niterations) # The iterations must be shown as well\n for prod in self.productions:\n s += \"||\"\n s += prod.toGenomeRepresentation()\n return s", "def to_str(self):\n\n return \"imsi=%s, tmsi=%u, rnti=%u\" % (self.imsi, self.tmsi, self.rnti)", "def __str__(self) -> str:\n out = \"STACK: \" + str(self.da.length()) + \" elements. 
[\"\n out += ', '.join([str(self.da.get_at_index(_))\n for _ in range(self.da.length())])\n return out + ']'", "def __str__(self):\n\n strme = \"fed method {} {} {} {}\"\\\n .format(UmbrellaSampling.key, self.x0, self.kf, self.n_upd)\n\n return strme", "def __repr__(self):\n\n repme = \"eta0= {!r}, c_upd= {!r}, n_upd= {!r}, smooth= {!r}\"\\\n .format(self.eta0, self.c_upd, self.n_upd, self.smooth)\n\n return \"ExpandedEnsemble({!s})\".format(repme)", "def dumps(self):\n return ''.join(self.out)", "def raw(self) -> str:\n return \"\".join(seg.raw for seg in self.segments)", "def __str__(self):\n return ' '.join([self.source, self.name, str(self.outputs)])", "def __repr__(self):\n\n output = list()\n output.append('{resonance_id:6s}'.format(**self.par))\n output.append('{h_larmor_frq:6.1f}'.format(**self.par))\n output.append('{temperature:4.1f}'.format(**self.par))\n output.append('{:10.5f}'.format(self.val))\n output.append('{:10.5f}'.format(self.err))\n\n if self.cal:\n output.append('{:10.5f}'.format(self.cal))\n\n return ' '.join(output)", "def Beat_disp(self):\n return ' '.join(str(x+self.offset) for x in self.beats)", "def description(self):\n active = np.nonzero([bool(p) for p in self])[0]\n last_active = active[-1] if len(active) else -1\n return ' '.join([p.value_str for p in self][:last_active + 1])", "def __str__(self):\n return super().__str__() + f'Recording Artist: {self.artist}\\nNumber of Discs: {self.num_discs}\\n'", "def __repr__(self):\n rep = self.__class__.__name__ + \"(\"\n rep += repr(self.filename)\n rep += \", \"\n rep = rep[:-2] + \")\"\n return rep", "def __str__(self):\n items = ['({!r})'.format(item) for item in self.items()]\n return '[{}]'.format(' -> '.join(items))", "def __str__(self):\n struct_repr = \", \".join([\n \"roll_deg: \" + str(self.roll_deg),\n \"pitch_deg: \" + str(self.pitch_deg),\n \"yaw_deg: \" + str(self.yaw_deg)\n ])\n\n return f\"EulerAngle: [{struct_repr}]\"", "def __repr__(self) -> str:\n\n return f\"{self.filename}:{self.line}:{self.flag}\"", "def __repr__(self):\n\n return self._metadata.__str__()", "def toString(self):\r\n str = \"\"\r\n for i in range(len(self.Data)):\r\n str += (self.__hexLookup[int(self.Data[i] / 16)]).decode()\r\n str += (self.__hexLookup[int(self.Data[i] % 16)]).decode()\r\n \r\n return str", "def __str__(self):\n result = ('---> Population - Generation: ' + str(self.generation)\n + '<--- \\n')\n result += 'Fittest Chromosome: \\n' + str(self.fittest_chromosome)\n\n for chromosome in self.chromosomes:\n result += str(chromosome) + '\\n'\n\n return result", "def __str__(self):\n return functools.reduce(\n lambda acc, v: acc + str(v[0]) + \" : \" + str(v[1][1]) + \" - lifetime \" + str(v[1][0]) + os.linesep,\n self.store.items(), \"\")", "def __str__(self):\n s = f\"{self.__class__.__name__}: \"\n s += f\"x = {base.array2str(self._x)}\"\n return s", "def __str__(self):\n # TODO check the terminal width and adjust the presentation\n # only use for 1D, fall back to repr for ND\n if self._hist.rank() == 1:\n s = str(self._hist)\n # get rid of first line and last character\n s = s[s.index(\"\\n\") + 1 : -1]\n else:\n s = repr(self)\n return s", "def __str__(self):\n pos = self._pos + 1\n if len(self._refr) > len(self._alt):\n dellength = len(self._refr) - len(self._alt)\n return '{:s}:{:d}:{:d}D'.format(self._seqid, pos, dellength)\n else:\n insertion = self._alt[1:]\n return '{:s}:{:d}:I->{:s}'.format(self._seqid, pos, insertion)", "def __str__(self):\n\t\n\t\tresult = \"\"\n\t\tresult += \"Torsional Spring Specs: 
\\n\"\n\t\tresult += \"Shape Eq. Slope: {0}\\n\".format(str(self.shape_slope))\n\t\tresult += \"Z Thickness: {0}\\n\".format(str(self.z_thick))\n\t\tresult += \"In-Plane Thickness: {0}\\n\".format(str(self.thick))\n\t\tresult += \"Spiral Length: {0}\\n\".format(str(self.length))\n\n\t\treturn result", "def __str__(self):\n shape, dtype = self._initial_shape, self._initial_dtype\n descr = [self._name_shape_dtype(self.name, shape, dtype)]\n for transform in self.transforms:\n shape, dtype = transform.new_shape(shape), transform.dtype if transform.dtype is not None else dtype\n descr += ['-> ' + self._name_shape_dtype(transform.name, shape, dtype)]\n return '\\n'.join(descr)", "def __str__(self):\n out = \"phase polynomial = \\n\"\n out += str(self.poly)\n out += \"\\naffine function = \\n\"\n out += \" (\"\n for row in range(self.num_qubits):\n wrote = False\n for col in range(self.num_qubits):\n if self.linear[row][col] != 0:\n if wrote:\n out += \" + x_\" + str(col)\n else:\n out += \"x_\" + str(col)\n wrote = True\n if self.shift[row] != 0:\n out += \" + 1\"\n if row != self.num_qubits - 1:\n out += \",\"\n out += \")\\n\"\n return out", "def __str__(self):\n to_print = '{} : {}\\n'.format('Name'.ljust(34),self.name)\n to_print = to_print + '{} : {}\\n'.format('Name'.ljust(34),self.pathloss.name)\n to_print = to_print + '{} : {}\\n'.format('Number of samples'.ljust(34),self.nsamples)\n to_print = to_print + '{} : {}\\n'.format('Sensor model'.ljust(34),self.sensor_model.name)\n to_print = to_print + '{} : {}\\n'.format('Motion model'.ljust(34),self.motion_model.name)\n return to_print" ]
[ "0.72283626", "0.7040741", "0.67555356", "0.6664315", "0.6626783", "0.6547962", "0.65409464", "0.65346056", "0.6483022", "0.64496106", "0.6439818", "0.633267", "0.6277435", "0.62634873", "0.62428826", "0.6218749", "0.620363", "0.61781204", "0.6177901", "0.61686456", "0.615419", "0.61360776", "0.6126522", "0.6114013", "0.6102579", "0.60984993", "0.60914254", "0.60867995", "0.6084197", "0.6062007", "0.6061599", "0.60387903", "0.60309666", "0.6029116", "0.60147494", "0.60131234", "0.60116524", "0.60044694", "0.5993939", "0.5993784", "0.5988301", "0.59872633", "0.5985298", "0.598507", "0.5981959", "0.59697706", "0.59681016", "0.59635794", "0.59620345", "0.59604514", "0.5957581", "0.5949662", "0.59448093", "0.59398204", "0.5934281", "0.5927333", "0.59252006", "0.5922859", "0.5918701", "0.59145457", "0.5908494", "0.59057", "0.58945054", "0.5882936", "0.5880203", "0.58774996", "0.5874913", "0.5873607", "0.58680713", "0.5865703", "0.58649683", "0.5861598", "0.5857956", "0.5857821", "0.58552796", "0.58534634", "0.5849248", "0.5849027", "0.5847905", "0.5841928", "0.5840831", "0.5839621", "0.58395374", "0.58344126", "0.58343333", "0.58306307", "0.58293205", "0.5828642", "0.5824304", "0.5818725", "0.581868", "0.5816766", "0.5806661", "0.5805049", "0.58024216", "0.57988834", "0.5797038", "0.5793527", "0.57901615", "0.57900137" ]
0.70447665
1
Salt and hashes the password.
def set_password(self, password): self.password = md5crypt(password, gen_salt())
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def hash_password(self, password):\n self.password = pwd_context.encrypt(password)", "def hash_password(self, password):\n salt = hashlib.sha256(os.urandom(60)).hexdigest().encode('ascii')\n pwdhash = hashlib.pbkdf2_hmac('sha256', password.encode('utf-8'), \n salt, 100000)\n pwdhash = binascii.hexlify(pwdhash)\n return (salt + pwdhash).decode('ascii')", "def hash_password(self):\n self.__password = self.str_to_hash(self.__password)", "def hash_password(password):\n salt = hashlib.md5(password.encode())\n return salt.hexdigest()", "def hash_password(self, password):\n self.password_hash = generate_password_hash(password)", "def hash_password(password, salt):\n password = str(password).encode('utf-8')\n salt = str(salt).encode('utf-8')\n return hashlib.sha512(password + salt).hexdigest()", "def _hash_password(self, password):\n passwordhash = bcrypt.hashpw(password.encode('utf8'), bcrypt.gensalt())\n return passwordhash", "def hash_password(password):\n secret_key = current_app.config.get('SECRET_KEY')\n return sha256_crypt.encrypt(password+secret_key)", "def hash_pass(password, salt):\n return hashlib.pbkdf2_hmac('sha512', password.encode(), salt, 100000)", "def encrypt_password(password, salt):\n\n if isinstance(password, str):\n password_bytes = password.encode(\"UTF-8\")\n else:\n password_bytes = password\n\n hashed_password = sha256()\n hashed_password.update(password_bytes)\n hashed_password.update(salt)\n hashed_password = hashed_password.hexdigest()\n\n if not isinstance(hashed_password, str):\n hashed_password = hashed_password.decode(\"UTF-8\")\n\n return hashed_password", "def hash_password(self, password):\n cmd = [\n \"snap\",\n \"run\",\n \"{}.hash-password\".format(self.synapse_snap),\n \"-c\",\n self.synapse_config,\n \"-p\",\n password,\n ]\n result = check_output(cmd)\n str_result = result.decode(\"utf-8\")\n return str_result.rstrip()", "def hash_password(password):\r\n salt = hashlib.sha256(os.urandom(60)).hexdigest().encode('ascii')\r\n pwdhash = hashlib.pbkdf2_hmac('sha512', password.encode('utf-8'),salt,100000)\r\n pwdhash = binascii.hexlify(pwdhash)\r\n return (salt+pwdhash).decode('ascii')", "def hash_with_salt(self, s):\n\n data = f'{s} {self.salt}'.encode('ascii') # encode string to raw bytes object\n hash_obj = hashlib.md5(data) # hash it \n if self.trunc > 0:\n hash_txt = hash_obj.hexdigest()[0:self.trunc] # get truncated hash symbols\n else:\n hash_txt = hash_obj.hexdigest()\n return f'{s} {hash_txt}'", "def hash_passwd(password, hash_method=\"sha256\"):\n\n return generate_password_hash(password, hash_method)", "def set_password_hash(self, password):\n salt = bcrypt.gensalt()\n self.password_hash = bcrypt.hashpw(password.encode(), salt)", "def hash_password(password):\n #return passlib.hash.pbkdf2_sha512.encrypt(password)\n return sha256_crypt.hash(password)", "def password(self, value):\n self.password_hashed = func.crypt(value, func.gen_salt('bf'))", "def hash_password(password):\n salt = hashlib.sha256(os.urandom(60)).hexdigest().encode('ascii')\n pwdhash = hashlib.pbkdf2_hmac('sha512', password.encode('utf-8'), \n salt, 100000)\n pwdhash = binascii.hexlify(pwdhash)\n return (salt + pwdhash).decode('ascii')", "def hash_password(password):\n salt = hashlib.sha256(os.urandom(60)).hexdigest().encode('ascii')\n pwdhash = hashlib.pbkdf2_hmac('sha512', password.encode('utf-8'), \n salt, 100000)\n pwdhash = binascii.hexlify(pwdhash)\n return (salt + pwdhash).decode('ascii')", "def hash_password(password):\n salt = 
hashlib.sha256(os.urandom(60)).hexdigest().encode('ascii')\n pwdhash = hashlib.pbkdf2_hmac('sha512', password.encode('utf-8'), \n salt, 100000)\n pwdhash = binascii.hexlify(pwdhash)\n return (salt + pwdhash).decode('ascii')", "def hash_password(password):\n salt = hashlib.sha256(os.urandom(60)).hexdigest().encode('ascii')\n pwdhash = hashlib.pbkdf2_hmac('sha512', password.encode('utf-8'),\n salt, 100000)\n pwdhash = binascii.hexlify(pwdhash)\n return (salt + pwdhash).decode('ascii')", "def hash_password(password):\n\n return hashlib.sha224(password).hexdigest()[:20]", "def hash_password(password):\n salt = hashlib.sha256(os.urandom(60)).hexdigest().encode('ascii')\n pwdhash = hashlib.pbkdf2_hmac(\n 'sha512', password.encode('utf-8'), salt, 100000\n )\n pwdhash = binascii.hexlify(pwdhash)\n return (salt + pwdhash).decode('ascii')", "def salty_password(name, password, salt=None):\n if not salt:\n salt = ''.join(random.choice(letters)for z in xrange(5))\n salting = hashlib.sha256(name+password+salt).hexdigest()\n return '%s|%s' % (salt, salting)", "def hash_password(password):\n return hashlib.md5(password).hexdigest()", "def hash_password(password):\n salt = hashlib.sha256(os.urandom(60)).hexdigest().encode('utf-8')\n pwdhash = hashlib.pbkdf2_hmac(\n 'sha512', password.encode('utf-8'), salt, 100000\n )\n pwdhash = binascii.hexlify(pwdhash)\n return (salt + pwdhash).decode('utf-8')", "def hashPassword(passwd):\r\n \r\n return hashlib.sha224(passwd).hexdigest()", "def password(self, password):\n self.password_hash = generate_password_hash(password)", "def make_hashed_password(cleartext,salt=None):\n \n if not salt:\n salt = make_salt(5)\n return \"%s|%s\" % (salt,hashlib.sha256(salt + cleartext).hexdigest())", "def hash_password(password: str) -> str:\n return bcrypt.hashpw(password.encode(), bcrypt.gensalt()).decode('utf8')", "def hash_password(self, original_password):\n self.password = generate_password_hash(original_password)", "def simple_hash(username:str,password: str):\n salted_hash = username+password\n return str(sha256(salted_hash.encode('utf-8')).hexdigest())", "def hashpw(password, salt):\n\n\n (_, hash_ver, log_rounds, b64salt) = salt.split('$')\n (major, minor) = tuple(hash_ver)\n\n if (major, minor) > BCRYPT_VERSION:\n raise ValueError('Newer hash version than library version. OMG.')\n\n # Computing power doesn't increase linearly, 2^x should be fine.\n n = int(log_rounds);\n if n > 31 or n < 0:\n raise ValueError('Number of rounds out of bounds.')\n rounds = 1 << n # Because 2 ** n is for wimps.\n if rounds < BCRYPT_MINROUNDS:\n raise ValueError('Minimum number of rounds is: %d' % BCRYPT_MINROUNDS)\n\n # Enforce (not base64-ed) minimum salt length.\n if (len(b64salt) * 3 / 4 != BCRYPT_SALTLEN):\n raise ValueError('Salt has invalid length.')\n\n # We don't want the base64 salt but the raw data.\n raw_salt = _b64_decode(b64salt)\n # Revision a of bcrypt adds a trailing \\0 byte to the key.\n key_len = len(password) + (minor >= 'a' and 1 or 0);\n\n ## Set up EksBlowfish (this is the expensive part).\n bf = EksBlowfish()\n\n bf.expandkey(raw_salt, password, key_len)\n for k in xrange(rounds):\n # NB: The original bcrypt paper runs this step with the salt first,\n # then the password, not vice versa. The C implementation flips those,\n # which is why we reproduce the same bug here.\n bf.expandkey(0, password, key_len)\n bf.expandkey(0, raw_salt, BCRYPT_SALTLEN)\n\n ## Encrypt magic value, 64 times.\n # First, cut into 32bit integers. 
Big endian, again, sigh.\n bit_format = '>' + 'I' * BCRYPT_BLOCKS\n ctext = list(struct.unpack(bit_format, BCRYPT_MAGICTEXT))\n for i in xrange(64):\n # Encrypt blocks pairwise.\n for d in xrange(0, BCRYPT_BLOCKS, 2):\n ctext[d], ctext[d+1] = bf.cipher(ctext[d], ctext[d+1], bf.ENCRYPT)\n\n ## Concatenate cost, salt, result, and return.\n # The C implementation cuts off the last byte of the ciphertext, so we do\n # the same.\n result = _b64_encode(struct.pack(bit_format, *ctext)[:-1])\n return salt + result", "def hash(password):\n return sha256_crypt.encrypt(password)", "def hashedPassword(password, salt):\n\tif not hasattr(password, 'decode'):\n\t\tpassword = password.encode('utf-8')\n\tkey = makeKey(password, salt)\n\treturn base64.b64encode(\n\t hashlib.pbkdf2_hmac('sha256', key, password, 1,\n\t dklen=32)).decode('utf-8')", "def hash_password(self, password):\n\n # Use passlib's CryptContext to hash a password\n password_hash = self.password_crypt_context.encrypt(password)\n\n return password_hash", "def hash(self, password):\n try:\n salt = self.__get_salt()\n except NotImplementedError:\n raise CryptoError('Could not encrypt password')\n\n hashed = scrypt.hash(password, salt + self.pepper, buflen=CryptoConsts.HASH_BYTES)\n\n return hashed, salt", "def salted_password(self) -> bytes:\n #NB. FOR NOW, USE THIS.\n return self.password.encode()", "def hash_password(username, password):\n m = hashlib.sha512()\n m.update(username.encode('utf-8'))\n m.update(password.encode('utf-8'))\n return m.hexdigest()", "def make_pw_hash(password, salt=None):\n if not salt:\n salt = make_salt()\n h = hashlib.sha256(password+salt).hexdigest()\n return '%s,%s' % (salt, h)", "def set_password(self, password):\n self.password_hash = generate_password_hash(f\"{password}{self.user_salt}\")", "def hashPassword(self, password):\n key = hashlib.pbkdf2_hmac(\n 'sha256',\n str.encode(password),\n self.salt,\n 100000\n )\n return key", "def hash_password(password, user_id):\n return pbkdf2_sha512.hash(password+user_id)", "def get_salt_hashedpassword(self, password):\n if isinstance(password, unicode):\n password_utf8 = password.encode('utf8')\n else:\n password_utf8 = password\n\n # generate salt\n salt = hashlib.sha1()\n # NOTICE: notice, os.urandom uses /dev/urandom under Linux\n # this function call will get blocked if there is no available\n # random bytes in /dev/urandom. 
An attacker could perform a\n # DOS attack based on this factor\n salt.update(os.urandom(16))\n \n # generate hashed password\n hashedpassword = hashlib.sha1()\n hashedpassword.update(password_utf8 + salt.hexdigest())\n \n return salt.hexdigest(), hashedpassword.hexdigest()", "def hash_password(password):\n salt = binascii.b2a_base64(hashlib.sha256(os.urandom(60)).digest()).strip()\n pwdhash = (\n binascii.b2a_base64(\n hashlib.pbkdf2_hmac(\"sha256\", password.encode(\"utf-8\"), salt, 10000)\n )\n .strip()\n .decode()\n )\n return {\"salt\": salt.decode(), \"pwdhash\": pwdhash}", "def _hash_password(password: str) -> str:\n # return pbkdf2_sha512.encrypt(password, rounds=ROUNDS, salt=SALT)\n return pbkdf2_sha512.using(rounds=ROUNDS, salt=SALT).hash(password)", "def get_password_hash(password):\n\n return pwd_context.hash(password)", "def hash_password(password):\n return pbkdf2_sha512.encrypt(password)", "def hash_password(plain_password, salt=bcrypt.gensalt()):\n # Hash plain password with SHA-512 algorithm\n simple_hashed_pwd = hashlib.sha512(plain_password.encode()).hexdigest().encode()\n\n # Re-hash usign blowfish algorithm to increase robustness against brute-force attacks\n robust_hashed_pwd = bcrypt.hashpw(simple_hashed_pwd, salt)\n\n return robust_hashed_pwd", "def hash_password(password: str) -> str:\n return pbkdf2_sha512.hash(password)", "def _pepper_hash(pepper, password, salt):\n return '{:0>8}{:s}{:s}'.format(pepper, password, salt)", "def encrypt_password(password: str) -> str:\n return pwd_context.hash(password)", "def set_password(self, password):\n self.password = self.hash_password(password)", "def get_password_hash(password: str) -> str:\n return pwd_context.hash(password)", "def passsword(self, password):\n self.passwor_harsh = generate_password_hash(password)", "def hashed_passwd(passwd):\n salt = uuid.uuid4().hex\n return hashlib.sha512(passwd.encode('utf-8')\n + salt.encode('utf-8')).hexdigest()", "def password_encryption(self, password):\n return bcrypt.hashpw(password.encode('utf-8'), bcrypt.gensalt())", "def passwd(self, plaintext):\n self._password = bcrypt.generate_password_hash(plaintext.encode('utf8')).decode('utf8')", "def generate_hash(password, salt):\n # encode the password/salt in utf-8\n bytes_string = password.encode(encoding='utf-8')\n salt = salt.encode(encoding='utf-8')\n\n # creates hash objects\n hash_md5 = hashlib.md5()\n hash_sha256 = hashlib.sha256()\n\n # hashes salt and password in the 2 formats\n hash_md5.update(salt + bytes_string)\n hash_sha256.update(salt + bytes_string)\n\n # returns the hex-digest eg the format you most commonly see\n print(hash_md5.hexdigest())\n print(hash_sha256.hexdigest())\n\n return hash_sha256, hash_md5", "def hash_string(password):\n return hash(password)", "def hashPassword(password, password_string):\n d = hashlib.sha256()\n d.update(password_string + password)\n return d.hexdigest()", "def test_user_hash_with_salt(self):\n self.assertEqual(get_user_hash(\"johndoe\", salt=\"jane\").hex()[:6], \"fb0bf4\")", "def get_hashed_value(password):\n salt = 'saifulBoss'\n password = salt + password\n return md5(password.encode('utf-8')).hexdigest()", "def get_hashed_value(password):\n salt = 'saifulBoss'\n password = salt + password\n return md5(password.encode('utf-8')).hexdigest()", "def password(self, password):\n self.password_hash = generate_password_hash(password)", "def password(self, password):\n self.password_hash = generate_password_hash(password)", "def password(self, password):\n self.password_hash = 
generate_password_hash(password)", "def password(self, password):\n self.password_hash = generate_password_hash(password)", "def encrypt_password(cls, password):\n return generate_password_hash(password)", "def update_password(self, pwd):\n self.password = bcrypt.generate_password_hash(pwd).decode('utf8')", "def update_password(self, password):\n self.password = scryptsalsa208sha256_str(password.encode('utf-8')).decode('utf-8')\n return True", "def generate_password(plain_password, salt):\n return crypt(plain_password, \"$6$%s\" % salt)", "def update_password(self, user, password):\n user.password = hashers.make_password(password)", "def tasting_salt(name, password, hashedpassword):\n taste = hashedpassword.split('|')[0]\n return salty_password(name, password, taste) == hashedpassword", "def hash_128_bit_pass(passwd):\n h = hashlib.sha256()\n h.update(passwd)\n return h.hexdigest()[:16]", "def password(self, password):\n\n self.password_hash = generate_password_hash(password)", "def set_password(self, password):\n self.password = generate_password_hash(password, method='pbkdf2:sha256')", "def new_password(self, login, password):\n login = self._sha512('{:s}{:s}'.format(login, self.salt))\n pw = self._pepper_hash(self._get_peppers(login).next(), password, self.salt)\n hashed = bcrypt.hashpw(pw, bcrypt.gensalt(7))\n return login, hashed", "def set_password(self, user, password):\n hashed_password = self.hash_password(password)\n server_name = self.get_server_name()\n hookenv.log(\"Storing hash: {}\".format(hashed_password), hookenv.DEBUG)\n result = self.pgsql_query(\n \"UPDATE users SET password_hash = '{}' WHERE name = '@{}:{}';\".format(\n hashed_password, user, server_name\n )\n )\n return result", "def encryptPsw(password):\n # Transform the password into a byte object\n byte = str.encode(password)\n\n # SHA256 the byte object --> HASH object\n middle = hashlib.sha256(byte)\n\n # Convert the HASH object into string\n hash = middle.hexdigest()\n\n return hash", "def hash_pwd_str(provided_password: str) -> str:\n if len(provided_password) > 100:\n raise PasswordError('length')\n\n random_bytes = os.urandom(60)\n salt = hashlib.sha256(random_bytes).hexdigest().encode('ascii')\n\n provided_password = provided_password.encode('utf-8')\n # 100000 iterations of sha256 recommended at\n # https://docs.python.org/3/library/hashlib.html#hashlib.pbkdf2_hmac\n pwdhash = hashlib.pbkdf2_hmac('sha256', provided_password, salt, 100000)\n pwdhash = binascii.hexlify(pwdhash)\n return (salt + pwdhash).decode('ascii')", "def set_password(self, password):\n self.password_hash = generate_password_hash(str(password))", "def encode(self, password, salt):\n assert password is not None\n assert salt and '$' not in salt\n old_site_secret = settings.OLD_SITE_SECRET_KEY\n salt = salt.encode('utf-8')\n secret_md5 = md5(salt).hexdigest()\n secret_sha1 = sha1(salt).hexdigest()\n things = (secret_md5 + password + secret_sha1 + old_site_secret).encode('utf-8')\n password_hash = sha1(things).hexdigest()\n return '{algorithm}${salt}${hash}'.format(\n algorithm=self.algorithm,\n salt=salt.decode('utf-8'),\n hash=password_hash,\n )", "def hash(password):\n result = hashlib.sha1(password.encode())\n # return a hexadecimal digits\n return result.hexdigest()", "def set_password(self, password):\n self.password_hash = generate_password_hash(password)", "def set_password(self, password):\n self.password_hash = generate_password_hash(password)", "def get_password_hash(self, username):\n raise NotImplementedError()", "def 
set_password(self, value):\n # Salt need to be generated before set password\n m = hashlib.sha256()\n m.update('-'.join([\n str(datetime.now()),\n config.get('security.password_salt')\n ]))\n self.salt = m.hexdigest()\n self.password_pending = False\n self.password = self.__encrypt(value)", "def create_password_hash(self, password):\n return pbkdf2_sha256.encrypt(password, rounds=1000, salt_size=16)", "def setPassword(self, password, hashed=False):\n if hashed or self.hashed:\n self.hashed = True\n self.password = utils.saltHash(password)\n else:\n self.password = password", "def generate_password_hash(password, digestmod='sha256', salt_length=8):\n\n salt = ''.join(random.sample(SALT_CHARS, salt_length))\n signature = create_signature(salt, password, digestmod=digestmod)\n return '$'.join((digestmod, salt, signature))", "def scrypt(salt: bytes, N: int, password: bytes) -> bytes:\n kdf = Scrypt(salt=salt, length=32, n=N, r=8, p=1, backend=default_backend())\n return kdf.derive(password)", "def secure_password(plain_password):\n # Hash plain password with SHA-512 algorithm &\n # re-hash hash with blowfish algorithm\n robust_hashed_pwd = hash_password(plain_password)\n\n # Encrypt hash using encryption secret key\n cypher = Fernet(_get_pwd_key_from_config())\n encrypted_hash = cypher.encrypt(robust_hashed_pwd)\n\n # Encode encrypted hash in base64\n b64_hash = b64encode(encrypted_hash)\n\n # Decode base64 bytes into string\n str_hash = b64_hash.decode('utf-8')\n\n return str_hash", "def pwd(password: str):\n password = password.encode('utf8')\n s = sha1()\n s.update(password)\n return s.hexdigest()", "def hash_new_password(password: str) -> Tuple[bytes, bytes]:\r\n salt = os.urandom(16)\r\n pw_hash = hashlib.pbkdf2_hmac('sha256', password.encode(), salt, 100000)\r\n return salt, pw_hash", "def scramble(password: str):\n salt = secrets.token_hex(16)\n return hashlib.sha512((password + salt).encode('utf-8')).hexdigest()", "def _hashPassword(password):\n charset = './' + ascii_letters + digits\n return crypt.crypt(password, ''.join(random.sample(charset, 2)))", "def test_salt_generation(self):\n pw = generate_password(8)\n hashes = tuple(hash_password(pw) for i in range(10))\n self.assertEqual(len(hashes), len(set(hashes)),)", "def encrypt_password(password,salt=None):\n\tif salt is None:\n\t\tsalt = os.urandom(8) #64 bits\n\n\tassert 8 == len(salt)\n\tassert isinstance(salt,str)\n\n\tif isinstance(password,unicode):\n\t\tpassword = password.encode('UTF-8')\n\n\tassert isinstance(password,str)\n\n\tresult = password\n\tfor i in xrange(10):\n\t\tresult = HMAC(result,salt,sha256).digest()\n\treturn salt + result", "def __hash_new_password(password: str) -> Tuple[bytes, bytes]:\n salt = os.urandom(16)\n pw_hash = hashlib.pbkdf2_hmac(\"sha256\", password.encode(), salt, 100000)\n return salt, pw_hash" ]
[ "0.7726546", "0.76877874", "0.76764584", "0.75947", "0.7566996", "0.7513428", "0.74172544", "0.7408706", "0.7395221", "0.7364182", "0.7327813", "0.73232174", "0.7280142", "0.7258137", "0.72416675", "0.72343624", "0.72303843", "0.72061163", "0.72061163", "0.72061163", "0.72013676", "0.7194805", "0.716947", "0.71646726", "0.7163205", "0.7151571", "0.7132026", "0.7130124", "0.7100996", "0.70737517", "0.7034117", "0.70066786", "0.69935584", "0.6964419", "0.6947658", "0.6943601", "0.69357497", "0.69283825", "0.6915156", "0.69102556", "0.6908548", "0.68901175", "0.6886953", "0.68865556", "0.68704855", "0.68697476", "0.68397474", "0.68385816", "0.6834591", "0.68053156", "0.6784952", "0.6781983", "0.6771304", "0.67586535", "0.67534316", "0.67458224", "0.6737212", "0.673161", "0.6708673", "0.67001635", "0.66521895", "0.66392034", "0.6627819", "0.6627819", "0.661896", "0.661896", "0.661896", "0.661896", "0.66155916", "0.6592313", "0.65916455", "0.656446", "0.6564385", "0.6535789", "0.65176535", "0.6512618", "0.65061057", "0.65036625", "0.64957047", "0.6487345", "0.64721787", "0.64569986", "0.6449573", "0.64362276", "0.6429365", "0.6429365", "0.64145917", "0.6413402", "0.6406148", "0.6403949", "0.63878316", "0.63815814", "0.6378789", "0.6378536", "0.637839", "0.63720745", "0.63719434", "0.6371665", "0.63612205", "0.6358603" ]
0.6528625
74
Saves the ftp account.
def save(self, **kwargs): owner = str(self.vhost.domain.owner()) if not self.name.startswith(owner + '_'): self.name = owner + '_' + self.name try: super(Account, self).save(**kwargs) except IntegrityError: i = 1 base_name = self.name while True: self.name = base_name + '-' + str(i) try: super(Account, self).save(**kwargs) return except IntegrityError: i += 1
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def save_accounts(account):\n account.save_account()", "def save_accounts(account):\n account.save_account()", "def save_click(self):\n acc_name = self.name_entry.get()\n email = self.email_entry.get()\n username = self.user_entry.get()\n password = self.pass_entry.get()\n\n if not acc_name:\n self.error_label.config(text='Introdu numele contului.')\n return\n\n if self.is_new_account and accountdb.account_exists(self.us, acc_name):\n self.error_label.config(text='Un cont cu acest nume există deja.')\n return\n\n # Tell the user what's happening.\n self.error_label.config(text='Se salvează...')\n self.error_label.update()\n\n acc = account.create_account(acc_name, email, username, password, self.us)\n accountdb.change_account(self.us, acc)\n\n self.error_label.config(text='Detaliile contului au fost salvate.')\n\n self.acc = acc\n self.is_new_account = False\n self.load_account_data()", "def save(self):\n path = self.user.get_session_path()\n with open(path, 'a', encoding='utf8') as file:\n self.write(file=file)", "def save(self):\n\t\tPath(self.PATH).mkdir(parents=True,exist_ok=True)\n\n\t\twith open(self.account_file, \"wb\") as file:\n\t\t\tpickle.dump(self, file)", "def save_account(self):\n Credentials.credentials_list.append(self)", "def save_account(self):\n Credential.account_list.append(self)", "def save_account(self, account = None):\n\t\tif account == None:\n\t\t\taccount = self.currentAccount\n\t\tself.config.add_section(account.data['name'])\n\t\tfor field in account.data:\n\t\t\tself.config.set(account.data['name'], field, str(account.data[field]))\n\t\tself.config.write(open(self.configFile, 'w'))", "def save_credentials(credentials):\n credentials. save_details()", "def save_credentials(self):\n Stores.account_store.append(self.register_stores())", "def save(self):\n # EXERCISE:\n # - save self.access_token, self.user_id, self.save_message to access token file AccessData.ACCESS_TOKEN_FILE\n # @see http://stackoverflow.com/questions/12309269/write-json-data-to-file-in-python\n# TODO ==> INSERT CODE HERE <==\n\n logger.debug('saved access token in file %s' % (AccessData.ACCESS_TOKEN_FILE))", "def save_user(user):\n User.save_user(user)", "def save_users(user):\n user.save_user()", "def save_user(self):\n User.user_list.append(self)\n\n # finding a user's credentials", "def save_user(self):\n db.session.add(self)\n db.session.commit()", "def save_state():\n logger.debug(\"called\")\n pwd_gate.save()\n preferences.save()\n shareBuffer.save()\n contacts.save()\n secrets.save()", "def save_profile(self):\n self.save()", "def save(self, context=None):\n updates = self.obj_get_changes()\n self.dbapi.update_user(context, self.id, updates)\n self.obj_reset_changes()", "def save_credentials(credentials):\n Credentials.save_credentials(credentials)", "def put_account(self, account):\n \n pass", "def save(self):\n self.rpc.call(MsfRpcMethod.CoreSave)", "def _save_credentials(self):\n from .. 
import TOKENS\n credentials = os.path.join(TOKENS, \"drive.json\")\n self._gauth.SaveCredentialsFile(credentials)", "def save():", "def save_user(cls,username,password):\n cls.users[username] = password", "def save():\n pass", "def saveSettings(self):\n self.userFiles.applyData()\n self.userPersonal.applyData()", "def save_path(path_to_account):\r\n with open(\"config.txt\", 'w+') as write_in_file:\r\n write_in_file.write(path_to_account)", "def save(self):\n self.__db.commit()", "def save_user(username, data):\n\n hashed_username = base64.b64encode(Cryptography.hash(username).digest()).decode()\n\n file = open(getcwd() + Database.__DB_FILENAME, 'a')\n iv, ciphered_data = Cryptography.cipher(Cryptography.get_passphrase(), data)\n file.write(hashed_username + ':' + ciphered_data.hex() + '.' + iv.hex() + '\\n')\n file.flush()\n file.close()", "def save(self):\n self.db.commit()", "def putFile(self, filename):\n basename = os.path.basename(filename)\n fp = open(filename, 'rb')\n self.ftp.storbinary('stor ' + basename, fp)\n fp.close();", "def save(self, register=False):\n sha = sha1(self.email).hexdigest()\n infos = self.to_dict()\n infos[\"plan\"] = infos[\"plan\"][\"id\"] if infos[\"plan\"] else None\n\n if not redis.hmset(\"sl:account:{}\".format(sha), infos):\n raise SleekException(\"Could not save current user.\", 401)\n \n if register:\n try:\n send_email.delay(\n \"florent.esp@gmail.com\", \"Welcome to sleekstatus !\",\n \"Welcome message\", [self.email]\n )\n except:\n pass # Cannot send email", "def test_save_account(self):\n self.new_account.save_account() # add account to list\n self.assertEqual(len(Credential.credential_list),\n 1) # check length of list", "def save(self):\n self.session.commit()", "def save(self):\n\t\tself.CONFIG.save()\n\t\tself.temp_files.save()", "def save(self):\n # TODO (Pierre): code", "def flush_account(self):\n if self.data_channel:\n if not self.data_channel.transfer_in_progress():\n self.data_channel.close()\n self.data_channel = None\n if self.data_server:\n self.data_server.close()\n self.data_server = None\n\n self.fs.rnfr = None\n self.authenticated = False\n self.username = \"\"\n self.attempted_logins = 0\n self.current_type = 'a'\n self.restart_position = 0\n self.quit_pending = False\n self.in_dtp_queue = None\n self.out_dtp_queue = None\n\n\n # --- connection", "def save_to_users(self):\n Data.add_data(self.user_data())", "def save_credentials(self):\n Credentials.credentials_list.append(self)", "def save(self):\n self.__session.commit()", "def save(self):\n self.__session.commit()", "def save(self):\n self.__session.commit()", "def save(self):\n self.__session.commit()", "def save(self):\n self.__session.commit()", "def save(self):\n self.__session.commit()", "def save(self):\n self.__session.commit()", "def save(self):\n self.__session.commit()", "def save_password(self):\n Credential.passwords.append(self)", "def save_model(self, request, obj, form, change):\n if change:\n obj.save()\n else:\n obj.set_password(obj.password)\n obj.save()", "def save(self):\n\n self.__session.commit()", "def save(self):\n\n self.__session.commit()", "def save(self, fname):\n pass", "def save(self):\n\n pass", "def save(self):\n # TODO: save the file", "def save (self):\n pass", "def save_credentials(self):\n self.credentials_list.append(self)", "def __save_password(self, filename, data, nonce, website): \n\n spinner = Halo(text=colored(\"Saving\", \"green\"), spinner=self.dots_, color=\"green\")\n spinner.start()\n if os.path.isfile(filename):\n try:\n with 
open(filename, 'r') as jsondata:\n jfile = json.load(jsondata)\n jfile[website][\"nonce\"] = nonce\n jfile[website][\"password\"] = data\n with open(filename, 'w') as jsondata:\n json.dump(jfile, jsondata, sort_keys=True, indent=4)\n except KeyError:\n with open(filename, 'r') as jsondata:\n jfile = json.load(jsondata)\n jfile[website] = {}\n jfile[website][\"nonce\"] = nonce\n jfile[website][\"password\"] = data\n with open(filename, 'w') as jsondata:\n json.dump(jfile, jsondata, sort_keys=True, indent=4)\n else: # initialize the file in case it doesn't exist off the start\n jfile = {website: {}}\n jfile[website][\"nonce\"] = nonce\n jfile[website][\"password\"] = data\n with open(filename, 'w') as jsondata:\n json.dump(jfile, jsondata, sort_keys=True, indent=4)\n spinner.stop()\n print(colored(f\"{self.checkmark_} Saved successfully. Thank you!\", \"green\"))", "def wmSaveMyAccount(self):\n user_id = uiCommon.GetSessionUserID()\n args = uiCommon.getAjaxArg(\"values\")\n\n u = catouser.User()\n u.FromID(user_id)\n\n if u.ID:\n # if a password was provided...\n # these changes are done BEFORE we manipulate the user properties for update.\n new_pw = uiCommon.unpackJSON(args.get(\"my_password\"))\n if new_pw:\n u.ChangePassword(new_password=new_pw)\n uiCommon.WriteObjectChangeLog(catocommon.CatoObjectTypes.User, u.ID, u.FullName, \"Password changed.\")\n\n # now the other values...\n u.Email = args.get(\"my_email\")\n u.SecurityQuestion = args.get(\"my_question\")\n u.SecurityAnswer = uiCommon.unpackJSON(args.get(\"my_answer\"))\n\n if u.DBUpdate():\n uiCommon.WriteObjectChangeLog(catocommon.CatoObjectTypes.User, u.ID, u.ID, \"User updated.\")\n\n return json.dumps({\"result\": \"success\"})", "def save_prefs(self):\n prefs_file = open(expanduser(self.prefs_path), 'w')\n pickle.dump(self.prefs, prefs_file)", "def saveAs(self):\n self.saveFile()", "def save_user(self):\n\n User.user_list.append(self)", "def save_user(self):\n User.user_list.append(self)", "def save_user(self):\n User.user_list.append(self)", "def save(self, obj):\n self.uow.save(obj)\n self.imap.save(obj)\n state(obj).session = self", "def save(self):\n pass", "def save(self):\n pass", "def save(self):\n pass", "def save(self):\n pass", "def save(self):\n pass", "def save(self):\n\n err = C.git_remote_save(self._remote)\n check_error(err)", "def save(self):\n db.session.commit()", "def save(self):\n users = User.getall()\n users[self.username] = dict(self)\n return self.db().put(self.udb, users)", "def save_backup(\n self):\n self.backup = self.data", "def save_login(mobile):\n mobile = Mobile(mobile)\n ktt = KTT(mobile)\n ktt.gen_device_code()\n ktt.get_api_start()\n time.sleep(4)\n ktt.post_login()\n time.sleep(4)\n ktt.get_user_info()\n user_info = (\n ktt.user_info[\"uid\"], ktt.user_info[\"name\"], ktt.user_info[\"mobile\"],\n ktt.user_info[\"father\"], ktt.user_info[\"balance\"], ktt.user_info[\"coin\"],\n ktt.device_code, ktt.token, ktt.mobile.os, ktt.mobile.brand, ktt.mobile.mac,\n ktt.mobile.android_id\n )\n print(user_info)\n\n # save one user info record and one user flag\n uis.save([user_info])\n read_flag = [(user_info[0],)]\n uis.save_flag(read_flag)", "def Save_txt(self, accounts):\n\n self.extension = \".txt\"\n\n self.sep = \"<--------Account-------->\\n\"\n\n colors.info(\"Saving as TXT in {}{}\".format(self.file, self.extension))\n\n try:\n with open(self.file + self.extension, \"a\") as output_:\n for account in accounts:\n if account.get(\"account_login\") == \"success\":\n if account.get(\"AccountType\") 
!= \"Spotify Free\":\n output_.write(self.sep)\n output_.write(\"Username: {}\\n\".format(account[\"Username\"]))\n output_.write(\"Password: {}\\n\".format(account[\"Password\"]))\n output_.write(\"As Combo: {}:{}\\n\".format(account[\"Username\"], account[\"Password\"]))\n output_.write(\"Account Type: {}\\n\".format(account[\"AccountType\"]))\n output_.write(\"Country: {}\\n\".format(account[\"Country\"]))\n output_.write(\"Admin: {}\\n\".format(account[\"Admin\"]))\n output_.close()\n colors.correct(\"Done! All saved successfully\")\n except Exception as e:\n colors.error(str(e))\n _exit(1)", "def put_account():\n\n # init vars\n user = g.user\n\n # pre-validate data\n errors = unique({}, Administrator, Administrator.username,\n request.json.get('username', None), update=user)\n\n errors = unique_email(errors, Administrator, Administrator.email,\n request.json.get('email', None), update=user)\n\n # validate data\n try:\n data = UserAccountAdminSchema().load(request.json)\n except ValidationError as err:\n errors = dict(list(errors.items()) + list(err.messages.items()))\n\n # return any errors\n if errors:\n return jsonify({\"error\": errors}), 400\n\n # save user account\n user.username = data['username'].strip()\n user.email = data['email'].strip()\n user.first_name = data['first_name'].strip()\n user.last_name = data['last_name'].strip()\n\n db.session.commit()\n\n # response\n return jsonify({'user_account': UserAccountAdminSchema().dump(user)}), 200", "def _save_credentials_if_changed(self):\n if list(self._cookiejar) != self._old_cookies:\n logger.debug(\"Saving credentials to file: %r\", str(self._cookiejar_filepath))\n dirpath = os.path.dirname(self._cookiejar_filepath)\n os.makedirs(dirpath, exist_ok=True)\n\n fd = os.open(self._cookiejar_filepath, os.O_WRONLY | os.O_CREAT | os.O_TRUNC, 0o600)\n self._cookiejar.save(fd)", "def saveUser(self):\n self.user[\"Video\"] = \"\"\n with open(self.user_file, \"w+\") as json_file:\n json.dump(self.user, json_file, indent=4)", "def save_credentials(self):\n Credentials.credential_list.append(self)", "def save(self):\r\n # os.mkdirs(DATADIR, exist_ok=True)\r\n savefile = os.path.join(wg.DATADIR, str(self.guild.id) + \".json\")\r\n\r\n savedata = {\r\n 'userchars': {id:self.usercharacters[id].to_data() for id in self.usercharacters},\r\n 'guildid': self.guild.id,\r\n 'last_known_name': self.guild.name,\r\n }\r\n\r\n with tempfile.NamedTemporaryFile(mode=\"w\", dir=wg.DATADIR) as outf:\r\n json.dump(savedata, outf, indent=1)\r\n if os.path.exists(savefile):\r\n os.unlink(savefile)\r\n os.link(outf.name, savefile)\r\n\r\n wg.log.info(f'Guild {debug_id(guild=self.guild)} saved. '\r\n f'{len(self.usercharacters)} user chars and {len(self.npcs)} npcs.')\r\n\r\n pass", "async def save(self):\n await config.member(self.member).set_raw(str(self.role.id), value=self.as_dict)", "def save(self) -> None:\n pass", "def save(self) -> None:\n pass", "def save(self) -> None:\n pass", "def _save_keys(self) -> None:\n algorithm = self.algorithm_combobox.currentText()\n filename = AesKeyGenerator(algorithm).save_session_key()\n msg_success(f\"Created keys as {filename}\")", "def save(self,cookie_jar):\n if not os.path.exists(self.path):\n os.makedirs(self.path)\n with open(self.file_path, \"wb\") as cookie_file:\n cookie_file.write(bytearray(pickle.dumps(cookie_jar)))", "def upload(self, file_obj):\n file_path = ''\n file_name = file_obj.filename\n file_class, file_type = file_obj.content_type.split('/')\n\n def allowed_file():\n return '.' 
in file_name and file_name.split('.')[1] in ALLOWED_EXTENSIONS\n\n try:\n log.debug('Try to save file <%s> for user ID: %s', file_name, self.current_user.login)\n\n if not allowed_file():\n log.debug('Filetype not allowed')\n return {'success': False, 'errorMessage': 'Filetype not allowed'}\n\n upload_dir = os.path.join(UPLOAD_FOLDER, self.current_user.login)\n file_path = os.path.join(upload_dir, file_name)\n\n if os.path.isfile(file_path):\n log.debug('File was uploaded already')\n return {'success': False, 'errorMessage': 'File was uploaded already'}\n\n if not os.path.exists(upload_dir):\n log.debug('--> Create path: %s', upload_dir)\n os.makedirs(upload_dir)\n\n # save in File System\n with open(file_path, \"ab\") as f:\n data = file_obj.body\n f.write(bytes(data))\n\n os_f_size = os.stat(file_path).st_size\n\n # check file quota\n if (self.user_api.user_db.used_file_quota + os_f_size) > self.user_api.user_db.file_quota:\n os.remove(file_path)\n log.error('You don\\'t have empty space!')\n return {'success': False, 'errorMessage': 'You don\\'t have empty space!'}\n\n file_db = FileDB()\n file_db.name = file_name\n file_db.type = file_type\n file_db.f_class = file_class\n file_db.size = os_f_size\n file_db.user_id = self.current_user.id\n file_db.date_load = datetime.now().strftime(DATE_FORMAT)\n\n self.db.create(file_db)\n\n log.debug('--> File has been updated in DB.')\n\n # update user\n self.user_api.user_db.used_file_quota += os.stat(file_path).st_size # bytes\n #self.user_api.db.update(self.user_api.user_db)\n\n self.db.commit()\n self.user_api.db.commit()\n\n log.debug('--> User in DB has been updated.')\n\n return {'success': True, 'id': file_db.id}\n except StandardError:\n self.db.session.rollback()\n if os.path.isfile(file_path):\n log.error('File <%s> has been deleted', file_path)\n os.remove(file_path)\n log.exception('Cannot upload file')\n return SERVER_ERROR", "def write_user(self, _user):\n try:\n self.conn_cursor.execute(\"INSERT INTO users (id,bank) VALUES (?, ?)\", (_user.id, _user.bank))\n except sqlite3.IntegrityError:\n pass\n self.conn_cursor.execute(\"UPDATE users SET bank=? 
WHERE id=?\", (_user.bank, _user.id ))", "def save_model(self, request, obj, form, change):\n if not change:\n if form.is_valid():\n user = form.save()\n user.identity = Users.SUPERVISOR\n user.set_password(form.data.get('password'))\n user.iCode = InviteCls.encode_invite_code(user.id)\n user.save()\n UserExtra.objects.create(uid=user)\n UserBase.objects.create(\n uid=user,\n phone=user.username\n )\n UserBusiness.objects.create(uid=user)\n else:\n super().save_model(request, obj, form, change)", "def save_user_profile(instance, **_):\n instance.profile.save()", "def save(self, path):\n pass", "def save(self, path):\n pass", "def save(self, path):\n pass", "def save(self)->None:\n database.cursor.execute(\n \"INSERT INTO users(firstname,lastname,othernames,email,phone,username,password,role) VALUES (%s,%s,%s,%s,%s,%s,%s,%s) RETURNING id\", (\n self.first_name,\n self.last_name,\n self.other_name,\n self.email,\n self.phone_number,\n self.user_name,\n self.password,\n self.is_admin\n ))\n super().save()", "def save(self):\n self.folder.client._perform_empty(\n \"PUT\", \"/projects/%s/managedfolders/%s\" % (self.folder.project_key, self.folder.odb_id),\n body=self.settings)", "def save_credential(self):\n Credentials.credentials_list.append(self)", "def save_info(self):\n if len(self.password_text.text()) < 8:\n message = Message(self.language[\"inv_pass\"], self.language[\"pass_not_long\"])\n warning_message = message.create_iw_message(self.language[\"ok\"], \"warning\")\n warning_message.exec()\n else:\n data_acces = DbMethods()\n response = data_acces.change_user_information(self.username,\n Hash.encrypt(self.password_text.text()))\n\n if response == True:\n message = Message(\n self.language[\"success\"], self.language[\"act_info\"])\n information_message = message.create_iw_message(\n self.language[\"ok\"], \"information\")\n information_message.exec()\n else:\n message = Message(self.language[\"error\"], self.language[\"inf_error\"])\n warning_message = message.create_iw_message(self.language[\"ok\"], \"warning\")\n warning_message.exec()\n self.close()", "def SaveData(self):\n \n try:\n with open(self.users_file, 'r+') as outfile:\n json.dump(self.user_db, outfile, indent=4)\n outfile.truncate()\n except:\n messagebox.showerror('Error',\n f'{self.users_file} could not be accessed.' \\\n 'New user information won\\'t be saved')", "def save(self):\n self.session.modified = True", "def update_user():", "def saveDatabase(database,user):\n pickle.dump(user, open(\"Users/\"+user.key, \"wb\"))" ]
[ "0.6917412", "0.6917412", "0.6715491", "0.6570291", "0.65482265", "0.62740266", "0.625781", "0.6185513", "0.60461766", "0.5973329", "0.5958735", "0.5941549", "0.5865786", "0.5791105", "0.57839775", "0.57787615", "0.5771968", "0.57635415", "0.5741722", "0.5736343", "0.572928", "0.5676985", "0.5668434", "0.56426984", "0.5614579", "0.5566974", "0.5533356", "0.5501332", "0.5484345", "0.5470361", "0.54692733", "0.5456285", "0.5443388", "0.54207486", "0.540081", "0.5387554", "0.538694", "0.5382654", "0.5381317", "0.5380583", "0.5380583", "0.5380583", "0.5380583", "0.5380583", "0.5380583", "0.5380583", "0.5380583", "0.53705883", "0.5369704", "0.53655875", "0.53655875", "0.53650105", "0.5359477", "0.5346224", "0.5343774", "0.5341521", "0.53265715", "0.532035", "0.5313483", "0.5303868", "0.5296102", "0.5283837", "0.5283837", "0.5278287", "0.5275361", "0.5275361", "0.5275361", "0.5275361", "0.5275361", "0.52676356", "0.52627474", "0.5260088", "0.524857", "0.52401596", "0.52398574", "0.5236509", "0.5227897", "0.5218339", "0.5210846", "0.5209482", "0.5207676", "0.52061397", "0.52061397", "0.52061397", "0.5206061", "0.519945", "0.5198795", "0.519674", "0.518807", "0.5187611", "0.5181044", "0.5181044", "0.5181044", "0.51775444", "0.5167616", "0.515967", "0.51476854", "0.51377666", "0.51334304", "0.5131595", "0.5126901" ]
0.0
-1
Unicode representation for the ftp account.
def __unicode__(self): return unicode(self.name) + '@' + unicode(self.vhost)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def getUniStr(self):\n return(\"%s/%s\"%(self.token.id,self.type))", "def __unicode__(self):\n # TODO: Curently this just stores/returns the file path.\n return unicode(self.path).encode('utf-8')", "def toString(self) -> unicode:\n ...", "def toString(self) -> unicode:\n ...", "def show_account(self, obj): # pylint: disable=no-self-use\n return '%s\\\\%s' % (obj.domain, obj.username)", "def _dec_con(contact):\n return u\"%s (%s)\" % (contact.display_name.decode(\"utf-8\"),\n contact.account.decode(\"utf-8\"))", "def __unicode__(self):\n return str(self).decode('ascii')", "def __bytes__(self):\n return unicode(self).encode('utf-8')", "def getUnicodeEncoding(self):\n return self.__unicodeEncoding", "def _namespace_to_unicode(self):\n\t\treturn u\":\".join(self.namespace_parts)", "def __str__(self):\n return unicode(self).encode('utf-8')", "def __str__(self):\n return '%d %d %d %s' % (self.__wd, self.mask, self.cookie, self.name)", "def to_unicode(data):\n return to_string(data)", "def __unicode__(self):\n return unicode(self).encode('utf-8')", "def __unicode__(self):\n return unicode(self.GetString())", "def __unicode__(self):\n\n return unicode(self.user)", "def __str__(self):\n return self.__unicode__().encode('utf-8').decode()", "def codec(cls) -> str:\n return 'UTF8'", "def __repr__(self):\n r = '<Character id:%s[%s] name:_%s_>' % (self.characterID,\n self.accessSystem,\n self.get('name'))\n if isinstance(r, unicode): r = r.encode('utf_8', 'replace')\n return r", "def getfilesystemencoding(): # real signature unknown; restored from __doc__\n return \"\"", "def __repr__(self):\n return '{}'.format(self.username)", "def __str__(self):\n return self.get('name', u'').encode('utf_8', 'replace')", "def __repr__(self):\n return \"{}\".format(self.name.encode('unicode-escape'))", "def _namespace_to_ascii(self):\n\t\tparts = [part.encode(\"utf-8\") for part in self.namespace_parts]\n\t\treturn \":\".join(parts)", "def getUniStr(self):\n return(\"%s.%s.%s-%s\"%(self.getPrefix,self.getSentenceId,\n self.offset_bgn,self.offset_end))", "def __unicode__(self):\n\n d = ((3, \".\"), (7, \".\"), (11, \"-\"))\n s = list(map(str, self.cpf))\n\n for i, v in d:\n s.insert(i, v)\n\n r = ''.join(s)\n\n return r", "def __str__(self):\n # TODO: Curently this just stores/returns the file path.\n return unicode(self.path).encode('utf-8')", "def _encode_userinfo_part(text, maximal=True):\n if maximal:\n bytestr = normalize('NFC', text).encode('utf8')\n return u''.join([_USERINFO_PART_QUOTE_MAP[b] for b in bytestr])\n return u''.join([_USERINFO_PART_QUOTE_MAP[t] if t in _USERINFO_DELIMS\n else t for t in text])", "def get_display_name(self):\n\n return to_unicode(self.uid)", "def __str__(self):\n return str(self.uid)", "def get_valueTransferEncoding(self):\n return \"utf-8\"", "def __str__(self):\n return (\n f\"ForgotPasswordData: [forgot_code: {self.forgot_code}, username: {self.username}, \"\n f\"expires: {self.expires}, deleted: {self.deleted}]\"\n )", "def name(self):\n name_length = self.unpack_word(0x48)\n unpacked_string = self.unpack_string(0x4C, name_length)\n if self.has_ascii_name():\n return unpacked_string.decode(\"windows-1252\")\n return unpacked_string.decode(\"utf-16le\")", "def stringify(self):\n return self.char", "def su(value):\n return safe_unicode(value, encoding=get_charset())", "def get_encoding(self): # real signature unknown; restored from __doc__\n return \"\"", "def __str__(self):\n return \"UID {0}, Key {1}, Cipher {2}, PRNG {3}\".format(hex(self.uid), \n hex(self.key), 
hex(self.cipher), hex(self.prng))", "def __repr__(self):\n return \"<Username {}>\".format(self.username)", "def info(self):\n if str.__str__(self) in UID_dictionary:\n return UID_dictionary[self][2]\n\n return ''", "def display_unicode(self, string):\n if string is None:\n return ''\n return string.decode(\"utf16\", \"ignore\").encode(\"ascii\", 'backslashreplace')", "def __repr__(self):\n\n return f\"Ufd(\"\\\n f\"title=\\\"{self.title}\\\",\"\\\n f\" icon=\\\"{self.icon}\\\",\"\\\n f\" show_hidden={self.show_hidden},\"\\\n f\" include_files={self.include_files},\"\\\n f\" multiselect={self.multiselect},\"\\\n f\" select_dirs={self.select_dirs},\"\\\n f\" select_files={self.select_files},\"\\\n f\" unix_delimiter={self.unix_delimiter})\"\\\n f\" stdout={self.stdout})\"\\\n f\" @ {hex(id(self))}\"", "def __unicode__(self):\n name = self.group.user.name or self.nickname or self.fullname or self.group.user\n return unicode(name)", "def name(self):\n if not self.has_name():\n return \"\"\n name_length = self.unpack_word(0x2)\n unpacked_string = self.unpack_string(0x14, name_length)\n if self.has_ascii_name():\n return unpacked_string.decode(\"windows-1252\")\n return unpacked_string.decode(\"utf-16le\")", "def __repr__(self):\n return \"<{} {}>\".format(self.__class__.__name__, self.username)", "def __repr__(self):\n return f\"<ForgotPasswordData '{self.forgot_code}','{self.username}'>\"", "def __str__(self):\r\n return str(self.Name) + ':' + self.toHex()", "def __unicode__(self):\r\n return unicode(repr(self))", "def _fn2ascii(self, filename): \n nameBase, ext = Path(Path(filename).basename()).splitext()\n try: nameBase.encode('ascii')\n except UnicodeEncodeError:\n nameBase = nameBase.encode('utf-8').encode('hex')\n try:\n ext = ext.encode('ascii')\n except UnicodeEncodeError:\n ext = ext.encode('utf8').encode('hex')\n return str(nameBase + ext)", "def account(self) -> str:\n return self._account", "def account(self) -> str:\n return self._account", "def __unicode__(self):\n d = ((2, \".\"), (6, \".\"), (10, \"/\"), (15, \"-\"))\n s = list(map(str, self.cnpj))\n \n for i, v in d:\n s.insert(i, v)\n \n r = ''.join(s)\n \n return r", "def encoding(self) -> str:\n return self._encoding", "def __repr__(self):\n\n return \"\\n<User ID: {} Name: {} {}>\\n\".format (self.user_id,\n self.fname,\n self.lname)", "def __bytes__(self):\n from pandas.core.config import get_option\n\n encoding = get_option(\"display.encoding\")\n return self.__unicode__().encode(encoding, 'replace')", "def get_domain_passwd(self):\n return self.domain_passwd.get_text()", "def charset(self) -> str:\n return pulumi.get(self, \"charset\")", "def __str__(self):\r\n return unicode(self.header)", "def getName(self) -> unicode:\n ...", "def getName(self) -> unicode:\n ...", "def getName(self) -> unicode:\n ...", "def getName(self) -> unicode:\n ...", "def getName(self) -> unicode:\n ...", "def __repr__(self):\n\n return f\"\"\"<User user_id={self.user_id} fname={self.fname}\n lname={self.lname} email={self.email} phone={self.phone}\n password={self.password}>\"\"\"", "def account_name(self):\n return self.civic_no_city()", "def __str__(self):\n local_s = 'F30A: '\n local_s += '\\n'\n return local_s", "def u(obj):\n return obj if isinstance(obj, unicode) else unicode(obj) # noqa: F821 pylint: disable=undefined-variable", "def full_name(self):\n return u\"{} {}\".format(self.pref_first_name(), self.last_name)", "def to_unicode(data):\n if isinstance(data, bytes):\n return data.decode('utf-8')\n else:\n return data", "def 
username(self) -> str:", "def username(self) -> str:", "def name(self) -> unicode:\n ...", "def __str__(self):\n return bytes_to_string(self._bytes)", "def getFolderPath(self) -> unicode:\n ...", "def __repr__(self):\n return f\"<User({self.email!r})>\"", "def __str__(self):\n return self.user_profile.user.username + ' ' + self.language.name", "def __repr__(self):\n return '\\n<User ID: =%s Email: =%s Password: =%s First Name: =%s Last Name: =%s> Subscription: =%s>' % (self.user_id, self.email, self.password, self.fname, self.lname, self.subscription)", "def __unicode__(self):\n prepr = com.pprint_thing(self, escape_chars=('\\t', '\\r', '\\n'),\n quote_strings=True)\n return \"%s(%s, dtype='%s')\" % (type(self).__name__, prepr, self.dtype)", "def __unicode__(self):\n return u'%s' % str(self)", "def to_unicode(session):\n return six.text_type(session.data)", "def uf(self):\n return self._uf", "def __str__(self):\n return 'a/c %s (%s)' % (self.account_number, self.owner)", "def get_user_binary(self):\n pass", "def __unicode__(self):\n return unicode(self.asPyDict())", "def password(self) -> str:", "def encoding(self):\n return self.get_encoding()", "def __repr__(self):\r\n\r\n return f\"<User info: id = {self.user_id}, name = {self.fname} {self.lname} email = {self.email}>\"", "def __repr__(self) -> str:\n return '<User({username!r})>'.format(username=self.username)", "def user_account(self) -> str:\n warnings.warn(\"user_account() is deprecated.\", DeprecationWarning)\n return self._uid", "def _hidden_in_unicode(self, txt):", "def getUniStr(self):\n return(\"%s/%s\"%(Entity.getUniStr(self),self.semanticId))", "def __str__(self):\n\n if compat.PY3:\n return self.__unicode__()\n return self.__bytes__()", "def encoding(self):\n return self._enc", "def _get_address(self):\n return utf82unicode(pn_terminus_get_address(self._impl))", "def __unicode__(self):\n try:\n return unicode(self.srs)\n except:\n return unicode(self.wkt)", "def generateUsername(self):\n retval= \"{0}.{1}\".format( self.first_name.split()[0].lower(),\n self.last_name.split()[-1].lower() )\n \n return toAscii(retval)", "def print_private(self):\n print('Account Number : ', self.__Account)\n return \"\"", "def __str__(self):\r\n return f'{self.user}'", "def __str__(self):\r\n return f'{self.user}'", "def __str__(self):\r\n return f'{self.user}'", "def __str__(self):\r\n return f'{self.user}'" ]
[ "0.62673855", "0.6021327", "0.5987862", "0.5987862", "0.5913589", "0.5901801", "0.579293", "0.5770792", "0.573807", "0.5736912", "0.567259", "0.56630135", "0.5634768", "0.5624955", "0.56201947", "0.5554798", "0.55528706", "0.55185616", "0.54974395", "0.54740465", "0.546071", "0.5458786", "0.5444764", "0.5434871", "0.54318225", "0.54228216", "0.5419672", "0.5417594", "0.5412733", "0.54085815", "0.5406254", "0.53950334", "0.53774303", "0.537357", "0.5366278", "0.53615385", "0.53584397", "0.5354118", "0.53438574", "0.5339147", "0.5322614", "0.5269235", "0.52513534", "0.52236193", "0.5216127", "0.52102864", "0.5196245", "0.51924896", "0.51818943", "0.51818943", "0.5175279", "0.5148148", "0.5124578", "0.5119399", "0.51161367", "0.5115144", "0.51100105", "0.5108803", "0.5108803", "0.5108803", "0.5108803", "0.5108803", "0.51017064", "0.50928587", "0.50875795", "0.5086317", "0.5085604", "0.5084571", "0.5080653", "0.5080653", "0.5077247", "0.5075152", "0.5073282", "0.50678754", "0.5065193", "0.50607437", "0.50598484", "0.505627", "0.5056195", "0.5054733", "0.503568", "0.5031273", "0.5025739", "0.5022186", "0.5017806", "0.5005687", "0.5005071", "0.5003522", "0.49991122", "0.49965504", "0.49954924", "0.4994748", "0.49930423", "0.49930373", "0.499015", "0.49859548", "0.4975329", "0.4975329", "0.4975329", "0.4975329" ]
0.5576632
15
Returns the correct ressource.
def utilization(user, ressource): if ressource == 'accounts': return Account.objects.filter(vhost__in=list(get_vhosts(user))).count() return None
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_resource(self):\n from rowgenerators import parse_app_url # Here, to break an import cycle\n\n self._resource = self._downloader.download(self.inner)\n\n\n ru = parse_app_url(self._resource.sys_path,\n downloader=self.downloader,\n scheme_extension=self.scheme_extension,\n **self.frag_dict)\n\n\n return ru", "def get_resource_url(self, resource_name):\r\n return self.__resource_meta.get(resource_name,{}).get(\"resource\", None)", "def getResource(self):\n return self.__resource;", "def getResource(self):\n pass;", "def getResource(self):\n\n return self.__resource;", "def _get_source_rd(self):\n return self.__source_rd", "def resource_uri(self) -> Optional[str]:\n return pulumi.get(self, \"resource_uri\")", "def resource_uri(self) -> Optional[str]:\n return pulumi.get(self, \"resource_uri\")", "def source_resource_path(self) -> Optional[str]:\n return pulumi.get(self, \"source_resource_path\")", "def get_url(self, resource_name):\r\n return self.__resource_meta.get(resource_name,{}).get(\"url\", None)", "def source(self) -> XMLResource:\n return self.schema.source", "def resource(self):\n return self._resource", "def resource(self):\n return self._resource", "def resource(self):\n return self._resource", "def resource(self):\n return self._resource", "def resource(self):\n return self._resource", "def resource(self):\n return self._resource", "def resource(self):\n return self._resource", "def get_resource_loader(self):\n return self.game.resource_loader", "def get_resource(self):\n raise errors.Unimplemented()", "def getResource(self, resourceName, default=None, useCache=True, **kwargs):\n logger.debug(\"Requesting resource %r\", resourceName)\n if resourceName in self.__resourcesD:\n return self.__resourcesD[resourceName](self.__cfgOb, self.__configName, self.__cachePath, useCache=useCache, **kwargs)\n else:\n logger.error(\"Request for unsupported resource %r returning %r\", resourceName, default)\n #\n return default", "def getResource(resname, loc = None):\n # check the HOME for personal config file\n prv_filename = os.path.join(os.getenv(\"HOME\"), \".aphla\", resname)\n if os.path.exists(prv_filename):\n return prv_filename\n elif loc and resource_exists(loc, resname):\n # use the config within distribution\n return resource_filename(loc, resname)\n else:\n return None", "def resource_uri(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"resource_uri\")", "def resource_uri(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"resource_uri\")", "def resource_uri(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"resource_uri\")", "def resource_uri(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"resource_uri\")", "def resource(self):\n return self.properties.get('resource',\n Entity(self.context, ResourcePath(\"resource\", self.resource_path)))", "def getGlobalSelf(self):\r\n transports = self.reactor.getReaders()\r\n for transport in transports:\r\n try:\r\n resource = transport.factory.resource\r\n if isinstance(resource, self.__class__) and resource.port == self.port:\r\n return resource\r\n except AttributeError:\r\n pass\r\n return", "def getResourceFromHydroshare(self, resid, destination=None):\n print('Not Implemented')", "def get_datasource_of():\n global datasource_of\n\n if not datasource_of:\n datasource_of = stixhelpers.datasource_of()\n \n return datasource_of", "def get_url(self):\n if self.url:\n return self.url\n # if we have a uuid and happen to know the URL for it, use that\n elif self.uuid and 
PathIdentifier.repo_hints.has_key(self.uuid):\n self.url = PathIdentifier.repo_hints[self.uuid] + self.repo_relative_path\n PathIdentifier.locobjs[self.url] = self\n return self.url\n # if we've only seen one rep, use that (a guess, but an educated one)\n elif not self.uuid and len(PathIdentifier.repo_hints) == 1:\n uuid, root = PathIdentifier.repo_hints.items()[0]\n if uuid:\n self.uuid = uuid\n PathIdentifier.locobjs['uuid://%s%s' % (uuid, self.repo_relative_path)] = self\n self.url = root + self.repo_relative_path\n PathIdentifier.locobjs[self.url] = self\n report(\"Guessing that '%s' refers to '%s'\" % (self, self.url))\n return self.url\n else:\n error(\"Cannot determine URL for '%s'; \" % self +\n \"Explicit source argument (-S/--source) required.\\n\")", "def get_resource(res_name, res_type=\"icons\"):\n own_path = os.path.dirname(__file__)\n resource_path = os.path.abspath(os.path.join(own_path, os.pardir, \"resources\", res_type))\n return os.path.join(resource_path, res_name)", "def get_ressource_path(ressource_name=None):\n\n\t\tressource_folder = ['docs', 'ressources']\n\n\t\tif ressource_name is not None:\n\t\t\tressource_folder.append(ressource_name)\n\n\t\tressource_path = os.path.join(*ressource_folder)\n\n\t\tif os.path.isdir(ressource_path) and not ressource_path.endswith('/'):\n\t\t\tressource_path += '/'\n\n\t\treturn ressource_path", "def get_recipe_resource():\n return os.getenv(\"SKU_CUSTOM_RECIPE_RESOURCE_FOLDER\")", "def resource_uri(self):\n primary_key_value = getattr(self, self.primary_key(), None)\n return '/{}/{}'.format(self.endpoint(), primary_key_value)", "def resource(request):\n local_path = os.path.dirname(request.module.__file__)\n return lambda *args: get_resource_path(args, local_path)", "def resource(self, n):\n\n cfg = self.read()\n\n for res in cfg.get('Resources', []):\n res_name = res.get('Resource')\n\n if res_name == n:\n return ConfigResource(res)", "def GetResource(self, name):\r\n matches = [x for x in self.resources if x.name == name]\r\n if len(matches) == 1:\r\n return matches[0]\r\n elif len(matches) > 1:\r\n raise errors.ConfigError(\"Multiple resources with the name [%s]\" % name)\r\n else:\r\n return None", "def get_datasource(self):\n return None", "def target_resource(self):\n return self._target_resource", "def src(self):\n if self._src:\n return self._src\n\n # Parse and create a new client\n conn = parse_url(self.source_url)\n client = get_client(conn)\n self._src = client\n return self._src", "def get_reference(self):\n return self.resource.url", "def get_url(self):\n return self.resource.url", "def lookup(self):\r\n return resources.Lookup(self)", "def target_resource_path(self) -> Optional[str]:\n return pulumi.get(self, \"target_resource_path\")", "def get_resource(self):\n\n return self", "def _get_source(self, uri: str) -> Optional[_Source]:\n\n for source in self._sources:\n if uri == source.uri:\n return source\n\n return None", "def get_resource_base_path(self): # real signature unknown; restored from __doc__\n return \"\"", "def get_snek_resource(resource_name):\n try:\n path = next(_resources.path(\"snek5000.resources\", resource_name).gen)\n except AttributeError:\n path = _resources.path(\"snek5000.resources\", resource_name)\n return path", "def get_source(self):\n\t\treturn self.source.get_source()", "def get(owner_name, resource_name):\n resource = get_node(owner_name, resource_name)\n return resource if isinstance(resource, Resource) else None", "def resource_url(self, resource):\n raise 
NotImplementedError(\"Runtime needs to provide resource_url()\")", "def get_resource(self, rsc_path):\n\n\t\ttry:\n\t\t\tfrom pkg_resources import resource_filename\n\t\t\treturn resource_filename(__name__, rsc_path)\n\t\texcept ImportError:\n\t\t\treturn os.path.join(os.path.dirname(__file__), rsc_path)", "def resource(self):\n log.warning(\"resource property deprecated. Use boundjid.resource\")\n return self.boundjid.resource", "def getResource(self, file_name):\n path = os.path.join(os.path.dirname(__file__), \"resource\", file_name)\n return open(path)", "def resource_reference(self):\n return self.properties.get(\"resourceReference\", ResourceReference())", "def fetch_resources(uri, rel):\n path = '/usr/share/django/' + uri\n return path", "def get_uri(self):\r\n return self.uri", "def get_default_resource(self, name):\n if not self._default_resource:\n self._default_resource = self.get(name=name)\n\n return self._default_resource", "def source(self) -> str | Path:\n return self._source", "def get_resource(self, name: str) -> ResourceBase:\n resource = self.get_payload(name)\n if not isinstance(resource, ResourceBase):\n raise TypeError(\"Resource was expected but not found\")\n return resource", "def get_src(self):\n return self.isy.prog_get_src(self._mydict['id'])", "def get_recipe_resource():\n return os.getenv(\"DKU_CUSTOM_RESOURCE_FOLDER\")", "def getResource(self):\n return self.serviceClass.app.resource()", "def get_settings_resource(res_type, abbr, res_name):\n\t\n\tif zen_settings.has_key(res_type):\n\t\tresource = zen_settings[res_type];\n\t\tif (has_deep_key(resource, [res_name, abbr])):\n\t\t\treturn resource[res_name][abbr]\n\t\telif 'extends' in resource:\n\t#\t\tfind abbreviation in ancestors\n\t\t\tfor v in resource['extends']:\n\t\t\t\tif has_deep_key(zen_settings, [v, res_name, abbr]):\n\t\t\t\t\treturn zen_settings[v][res_name][abbr]\n\treturn None;", "def get_uri(self):\n if self._uri is None:\n self._uri = \"{0}{1}/{2}\".format(\n self.session.resource_prefix,\n self.base_uri,\n self.ip_or_ifname_or_group_name,\n )\n\n return self._uri", "def get_source(self):", "def get_source(self):\n return self.source", "def _get_resource_property(self, resource_name, property_name, default_value=None):\n if resource_name == \"ExperimentDb\":\n return self._get_experiment_db_property(property_name, default_value)\n elif resource_name == \"ModelDb\":\n return self._get_model_db_property(property_name, default_value)\n elif resource_name == \"JoinDb\":\n return self._get_join_db_property(property_name, default_value)\n elif resource_name == \"IAMRole\":\n return self._get_iam_role_property(property_name, default_value)\n else:\n return None", "def source(self) -> Optional[str]:\n return pulumi.get(self, \"source\")", "def getResource(self, QQuickWindow, *__args): # real signature unknown; restored from __doc__ with multiple overloads\n pass", "def getClassResource(self, className):\n\t\t#To prevent resources from having another type\n\t\tif(className == \"Resource\"):\n\t\t\treturn None\n\t\t#Now check if we have a mapping in here\n\t\tif(className in self._classMapping):\n\t\t\treturn self._classMapping[className]\n\t\telse:\n\t\t\traise Exception(\"Given class name \\\"\" + className + \"\\\"is not associated with a uri!\")", "def get_resource(self, *args, **kwargs):\n target_uri = self._build_uri(**kwargs)\n resource_type = None\n if args:\n resource_type = args[2]\n elif not args and kwargs:\n resource_type = kwargs.get('resource_level')\n return self.get_request(\n 
target_uri, resource_type, kwargs.get('params'))", "def getSource(self):\n return urllib2.urlopen(Parser.SOURCE_URL)", "def source_uri(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"source_uri\")", "def resource_path(name):\n return os.path.join(\n os.path.dirname(__file__), 'images', 'resource', name)", "def resource(self):\n # Response may be an empty string if comes from an tornado\n # client exception\n if self._resource is None:\n # sanity values\n resbuffer = \"\"\n marshalto = 'text/plain'\n # parse response, if any\n if self._response is not None:\n resbuffer = self._response.buffer\n marshalto = self._response.headers.get_list('content-type')\n marshalto = marshalto[0] if marshalto else 'text/plain'\n # get a valid resource\n converter = Converters.for_type(marshalto.split(';')[0])\n self._resource = converter.unmarshal(resbuffer)\n assert self._resource is not None\n return self._resource", "def get_image_url():", "def get_uri(self):\n return self.__uri", "def get_resource_from_class(klass):\n return _class_to_resources.get(klass, None)", "def _source(self, namespace):\n if not namespace:\n source = self._default_source\n else:\n source = self._sources.get(namespace)\n if not source:\n raise GroupResolverSourceError(namespace or \"<default>\")\n return source", "def source(self):\n if self._source not in ['Idle', 'Network']:\n return self._source\n else:\n return None", "def source_resource_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"source_resource_id\")", "def source_resource_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"source_resource_id\")", "def get_agent_resource_url(ctx, agent_config, resource):\n if agent_config.get(resource):\n origin = utils.get_manager_file_server_blueprints_root_url() + \\\n '/' + ctx.blueprint.id + '/' + agent_config[resource]\n else:\n resource_path = DEFAULT_AGENT_RESOURCES.get(resource)\n if not resource_path:\n raise NonRecoverableError('no such resource: {0}'.format(resource))\n if resource == 'agent_package_path':\n origin = utils.get_manager_file_server_url() + \\\n resource_path.format(agent_config['distro'],\n agent_config['distro_codename'])\n else:\n origin = utils.get_manager_file_server_url() + \\\n resource_path.format(agent_config['distro'])\n\n ctx.logger.debug('resource origin: {0}'.format(origin))\n return origin", "def image_reference(self, image_id):\n info = self.image_info[image_id]\n if info['source'] == 'local':\n return info['source']\n else:\n super(self.__class__).image_reference(self, image_id)", "def resource(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"resource\")", "def resource(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"resource\")", "def resource(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"resource\")", "def get_resource_path():\n return os.path.join(os.path.dirname(__file__), \"resources\") + os.path.sep", "def get_uri(self):\n return self.url", "def _get_resource(self, label: str, source: dict, resource_type: str):\r\n try:\r\n return source[label]\r\n except KeyError:\r\n raise ValueError(\"Cannot find {0} with label '{1}'.\\nExisting {0} labels: {2}\".format(\r\n resource_type, label, list(source.keys())))", "def source(self) -> str | Path:\n ...", "def image_reference(self, image_id):\n info = self.image_info[image_id]\n if info[\"source\"] == \"pedestrian\":\n return info[\"path\"]\n else:\n super(self.__class__, self).image_reference(image_id)", "def source_url(self):\n return 
self._source_url", "def getSourceURL(self):\n return self.SourceURL", "def source_resource_type(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"source_resource_type\")", "def get_source(self) -> Optional[str]:\n return self._source", "def get_url(self, source):\n if source == 'nomads':\n if self.model == 'rap':\n base = 'https://nomads.ncep.noaa.gov/pub/data/nccf/com/rap/prod/'\n path = f'rap.{self.date:%Y%m%d}/rap.t{self.date:%H}z.awip32f{self.fxx:02d}.grib2'\n else:\n base = 'https://nomads.ncep.noaa.gov/pub/data/nccf/com/hrrr/prod/'\n if self.model == 'hrrr':\n path = f\"hrrr.{self.date:%Y%m%d}/conus/hrrr.t{self.date:%H}z.wrf{self.field}f{self.fxx:02d}.grib2\"\n elif self.model == 'hrrrak':\n path = f\"hrrr.{self.date:%Y%m%d}/alaska/hrrr.t{self.date:%H}z.wrf{self.field}f{self.fxx:02d}.ak.grib2\"\n elif source == 'aws':\n if self.model == 'rap':\n base = 'https://noaa-rap-pds.s3.amazonaws.com/'\n path = f'rap.{self.date:%Y%m%d}/rap.t{self.date:%H}z.awip32f{self.fxx:02d}.grib2'\n else:\n base = 'https://noaa-hrrr-bdp-pds.s3.amazonaws.com/'\n if self.model == 'hrrr':\n path = f\"hrrr.{self.date:%Y%m%d}/conus/hrrr.t{self.date:%H}z.wrf{self.field}f{self.fxx:02d}.grib2\"\n elif self.model == 'hrrrak':\n path = f\"hrrr.{self.date:%Y%m%d}/alaska/hrrr.t{self.date:%H}z.wrf{self.field}f{self.fxx:02d}.ak.grib2\"\n elif source == 'google':\n if self.model == 'rap':\n base = 'https://storage.googleapis.com/rapid-refresh/'\n path = f'rap.{self.date:%Y%m%d}/rap.t{self.date:%H}z.awip32f{self.fxx:02d}.grib2'\n else:\n base = 'https://storage.googleapis.com/high-resolution-rapid-refresh/'\n if self.model == 'hrrr':\n path = f\"hrrr.{self.date:%Y%m%d}/conus/hrrr.t{self.date:%H}z.wrf{self.field}f{self.fxx:02d}.grib2\"\n elif self.model == 'hrrrak':\n path = f\"hrrr.{self.date:%Y%m%d}/alaska/hrrr.t{self.date:%H}z.wrf{self.field}f{self.fxx:02d}.ak.grib2\"\n elif source == 'azure':\n if self.model == 'rap':\n base = 'https://noaarap.blob.core.windows.net/rap'\n path = f'rap.{self.date:%Y%m%d}/rap.t{self.date:%H}z.awip32f{self.fxx:02d}.grib2'\n else:\n base = 'https://noaahrrr.blob.core.windows.net/hrrr/'\n if self.model == 'hrrr':\n path = f\"hrrr.{self.date:%Y%m%d}/conus/hrrr.t{self.date:%H}z.wrf{self.field}f{self.fxx:02d}.grib2\"\n elif self.model == 'hrrrak':\n path = f\"hrrr.{self.date:%Y%m%d}/alaska/hrrr.t{self.date:%H}z.wrf{self.field}f{self.fxx:02d}.ak.grib2\"\n elif source.startswith('pando'):\n if source[-1] == '2':\n gateway = 2\n else:\n gateway = 1\n if self.model == 'rap':\n return None # No RAP data on Pando\n else:\n base = f'https://pando-rgw0{gateway}.chpc.utah.edu/'\n path = f\"{self.model}/{self.field}/{self.date:%Y%m%d}/{self.model}.t{self.date:%H}z.wrf{self.field}f{self.fxx:02d}.grib2\"\n \n return base+path", "def Source(self):\r\n\t\treturn self._get_attribute('source')", "def source(self) -> str:\n return pulumi.get(self, \"source\")" ]
[ "0.6944467", "0.67283875", "0.6657046", "0.66149676", "0.6538225", "0.6410846", "0.6354545", "0.6354545", "0.6349629", "0.6121578", "0.6104746", "0.6100484", "0.6100484", "0.6100484", "0.6100484", "0.6100484", "0.6100484", "0.6100484", "0.6060203", "0.6019287", "0.6013599", "0.59908354", "0.5986625", "0.5986625", "0.5986625", "0.5986625", "0.59565276", "0.59444267", "0.5942815", "0.5911147", "0.590962", "0.58950317", "0.5856473", "0.5802219", "0.5790799", "0.5786826", "0.578598", "0.5777883", "0.5773367", "0.5759521", "0.5744814", "0.574458", "0.5731107", "0.5730714", "0.5730391", "0.5728715", "0.57222784", "0.57145876", "0.57101655", "0.57084453", "0.57043695", "0.5685433", "0.5661386", "0.5651863", "0.5635972", "0.5635502", "0.56291044", "0.562369", "0.55991966", "0.5592394", "0.55765796", "0.55725425", "0.5567521", "0.556627", "0.55584395", "0.5548912", "0.55450016", "0.5540151", "0.55388975", "0.55321956", "0.55278957", "0.55210316", "0.55028814", "0.5497861", "0.54846513", "0.54842573", "0.5467998", "0.54554915", "0.54504174", "0.5449793", "0.5449637", "0.54466134", "0.5444022", "0.5444022", "0.54391223", "0.5434896", "0.54347754", "0.54347754", "0.54347754", "0.54219973", "0.54145575", "0.54129374", "0.54072315", "0.5393278", "0.5391767", "0.53904766", "0.537945", "0.53708225", "0.5364887", "0.53644776", "0.53633493" ]
0.0
-1
Returns true if the passed pcre regex matches
def match(tgt, opts=None, minion_id=None):
    if not opts:
        opts = __opts__
    if not minion_id:
        minion_id = opts.get("id")
    return bool(re.match(tgt, minion_id))
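A minimal usage sketch of the anchored-match behavior above (hypothetical values; the `__opts__` global is injected by the host framework at runtime, so a plain dict stands in for it here):

    import re

    # Stand-in for the framework-injected __opts__ dict (assumed shape).
    opts = {"id": "web-server-01"}

    # re.match anchors the pattern at the start of the string, so this
    # succeeds for any minion id beginning with "web-".
    print(bool(re.match(r"web-.*", opts.get("id"))))  # True
    print(bool(re.match(r"db-.*", opts.get("id"))))   # False

Because `re.match` only anchors at the start of the string, a pattern needs a trailing `$` (or `re.fullmatch`) when a full-string match is intended.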
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _is_regex_match(s, pat):\n\n pat = pat.rstrip()\n m = re.search(Settings._REPAT, pat)\n if m:\n flags_combined = 0\n if m.group('flag'):\n char_to_flag = {\n 'A':re.A, 'I':re.I, 'L':re.L, 'M':re.M, 'S':re.S, 'X':re.X}\n for flag in list(m.group('flag')):\n flags_combined |= char_to_flag[flag]\n return bool(re.search(m.group('pat'), s, flags_combined))\n raise InvalidRegexError(pat)", "def REGEXMATCH(text, regular_expression):\n return bool(re.search(regular_expression, text))", "def match(self, regexp):\n try:\n self.rematch = regexp.match(self.matchstring)\n except AttributeError:\n self.rematch = re.match(regexp, self.matchstring)\n return bool(self.rematch)", "def matches_regex(self, regex):\n match = re.match(regex, self.text)\n if not match:\n return False\n\n self.regex_groups = match.groups()\n return True", "def regex_match(text, pattern):\n try:\n pattern = re.compile(\n pattern,\n flags=re.IGNORECASE + re.UNICODE + re.MULTILINE,\n )\n except BaseException:\n return False\n return pattern.search(text) is not None", "def _is_valid_regex(regex_pattern: str, text: str) -> bool:\n match = re.match(regex_pattern, text)\n return match is not None", "def upy_re_match(regex,value):\n reg = re.compile(regex)\n return reg.match(value)", "def search(self, regexp):\n try:\n self.rematch = regexp.search(self.matchstring)\n except AttributeError:\n self.rematch = re.search(regexp, self.matchstring)\n return bool(self.rematch)", "def regMatch(value, regex):\n if regex == \"*\": # Accounts for python wildcard bug\n regex = \"(.*)\"\n pattern = re.compile(regex)\n match_obj = pattern.search(value)\n return bool(match_obj)", "def regex_search(regex, *fields):\n for match_field in fields:\n if re.search(regex, match_field):\n return True\n return False", "def regexp(expr, item):\n reg = re.compile(expr)\n return reg.search(item) is not None", "def regexp_predicate(value):\n return re.compile(value).match", "def validate_regex(self, pattern, flags=0):\r\n try:\r\n re.compile(pattern, flags)\r\n return False\r\n except:\r\n errormsg(_(\"Invalid Regular Expression!\"))\r\n error(traceback.format_exc())\r\n return True", "def get_match_with_re(pattern, unknown):\n pattern, unknown = _check_params(pattern, unknown)\n regex = re.compile(pattern)\n if not regex.search(unknown):\n return False\n return True", "def compiled_regex(s):\n if comp.match(s) is None:\n return s.isdigit()\n return True", "def _match_regex_list(subject, expressions):\n for expr in expressions:\n if re.search(expr, subject):\n return True\n return False", "def match(self, text):\n if self.sense:\n return (self.regex.match(text) != None)\n else:\n return (self.regex.match(text) == None)", "def match_regex_1(s, r):\n # Case: string is empty.\n if not s:\n if not r:\n return True\n if r[0] == '*':\n return match_regex_1(s, r[1:])\n return False\n # Case: string is not empty.\n if not r:\n return False\n regex_instruction = r[0]\n if regex_instruction in ('.', s[0]):\n return match_regex_1(s[1:], r[1:])\n if regex_instruction == '*':\n return match_regex_1(s[1:], r[1:]) or match_regex_1(s[1:], r)\n return False", "def isMatched(expr):\n pass", "def is_regex_in_string(regex, regex_string):\n try:\n match = re.search(regex, regex_string)\n does_nothing(match.group())\n return True;\n except Exception, e:\n return False;", "def match(self, s):\n self.matches = self.re.search(s)\n return self.matches", "def match(cls, text):\r\n return cls.main.pattern.match(text)", "def is_regex_url(self, url, regexp):\n return 
len(regexp.findall(url)) > 0", "def test_name_matching(string, matches: bool):\n assert (re.fullmatch(pattern, string) is not None) == matches", "def _regex_comp(self, name, flist):\n if name in flist:\n return True\n for item in flist:\n p = re.compile(item)\n match = p.match(name)\n if (match is not None):\n return True\n return False", "def regexp_match(\n self, pattern: Any, flags: Optional[str] = None\n ) -> ColumnOperators:\n return self.operate(regexp_match_op, pattern, flags=flags)", "def did_match(regular_expression, case):\r\n # re.match returns 'None' if not matched so we cannot return it directly\r\n if re.match(regular_expression, ''.join(case)):\r\n return True\r\n return False", "def match(pattern, s):\n # The regexp compilation caching is inlined in both Match and Search for\n # performance reasons; factoring it out into a separate function turns out\n # to be noticeably expensive.\n if pattern not in _regexp_compile_cache:\n _regexp_compile_cache[pattern] = sre_compile.compile(pattern)\n return _regexp_compile_cache[pattern].match(s)", "def match(pattern, target):\n pattern = ''.join('.*' if c == '*' else re.escape(c) for c in pattern)\n return bool(re.match('^' + pattern + '$', target))", "def match_regex_4(s, r):\n s_len = len(s)\n r_len = len(r)\n stack = [(0, 0)]\n explored = set() # States we've already explored.\n def explore(s_idx, r_idx):\n if (s_idx, r_idx) not in explored:\n explored.add((s_idx, r_idx))\n stack.append((s_idx, r_idx))\n while stack:\n s_idx, r_idx = stack.pop()\n # Case: string is empty.\n if s_idx == s_len:\n if r_idx == r_len:\n return True\n if r[r_idx] == '*':\n explore(s_idx, r_idx + 1)\n continue\n # Case: string is not empty.\n if r_idx == r_len:\n continue\n regex_instruction = r[r_idx]\n if regex_instruction in ('.', s[s_idx]):\n explore(s_idx + 1, r_idx + 1)\n if regex_instruction == '*':\n explore(s_idx + 1, r_idx + 1)\n explore(s_idx + 1, r_idx)\n return False", "def match_regex_3(s, r):\n s_len = len(s)\n r_len = len(r)\n stack = [(0, 0)]\n while stack:\n s_idx, r_idx = stack.pop()\n # Case: string is empty.\n if s_idx == s_len:\n if r_idx == r_len:\n return True\n if r[r_idx] == '*':\n stack.append((s_idx, r_idx + 1))\n continue\n # Case: string is not empty.\n if r_idx == r_len:\n continue\n regex_instruction = r[r_idx]\n if regex_instruction in ('.', s[s_idx]):\n stack.append((s_idx + 1, r_idx + 1))\n if regex_instruction == '*':\n stack.append((s_idx + 1, r_idx + 1))\n stack.append((s_idx + 1, r_idx))\n return False", "def exists(self, regex: str) -> bool:\n for _ in self.find(regex):\n return True\n return False", "def match(self, item):\n return self._regex.search(item) is not None", "async def match_regex(text, opts):\n\n def is_case_sensitive():\n if opts[\"case_sensitive\"]:\n return False\n return regex.IGNORECASE\n\n if opts[\"matching_condition\"].lower() == \"search\":\n matched_regex = regex.search(opts[\"expression\"], text, is_case_sensitive())\n elif opts[\"matching_condition\"].lower() == \"fullmatch\":\n matched_regex = regex.fullmatch(opts[\"expression\"], text, is_case_sensitive())\n else:\n matched_regex = regex.match(opts[\"expression\"], text, is_case_sensitive())\n return matched_regex", "def isMatch(self, s: str, p: str) -> bool:\n def is_match(self, text, pattern):\n if not pattern:\n return not text\n\n first_match = bool(text) and pattern[0] in {text[0], '.'}\n\n if len(pattern) >= 2 and pattern[1] == '*':\n return (self.isMatch(text, pattern[2:]) or\n first_match and self.isMatch(text[1:], pattern))\n 
else:\n return first_match and self.isMatch(text[1:], pattern[1:])\n\n def isMatch(self, text, pattern):\n memo = {}\n\n def dp(i, j):\n if (i, j) not in memo:\n if j == len(pattern):\n ans = i == len(text)\n else:\n first_match = i < len(text) and pattern[j] in {text[i], '.'}\n if j + 1 < len(pattern) and pattern[j + 1] == '*':\n ans = dp(i, j + 2) or first_match and dp(i + 1, j)\n else:\n ans = first_match and dp(i + 1, j + 1)\n\n memo[i, j] = ans\n return memo[i, j]\n\n return dp(0, 0)", "def test_match_regexp_including_start():\r\n runmatch(lcode)", "def matches_expression(pattern: str, method: str) -> bool:\n \n return True", "def field_match(pattern, field):\n if pattern:\n return re.match(pattern, field)\n return True", "def match(self) -> bool:", "def check_body(body, regex):\n if body and regex:\n return bool(re.match(regex, str(body)))\n else:\n return False", "def match(self, _str: str):\n result = self._regex.match(_str)\n if result:\n return result.groupdict() or True", "def regex(value, pattern):\r\n c_pattern = re.compile(r\"\\b\" + pattern.lower() + r\"\\b\")\r\n return c_pattern.search(value) is not None", "def test_regex(regular_expression, language):\r\n for case in cases:\r\n # Should it have matched?\r\n if did_match(regular_expression, case) != language(case):\r\n # Output problem case\r\n print(f\"Failure at case: {''.join(case)}\")\r\n print(f\"Regex output: {did_match(regular_expression, case)}\")\r\n print(f\"Should have been: {language(case)}\")\r\n print(\"Terminating...\")\r\n return False\r\n print(\"Regex recognizes the language for all test cases!\")\r\n return True", "def in_iterable_re(needle, haystack):\n # match without regex\n if needle in haystack:\n return True\n\n for pattern in haystack:\n # match if regex pattern is set and found in the needle\n if pattern and re.search(pattern, needle) is not None:\n return True\n\n return False", "def _match_incl_regexp(self, rel_path):\n\n for neg_regexp in self.include_regexps:\n if neg_regexp.search(rel_path) is not None:\n self.logger.debug(\"The same path %s matches the include\"\n \" regexp %s.\" % (rel_path,\n neg_regexp.pattern))\n return True\n\n return False", "def __reWildcard(self, regexp, string):\n regexp = re.sub(\"\\*+\", \"*\", regexp)\n match = True\n if regexp.count(\"*\") == 0:\n if regexp == string:\n return True\n else:\n return False\n blocks = regexp.split(\"*\")\n start = \"\"\n end = \"\"\n if not regexp.startswith(\"*\"):\n start = blocks[0]\n if not regexp.endswith(\"*\"):\n end = blocks[-1]\n if start != \"\":\n if string.startswith(start):\n blocks = blocks[1:]\n else:\n return False\n if end != \"\":\n if string.endswith(end):\n blocks = blocks[:-1]\n else:\n return False\n blocks = [block for block in blocks if block != \"\"]\n if blocks == []:\n return match\n for block in blocks:\n i = string.find(block)\n if i == -1:\n return False\n string = string[i + len(block):]\n return match", "def match_regex_2(s, r):\n s_len = len(s)\n r_len = len(r)\n @memoize\n def match(s_idx, r_idx):\n \"\"\"Matches string s[s_idx:] to regex r[r_idx:].\"\"\"\n # Case: string is empty.\n if s_idx == s_len:\n if r_idx == r_len:\n return True\n if r[r_idx] == '*':\n return match(s_idx, r_idx + 1)\n return False\n # Case: string is not empty.\n if r_idx == r_len:\n return False\n regex_instruction = r[r_idx]\n if regex_instruction in ('.', s[s_idx]):\n return match(s_idx + 1, r_idx + 1)\n if regex_instruction == '*':\n return match(s_idx + 1, r_idx + 1) or match(s_idx + 1, r_idx)\n return False\n return 
match(0, 0)", "def matches(self, actual) -> bool:\n return self.matcher.matches(actual)", "def matches_rule(word):\n return re.search(pattern, word)", "def isRegexPossible(self):\n if self._lastToken is None:\n # No token has been produced yet: at the start of the input,\n # no division is possible, so a regex literal _is_ possible.\n return True\n\n if self._lastToken.type == ECMAScriptLexer.Identifier or \\\n self._lastToken.type == ECMAScriptLexer.NullLiteral or \\\n self._lastToken.type == ECMAScriptLexer.BooleanLiteral or \\\n self._lastToken.type == ECMAScriptLexer.This or \\\n self._lastToken.type == ECMAScriptLexer.CloseBracket or \\\n self._lastToken.type == ECMAScriptLexer.CloseParen or \\\n self._lastToken.type == ECMAScriptLexer.OctalIntegerLiteral or \\\n self._lastToken.type == ECMAScriptLexer.DecimalLiteral or \\\n self._lastToken.type == ECMAScriptLexer.HexIntegerLiteral or \\\n self._lastToken.type == ECMAScriptLexer.StringLiteral or \\\n self._lastToken.type == ECMAScriptLexer.PlusPlus or \\\n self._lastToken.type == ECMAScriptLexer.MinusMinus:\n # After any of the tokens above, no regex literal can follow.\n return False\n else:\n # In all other cases, a regex literal _is_ possible.\n return True", "def validaURL(url: AnyStr) -> bool:\n\n return re.compile(patternURL).search(url) != None # Linea 1", "def operator_nre(s, pattern):\n return not re.search(pattern, s)", "def test_pattern(pattern, fields):\n if not pattern: # \"empty\" pattern\n return True\n\n def eval_exp(text):\n m = re.match(r'^(\\$(\\d+))?(!)?/([^/]*)/$', text)\n try:\n if m: # regular expression\n _, num, neg, pat = m.groups()\n num = int(num) if num else 0 # if no `$i` specified, default to `$0`\n m = re.search(pat, fields[num])\n logging.info(u\"regex: '%s' %s~ /%s/\" % (fields[num], neg or u'', pat))\n return bool(m) != bool(neg)\n else: # expression\n exp = translate_fields(text, fields, u'_') # replace non-exist `$i` with u'_'\n logging.info(u'exp: %s' % exp)\n return bool(exp and eval(exp))\n except Exception, e:\n logging.debug(unicode(e))\n return False\n\n if u',' not in pattern: # \"regular expression\" or \"expression\" pattern\n return eval_exp(pattern)\n else: # \"begpat, endpat\" pattern\n global SWITCH_ON\n\n value = False\n\n begpat, endpat = [s.strip() for s in pattern.split(u',')]\n if eval_exp(begpat):\n SWITCH_ON = True\n if SWITCH_ON:\n value = True\n if eval_exp(endpat):\n SWITCH_ON = False\n\n return value", "def test_match(self):\n\n # Test of the rematch case.\n regex = r\"([a-z]{1,})\\s([a-z]{1,})\\s\"\n expected = \"is\"\n actual = Regex(self.data, regex, rematch=True, group=1).match()\n\n self.assertEqual(expected, actual)\n\n # Test of the group case\n regex = \"e\"\n expected = \"e\"\n actual = Regex(self.data, regex, group=0).match()\n\n self.assertEqual(expected, actual)", "def Match(context, pattern, arg=None):\n if not arg:\n arg = context.node\n arg = Conversions.StringValue(arg)\n bool = re.match(pattern, arg) and boolean.true or boolean.false\n return bool", "def match(self, string):\n matched = False\n cmd = None\n\n if string in self.commands.keys():\n matched = True\n cmd = string\n\n else:\n for command in self.commands.keys():\n if \"regex\" in self.commands[command].keys() \\\n and re.match(self.commands[command][\"regex\"], string):\n matched = True\n cmd = command\n break\n \n if cmd and len(cmd) > 0:\n self._last_matched_command = cmd\n else:\n self._last_matched_command = None\n\n return matched", "def _check_regex_match(file_path, search_regex):\n with 
file_path.open(\"rb\") as file_obj:\n file_bytes = file_obj.read()\n content = None\n for encoding in TREE_ENCODINGS:\n try:\n content = file_bytes.decode(encoding)\n break\n except UnicodeDecodeError:\n continue\n if not search_regex.search(content) is None:\n return True\n return False", "def match_example():\n global example\n pattern = r'^[a-z]+$'\n return len(re.findall(pattern, example)) > 0", "def contains_match(self, regexp):\n # If the regexp is not found, find will return a tuple (-1, -1) in Sublime 3 or None in Sublime 2 \n # https://github.com/SublimeTextIssues/Core/issues/534\n contains_import = self.view.find(regexp, 0)\n return contains_import.size() > 0 if float(sublime.version()) >= 3000 else contains_import is not None", "def found(self, command, regex):\n result = self.sys(command)\n for line in result:\n found = re.search(regex,line)\n if found:\n return True\n return False", "def found(self, command, regex):\n result = self.sys(command)\n for line in result:\n found = re.search(regex,line)\n if found:\n return True\n return False", "def isValid(text):\n return bool(re.search(r'\\b((kill|stop) the (alarm|clock|music))\\b', text, re.IGNORECASE))", "def match(pattern, string):\n if not len(pattern) and not len(string):\n return True\n\n if len(pattern) > 1 and pattern[0] == '*' and len(string) == 0:\n return False\n\n if (len(pattern) > 0 and pattern[0] == '?') or \\\n (len(pattern) != 0 and len(string) != 0 and pattern[0] == string[0]):\n return match(pattern[1:], string[1:])\n\n if len(pattern) != 0 and pattern[0] == '*':\n return match(pattern[1:], string) or match(pattern, string[1:])\n\n return False", "def _matches(o, pattern):\n if not len(o) == len(pattern):\n return False\n comps = zip(o,pattern)\n return all(isinstance(obj,kind) for obj,kind in comps)", "def isMatch(s: str, p: str):\n # '.*' matches any string.\n if p == '.*':\n return True\n # Finished both string and pattern!\n if not s and not p:\n return True\n # Repeat character zero times\n if len(p) > 1:\n if not s and p[1] == '*':\n return isMatch(s, p[2:])\n # Finished one of string/pattern but not both.\n if not s or not p:\n return False\n # Pattern of length one \n if len(p) == 1:\n if p[0] == s[0] or p[0] == '.':\n return isMatch(s[1:], p[1:])\n else:\n return False\n # Check if we have '*' character\n if p[1] == '*':\n # Zero of preceding character\n if p[0] != '.' and p[0] != s[0]:\n return isMatch(s, p[2:])\n # Characters (not '.') match!\n if p[0] == s[0]:\n if isMatch(s, p[2:]):\n return True\n while p[0] == s[0]:\n s = s[1:]\n if isMatch(s, p[2:]):\n return True\n if not s:\n return False\n return False\n # '.' 
characte matches any alphabetic character\n if p[0] == '.':\n if isMatch(s, p[2:]):\n return True\n while s and p:\n s = s[1:]\n if isMatch(s, p[2:]):\n return True\n return False\n # If first character matches (or is '.'), recursively\n # check smaller pattern/string\n if p[0] == s[0] or p[0] == '.':\n return isMatch(s[1:], p[1:])\n return False", "def test_match(self, url, criterions=[], har=None):\r\n return len(self.get_matches(url, criterions, har)) != 0", "def evaluate_clause(clause: str, match: str) -> bool:\n result = compile_regex(clause).fullmatch(match)\n return result is not None", "def c_regex(exp, flags=0, group=0) -> Parser:\n if isinstance(exp, (str, bytes)):\n exp = re.compile(exp, flags)\n if isinstance(group, (str, int)):\n group = (group,)\n\n @Parser\n def regex_parser(stream, index):\n match = exp.match(stream, index)\n if match:\n return Result.success(match.end(), match.group(*group))\n else:\n return Result.failure(index, exp.pattern)\n\n return regex_parser", "def isValid(text):\n return bool(re.search(r'\\blight|lights\\b', text, re.IGNORECASE))", "def matches(self, python):\n return False", "def is_matching(patterns, blob):\n for pattern in patterns:\n if re.match(fnmatch.translate(pattern), blob.path):\n return True\n return False", "def _memorized_fnmatch(name: str, pattern: str) -> bool:\n return bool(_compile_fnmatch(pattern).match(name))", "def check(self, data):\r\n if isinstance(data, Iterable):\r\n data = \"\".join([str(x) for x in data])\r\n try:\r\n data = str(data)\r\n except UnicodeDecodeError:\r\n return False\r\n if not data:\r\n return False\r\n return bool(self.__regexp.match(data))", "def match_string(self, string_to_match, regexp):\n\t\tshutit_global.shutit_global_object.yield_to_draw()\n\t\tif not isinstance(string_to_match, str):\n\t\t\treturn None\n\t\tlines = string_to_match.split('\\r\\n')\n\t\t# sometimes they're separated by just a carriage return...\n\t\tnew_lines = []\n\t\tfor line in lines:\n\t\t\tnew_lines = new_lines + line.split('\\r')\n\t\t# and sometimes they're separated by just a newline...\n\t\tfor line in lines:\n\t\t\tnew_lines = new_lines + line.split('\\n')\n\t\tlines = new_lines\n\t\tif not shutit_util.check_regexp(regexp):\n\t\t\tself.fail('Illegal regexp found in match_string call: ' + regexp) # pragma: no cover\n\t\tfor line in lines:\n\t\t\tmatch = re.match(regexp, line)\n\t\t\tif match is not None:\n\t\t\t\tif match.groups():\n\t\t\t\t\treturn match.group(1)\n\t\t\t\treturn True\n\t\treturn None", "def match(self, sentence) -> bool:\r\n pass", "def matches(self, test_string, parse_all=True):\n try:\n self.parse_string(text(test_string), parse_all=parse_all)\n return True\n except ParseException:\n return False", "def match(self):\n\n # We initate this variable which gonna contain the returned data\n result = []\n\n # We compile the regex string\n to_match = comp(self.regex)\n\n # In case we have to use the implementation of ${BASH_REMATCH} we use\n # re.findall otherwise, we use re.search\n if self.rematch: # pylint: disable=no-member\n pre_result = to_match.findall(self.data)\n else:\n pre_result = to_match.search(self.data)\n\n if self.return_data and pre_result is not None: # pylint: disable=no-member\n if self.rematch: # pylint: disable=no-member\n for data in pre_result:\n if isinstance(data, tuple):\n result.extend(list(data))\n else:\n result.append(data)\n\n if self.group != 0: # pylint: disable=no-member\n return result[self.group] # pylint: disable=no-member\n else:\n result = pre_result.group(\n self.group 
# pylint: disable=no-member\n ).strip()\n\n return result\n elif (\n not self.return_data # pylint: disable=no-member\n and pre_result is not None\n ):\n return True\n return False", "def isValid(text):\n return bool(re.search(r\"\\b((close|activate)\\ (check|tunnel|ubuntu|fedora|windows))\\b\", text, re.IGNORECASE))", "def match_regex(regex: str, string: str):\n postfix_regex = infix_to_postfix(regex)\n nfa = create_nfa_from_postfix(postfix_regex)\n return input_string_to_nfa(string, nfa)", "def check_pass(text):\r\n\r\n upperRegex = re.compile(r'[A-Z]')\r\n lowerRegex = re.compile(r'[a-z]')\r\n lengthRegex = re.compile(r'.{8,}')\r\n digitRegex = re.compile(r'\\d')\r\n\r\n if not upperRegex.search(text):\r\n return False\r\n elif not lowerRegex.search(text):\r\n return False\r\n elif not lengthRegex.search(text):\r\n return False\r\n elif not digitRegex.search(text):\r\n return False\r\n else:\r\n return True", "def fnmatch(pattern, filename) -> bool:\n return _fnmatch(filename, pattern)", "def matches(self):\n return False", "def check(self, s, field='word', cats=None):\n f = self[field]\n if cats is None:\n # treat s as plain regex\n return regex.search(s, f) is not None\n # s is a sound change rule\n try:\n # parse s\n s = sound_changer.parse_rule(s, cats)\n except AttributeError:\n # s is a dict (i.e. already parsed)\n pass\n return bool(sound_changer.find_matches(f, s, cats)[0])", "def validate_string_match(self, pattern, file):\r\n try:\r\n file_open = open(file, 'r')\r\n except:\r\n logging.info(\"file not found\")\r\n return -1\r\n file_data = file_open.read()\r\n ret_out = re.match(pattern, file_data)\r\n if ret_out:\r\n return True, ret_out\r\n else:\r\n return False, ret_out", "def test_empty_string_is_also_a_match(self):\n pattern = \"\"\n s = \"abcdef\"\n self.assertEqual(__, re.search(pattern, s).group())", "def test_regex_case_sensitive_match(self):\n cursor = self.dbh.cursor()\n try:\n expr = self.dbh.get_regex_clause(\"'abc'\", 'a.*')\n qry = self.dbh.get_expr_exec_format() % \"'TRUE'\"\n qry += ' WHERE ' + expr\n\n cursor.execute(qry)\n\n self.assertEqual(cursor.fetchone()[0], 'TRUE')\n finally:\n self.dbh.rollback()\n cursor.close()", "def assert_re(value, regex):\n match = re.search(regex, value)\n assert match is not None, '%s ~= %s' % (value, regex)", "def is_fastq_regex(fastq_regex):\n if \"{sample}\" not in fastq_regex:\n raise argparse.ArgumentTypeError(\n \"The regex should contain at least the wildcard '{sample}'.\")\n return fastq_regex", "def Like(text, pattern):\n return fnmatch.fnmatch(text, pattern)", "def regex_compiled():\n return re.compile(SBE19DataParticle.regex())", "def match_patterns(pathname, patterns):\n for pattern in patterns:\n if fnmatch(pathname, pattern):\n return True\n return False", "def match(self, s):\n if self.re.match(s):\n self.list.append(s)\n return True\n else: return False", "def isValid(text):\r\n return bool(re.search(r'\\bcommute\\b', text, re.IGNORECASE))", "def get_match_with_string(pattern, unknown):\n pattern, unknown = _check_params(pattern, unknown)\n if pattern not in unknown:\n return False\n return True", "def _match_rule(self, name: str, rule: str) -> bool:\n if not rule in self._regexps:\n regexps = []\n for part in os.path.normpath(rule).split(os.sep):\n if part:\n pattern = re.escape(part).replace(\"\\*\", \".*\").replace(\"\\?\", \".\")\n regexp = re.compile(f\"^{pattern}$\", re.IGNORECASE)\n else:\n regexp = None\n regexps.append(regexp)\n self._regexps[rule] = regexps\n for i, part in 
enumerate(os.path.normpath(name).split(os.sep)):\n try:\n regexp = self._regexps[rule][i]\n except:\n regexp = None\n if part:\n if not regexp or not regexp.match(part):\n return False\n elif regexp:\n return False\n return True", "def __search(findwhat, content, ignorecase, regexp):\n\t\tfrom re import search, IGNORECASE\n\t\tif regexp:\n\t\t\tif ignorecase:\n\t\t\t\tflag = IGNORECASE\n\t\t\telse:\n\t\t\t\tflag = 0\n\t\t\tif search(findwhat, content, flag):\n\t\t\t\treturn True\n\t\telse:\n\t\t\tif ignorecase:\n\t\t\t\tcontent = content.lower()\n\t\t\t\tfindwhat = findwhat.lower()\n\t\t\t\t\n\t\t\tif content.find(findwhat) != -1:\n\t\t\t\treturn True\n\t\treturn False", "def wanted(have, want, regex=False):\n\n assert isinstance(have, basestring)\n\n if want == '__all__':\n return True\n\n if want == '__none__' or not want:\n return False\n\n if regex:\n if isinstance(want, basestring):\n if re.match(want, have):\n return True\n else:\n for item in want:\n if re.match(item, have):\n return True\n\n else:\n if isinstance(want, basestring):\n return True if want == have else False\n\n if have in want:\n return True\n\n return False", "def _text_matches_regex(text, rx):\n match = re.match(rx, text)\n if match:\n return match.group(1)\n return None", "def test_match_right_regexp_to_none():\r\n runmatch(lcode)", "def _matchCPattern(cPattern, node):\n if isinstance(cPattern, str):\n return _matchFeatureConstraints(dPattern=cPattern, node=node)\n # Match Root\n if _matchFeatureConstraints(dPattern=cPattern[0], node=node):\n if _matchCPatternChildren(cPattern[1], node.leftChild) and\\\n _matchCPatternChildren(cPattern[2], node.rightChild):\n return True\n return False", "def matcher(string):\n rec = re.compile(rexp, re.VERBOSE)\n groups = set(rec.groupindex) # index nos of no interest; discard\n m = rec.search(string)\n if m is None: return None\n # Match succeeded at this point\n # match-data -> Python\n mapped_d = {gname : m.group(gname) for gname in groups}\n # postprocess and done!\n return {k : ppers[k](mapped_d[k]) for k in mapped_d}" ]
[ "0.7702972", "0.7421903", "0.73297775", "0.7058684", "0.69862247", "0.691478", "0.6872504", "0.6859625", "0.68152654", "0.67619383", "0.6750344", "0.6741106", "0.66436803", "0.656683", "0.6558872", "0.65424186", "0.64939934", "0.6461387", "0.6382901", "0.63667697", "0.63661885", "0.6359937", "0.62873435", "0.6286437", "0.625598", "0.62513524", "0.6249953", "0.6229542", "0.62285465", "0.6213565", "0.6183295", "0.61565185", "0.614014", "0.6117568", "0.6093455", "0.6069845", "0.6056571", "0.5967633", "0.59449345", "0.5944087", "0.5943852", "0.59409404", "0.5921463", "0.5899774", "0.58972615", "0.5859216", "0.5831413", "0.5807831", "0.5797132", "0.5795225", "0.57819504", "0.576918", "0.57660973", "0.57640785", "0.57529944", "0.5747662", "0.57394576", "0.56994146", "0.5686274", "0.56795347", "0.56795347", "0.56750685", "0.56748503", "0.56673205", "0.5651479", "0.56427467", "0.56302416", "0.5617944", "0.561274", "0.56101954", "0.5610141", "0.5586362", "0.558564", "0.556377", "0.5549145", "0.5534784", "0.55331796", "0.55313355", "0.5528221", "0.55256724", "0.55200344", "0.5516536", "0.54997206", "0.5498147", "0.5490796", "0.5489511", "0.54884773", "0.5487407", "0.5484973", "0.54791933", "0.54648405", "0.544678", "0.5441914", "0.54386646", "0.5435726", "0.5435597", "0.54341054", "0.542701", "0.5423143", "0.5421777", "0.5419407" ]
0.0
-1
A single training step
def train_step(x_batch, y_batch):
    feed_dict = {
        cnn.x: x_batch,
        cnn.y_: y_batch,
        step_time_placeholder: last_step_time,
        cnn.keep_prob: FLAGS.keep_prob
    }
    _, step, summaries, loss, accuracy = sess.run(
        [train_op, global_step, train_summary_op, cnn.cross_entropy, cnn.accuracy],
        feed_dict)
    time_str = datetime.datetime.now().isoformat()
    print("{}: step {}, loss {:g}, acc {:g}".format(time_str, step, loss, accuracy))
    train_summary_writer.add_summary(summaries, step)
    train_summary_writer.flush()
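A self-contained sketch of the same fetch-and-log pattern (hypothetical two-feature linear model; assumes the TF1 compatibility API, since `cnn`, `FLAGS`, and the summary writer above belong to the surrounding program and are not defined here):

    import datetime
    import tensorflow.compat.v1 as tf

    tf.disable_v2_behavior()

    x = tf.placeholder(tf.float32, [None, 2])
    y = tf.placeholder(tf.float32, [None, 1])
    w = tf.Variable(tf.zeros([2, 1]))
    loss = tf.reduce_mean(tf.square(tf.matmul(x, w) - y))
    train_op = tf.train.GradientDescentOptimizer(0.1).minimize(loss)

    with tf.Session() as sess:
        sess.run(tf.global_variables_initializer())
        # One step: run the optimizer and fetch the loss in a single sess.run call.
        _, step_loss = sess.run([train_op, loss],
                                feed_dict={x: [[1.0, 2.0]], y: [[3.0]]})
        print("{}: loss {:g}".format(datetime.datetime.now().isoformat(), step_loss))

Fetching `train_op` together with the metric tensors in one `sess.run` call is what keeps each training step to a single graph execution.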
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def train_step(self):\n pass", "def TrainOneStep(self):\n pass", "def train(self, training_steps=10):", "def train():\n pass", "def training_step(self, **kwargs):\n raise NotImplementedError", "def train(self):\n pass", "def train(self):\n pass", "def train(self):\n pass", "def train(self):\n pass", "def train(self):\n pass", "def train(self)->None:", "def train(self, batch):\n pass", "def train_one_epoch(self):\n raise NotImplementedError", "def train():\n # YOUR TRAINING CODE GOES HERE", "def __call__(self, initial_lr, step, epoch):\n\n pass", "def train(self):\n\t\traise NotImplementedError", "def step(self, epoch):\n\n self.train(epoch)\n self.test(epoch)", "def train(self, ):\n raise NotImplementedError", "def train(self):\n return", "def train(self):\n raise NotImplementedError", "def run_step(self):\n self.hooked_sess.run(self.train_op)", "def train(self, training_data):\n pass", "def train(self) -> Any:\n pass", "def train_loop_pre(self, current_step):\r\n pass", "def train(self):\n raise NotImplementedError()", "def train_step(self, experiences, gamma):\n raise NotImplementedError", "def train(self):\n self.training = True", "def test_training(self):\n\t\tpass", "def train_step(x_batch, y_batch, x_batch_lex):\n feed_dict = {\n cnn.input_x: x_batch,\n cnn.input_y: y_batch,\n # lexicon\n cnn.input_x_lexicon: x_batch_lex,\n cnn.dropout_keep_prob: FLAGS.dropout_keep_prob\n }\n _, step, summaries, loss, accuracy, neg_r, neg_p, f1_neg, f1_pos, avg_f1 = sess.run(\n [train_op, global_step, train_summary_op, cnn.loss, cnn.accuracy,\n cnn.neg_r, cnn.neg_p, cnn.f1_neg, cnn.f1_pos, cnn.avg_f1],\n feed_dict)\n time_str = datetime.datetime.now().isoformat()\n # print(\"{}: step {}, loss {:g}, acc {:g}\".format(time_str, step, loss, accuracy))\n #print(\"{}: step {}, loss {:g}, acc {:g}, neg_r {:g} neg_p {:g} f1_neg {:g}, f1_pos {:g}, f1 {:g}\".\n # format(time_str, step, loss, accuracy, neg_r, neg_p, f1_neg, f1_pos, avg_f1))\n train_summary_writer.add_summary(summaries, step)", "def train(self, trainData):\n pass", "def trainModel( self, featureTrain, classTrain):", "def train(self, num_batches: int):", "def train_step(x_batch, y_batch):\r\n feed_dict = {\r\n cnn.input_x: x_batch,\r\n cnn.input_y: y_batch,\r\n cnn.dropout_keep_prob: FLAGS.dropout_keep_prob\r\n }\r\n\r\n _, step, summaries, loss, accuracy, predictions,y_actual = sess.run(\r\n [train_op, global_step, train_summary_op, cnn.loss, cnn.accuracy, cnn.predictions,cnn.y],\r\n feed_dict)\r\n\r\n time_str = datetime.datetime.now().isoformat()\r\n # print(\"train_f1_score:\", f1_score(y_actual, predictions, average=None))\r\n # print (predictions)\r\n # print(y_actual)\r\n print(\"{}: step {}, loss {:g}, acc {:g}\".format(time_str, step, loss, accuracy))\r\n return accuracy\r\n\r\n train_summary_writer.add_summary(summaries, step)", "def train_step(x_batch, y_batch):\r\n feed_dict = {\r\n rnn.input_x: x_batch,\r\n rnn.input_y: y_batch,\r\n rnn.dropout_keep_prob: FLAGS.dropout_keep_prob\r\n }\r\n _, step, loss, accuracy = sess.run(\r\n [train_op, global_step, rnn.loss, rnn.accuracy],\r\n feed_dict)\r\n return step, loss, accuracy", "def train(self, *args, **kwargs):\n raise NotImplementedError", "def start_training(self):\n self.training = True", "def on_train_batch_begin(self, step, logs=None):", "def train_step(self, x_train, y_train):\n\n input_x_op = self.session.graph.get_operation_by_name(\"input_x\").outputs[0]\n input_y_op = self.session.graph.get_operation_by_name(\"input_y\").outputs[0]\n dropout_keep_prob_op = 
self.session.graph.get_operation_by_name(\"dropout_keep_prob\").outputs[0]\n global_step_op = self.session.graph.get_operation_by_name(\"global_step\").outputs[0]\n\n optimizer_op = self.session.graph.get_operation_by_name(\"loss/optimizer\").outputs[0]\n loss_op = self.session.graph.get_operation_by_name(\"loss/loss\").outputs[0]\n\n d_ = {\n input_x_op: x_train,\n input_y_op: y_train\n }\n\n self.init_dataset(d_)\n\n train_batches_per_epoch = (len(x_train) - 1) // self.FLAGS.batch_size + 1\n\n sum_loss = 0\n for current_step in range (train_batches_per_epoch):\n\n if self.FLAGS.summary:\n _, step, summaries, loss = self.session.run(\n [optimizer_op, global_step_op, self.train_summary_op, loss_op], feed_dict={dropout_keep_prob_op: self.hyperparams['dropout_keep_prob']})\n \n self.train_summary_writer.add_summary(summaries, step)\n else:\n _, step, loss = self.session.run(\n [optimizer_op, global_step_op, loss_op], feed_dict={dropout_keep_prob_op: self.hyperparams['dropout_keep_prob']})\n \n sum_loss += loss\n\n time_str = datetime.datetime.now().isoformat()\n if (current_step + 1) % 10 == 0:\n print(\"{}: step {}/{}, loss {:g}\".format(time_str, current_step + 1, train_batches_per_epoch, loss))\n\n mean_loss = sum_loss/ train_batches_per_epoch\n\n return mean_loss", "def forward_train(self, *args, **kwargs):\n pass", "def train(self, data):\n pass", "def training_step(self, batch):\n return {}", "def start_training(self):\n i = 0\n for _ in range(self.train_steps):\n print(f\"Start Training Step {i + 1}\")\n self.model.learn(total_timesteps=self.total_time_steps)\n self.model.save(self.save_path)\n print(f\"Finished Training Step {i + 1}\")\n i += 1", "def train_step(x_batch, y_batch):\n feed_dict = {\n cnn.input_x: x_batch,\n cnn.input_y: y_batch,\n cnn.dropout_keep_prob: FLAGS.dropout_keep_prob\n }\n # print(x_batch[0])\n _, step, summaries, loss, accuracy = sess.run(\n [train_op, global_step, train_summary_op, cnn.loss, cnn.accuracy],\n feed_dict)\n time_str = datetime.datetime.now().isoformat()\n if step%100==0:\n print(\"{}: step {}, loss {:g}, acc {:g}\".format(time_str, step, loss, accuracy))\n train_summary_writer.add_summary(summaries, step)", "def train_step(self, sess, task_a_data):\n raise NotImplemented()", "def train_step(self):\r\n batch_images = next(self.data_loader.next_batch())\r\n _, loss, summary, ea = self.sess.run([self.model.train_op, self.model.total_loss, self.model.merged, self.model.euclidean_a_p],\r\n feed_dict={self.model.input: batch_images, self.model.is_training: True})\r\n \r\n return loss, summary", "def run_one_step_simvodis(self):\n self.model_lr_scheduler.step()\n self.set_train()\n\n inputs = next(iter(self.train_loader))\n\n before_op_time = time.time()\n\n outputs, losses = self.process_batch(inputs)\n\n self.model_optimizer.zero_grad()\n losses[\"loss\"].backward()\n self.model_optimizer.step()\n\n duration = time.time() - before_op_time\n\n if self.arguments[\"iteration\"] % (self.opt.save_frequency // 2) == 0:\n self.log_time(self.arguments[\"iteration\"], duration, losses[\"loss\"].cpu().data)\n\n if \"depth_gt\" in inputs:\n self.compute_depth_losses(inputs, outputs, losses)\n\n self.log(\"train\", inputs, outputs, losses)\n self.val()\n\n self.step += 1", "def train_loop_post(self, current_step):\r\n pass", "def train_step(s1_batch, s2_batch, y_batch, lables_batch):\n feed_dict = {\n my_model.sentence_one_word: s1_batch,\n my_model.sentence_two_word: s2_batch,\n my_model.y_true: lables_batch,\n my_model.y: y_batch,\n my_model.is_train: True\n 
}\n # global_step用于记录全局的step数,就是当前运行到的step\n _, step, summaries, loss, acc, acc_direct, f1 = sess.run(\n [train_op, global_step, train_summary_op, my_model.loss, my_model.accuracy, my_model.accuracy_direct, my_model.F1],\n feed_dict)\n\n time_str = datetime.datetime.now().isoformat()\n print(\"{}: step {}, loss {:g}, acc {:g}, acc_direct {:g}, f1 {:g}\".format(time_str, step, loss, acc, acc_direct, f1))\n train_summary_writer.add_summary(summaries, step)\n endtime = datetime.datetime.now()\n print('训练时间(分钟):', (endtime - starttime).seconds/60)", "def on_train_forward(self, runner):\n self.on_iter_forward(runner)", "def train(self):\n self.epoch = 0\n self.step = 0\n self.start_time = time.time()\n for self.epoch in range(self.num_epochs):\n print(\"EPOHA\")\n self.run_epoch()\n print(\"SAVE MODEL\")\n self.save_model()", "def train_step(x_batch, y_batch):\n\n feed_dict = {\n cnn.input_x: x_batch,\n cnn.input_y: y_batch,\n cnn.dropout_keep_prob: FLAGS.dropout_keep_prob\n }\n _, step, summaries, loss, accuracy = sess.run(\n [train_op, global_step, train_summary_op, cnn.loss,\n cnn.accuracy],\n feed_dict)\n time_str = datetime.datetime.now().isoformat()\n logger.info(\"{}: step {}, loss {:g}, acc {:g}\".format(\n time_str, step, loss, accuracy))\n train_summary_writer.add_summary(summaries, step)", "def trainNet():", "def training(self):\n self.training = True", "def train(self, trainfile):", "def step(self):\n # Fast learning\n task_embedding = self._ilp.infer_task()\n\n # Posterior update\n #self._skip_flag = self._is_graph_same(task_embedding, self._prev_task_embedding)\n self._skip_flag = False # XXX do not skip test\n if not self._skip_flag:\n self._grprop.observe_task(task_embedding)\n self._prev_task_embedding = task_embedding\n else:\n print(\"skipping!\")", "def train_step(x_batch, y_batch):\n feed_dict = {\n cnn.input_x: x_batch,\n cnn.input_y: y_batch,\n cnn.dropout_keep_prob: FLAGS.dropout_keep_prob\n }\n _, step, summaries, loss, accuracy = sess.run(\n [train_op, global_step, train_summary_op, cnn.loss, cnn.accuracy],\n feed_dict)\n time_str = datetime.datetime.now().isoformat()\n print(\"{}: step {}, loss {:g}, acc {:g}\".format(time_str, step, loss, accuracy))\n train_summary_writer.add_summary(summaries, step)", "def train_step(x_batch, y_batch):\n feed_dict = {\n rnn.input_x: x_batch,\n rnn.input_y: y_batch,\n rnn.dropout_keep_prob: FLAGS.dropout_keep_prob\n }\n _, step, summaries, loss, accuracy = sess.run(\n [train_op, global_step, train_summary_op, rnn.loss, rnn.accuracy],\n feed_dict)\n time_str = datetime.datetime.now().isoformat()\n # print(\"{}: step {}, loss {:g}, acc {:g}\".format(time_str, step, loss, accuracy))\n train_summary_writer.add_summary(summaries, step)\n\n return loss,accuracy", "def run(self) -> None:\n self.model = self.trainer.train_model(self.model, self.data)", "def test_train(self):\n print \"x=\",self.trainer.train()", "def run_step(self):\n assert self.model.training, \"[SimpleTrainer] model was changed to eval mode!\"\n start = time.perf_counter()\n \"\"\"\n If your want to do something with the data, you can wrap the dataloader.\n \"\"\"\n data = next(self._data_loader_iter)\n data_time = time.perf_counter() - start\n\n \"\"\"\n If your want to do something with the losses, you can wrap the model.\n \"\"\"\n loss_dict = self.model(data)\n losses = sum(loss for loss in loss_dict.values())\n self._detect_anomaly(losses, loss_dict)\n\n metrics_dict = loss_dict\n metrics_dict[\"data_time\"] = data_time\n self._write_metrics(metrics_dict)\n \n 
validation_data = next(self.validation_data_loader_iter)\n val_losses_dict = self.model(validation_data)\n val_losses = sum(loss for loss in val_losses_dict.values())\n self._detect_anomaly(val_losses, val_losses_dict)\n\n val_metrics_dict = val_losses_dict\n val_metrics_dict[\"data_time\"] = data_time\n self._write_validation_metrics(val_metrics_dict)\n\n \"\"\"\n If you need accumulate gradients or something similar, you can\n wrap the optimizer with your custom `zero_grad()` method.\n \"\"\"\n self.optimizer.zero_grad()\n losses.backward()\n\n \"\"\"\n If you need gradient clipping/scaling or other processing, you can\n wrap the optimizer with your custom `step()` method.\n \"\"\"\n self.optimizer.step()", "def train_experiment(session, model, result, writer, last_step, max_steps,\n saver, summary_dir, save_step, features, **kwargs):\n del kwargs\n\n step = 0\n\n print('model training started')\n for i in range(last_step, max_steps):\n step += 1\n summary, _ = session.run([result.summary, result.train_op])\n writer.add_summary(summary, i)\n\n if (i + 1) % model._hparams.boost_step == 0 and (\n model._hparams.boosting):\n session.run(result.boosting_op)\n\n if (i + 1) % save_step == 0:\n saver.save(session, os.path.join(summary_dir, 'model.ckpt'),\n global_step=i + 1)", "def train_step(x_batch_1, x_batch_2, x_batch_3):\n feed_dict = {\n cnn.input_x_1: x_batch_1,\n cnn.input_x_2: x_batch_2,\n cnn.input_x_3: x_batch_3,\n cnn.dropout_keep_prob: FLAGS.dropout_keep_prob\n }\n _, step, summaries, loss, accuracy = sess.run(\n [train_op, global_step, train_summary_op, cnn.loss, cnn.accuracy],\n feed_dict)\n time_str = datetime.datetime.now().isoformat()\n print(\"{}: step {}, loss {:g}, acc {:g}\".format(time_str, step, loss, accuracy))\n train_summary_writer.add_summary(summaries, step)", "def train(self, x={}, **kwargs):\n return 0", "def train_step(x_batch, y_batch, batch_idx, epoch_idx):\n feed_dict = {\n cnn.input_x: x_batch,\n cnn.input_y: y_batch,\n cnn.dropout_keep_prob: 0.75\n }\n _, step, summaries, loss, accuracy = sess.run(\n [step_update, global_step, summary_op, cnn.loss, cnn.accuracy],\n feed_dict)\n\n print(\"Epoch {}, Batch_no {} : loss {:g}, acc {:g}\".format(epoch_idx, batch_idx, loss, accuracy))\n train_summary_writer.add_summary(summaries, step)", "def train(self):\n self.epoch = 0\n self.step = 0\n self.start_time = time.time()\n for self.epoch in range(self.opt.num_epochs):\n self.run_epoch()\n if (self.epoch + 1) % self.opt.save_frequency == 0:\n self.save_model()", "def run_train_step(self, sess, summaryWriter):\n\n # Build feed dictionary\n input_feed = {\n self.handle : self.train_handle,\n self.isTraining : True}\n\n # Output feed\n output_feed = [\n self.loss, \n self.accuracy, \n self.global_step, \n self.summaries, \n self.update]\n\n\n # Run train step\n loss, accuracy, global_step, summaries, _ = sess.run(output_feed, input_feed) \n\n # All summaries in the graph are added to Tensorboard\n summaryWriter.add_summary(summaries, global_step)\n\n return loss, accuracy, global_step", "def train_step(x_batch, y_batch):\n feed_dict = {\n cnn.input_x: x_batch,\n cnn.input_y: y_batch,\n cnn.dropout_keep_prob: FLAGS.dropout_keep_prob\n }\n _, step, loss, accuracy = sess.run(\n [train_op, global_step, cnn.loss, cnn.accuracy],\n feed_dict)", "def train_naive(): # add arguments as needed\n pass", "def _train_step(self):\n if self._replay.add_count > self.min_replay_history:\n if self.training_steps % self.update_period == 0:\n self._sample_from_replay_buffer()\n (self._rng, 
self.optimizer_state, self.online_params,\n loss, quantile_loss, coherence_loss, orthogonality_loss) = train(\n self.network_def,\n self.online_params,\n self.target_network_params,\n self.optimizer,\n self.optimizer_state,\n self.replay_elements['state'],\n self.replay_elements['action'],\n self.replay_elements['next_state'],\n self.replay_elements['reward'],\n self.replay_elements['terminal'],\n self.num_tau_samples,\n self.num_tau_prime_samples,\n self.num_quantile_samples,\n self.cumulative_gamma,\n self.double_dqn,\n self.kappa,\n self._rng,\n self._coherence_weight,\n self._option,\n self._use_ortho_loss,\n self._use_cohe_loss,\n self._tau,\n self._alpha,\n self._clip_value_min)\n if (self.summary_writer is not None and\n self.training_steps > 0 and\n self.training_steps % self.summary_writing_frequency == 0):\n if self._use_ortho_loss and self._use_cohe_loss:\n summary = tf.compat.v1.Summary(value=[\n tf.compat.v1.Summary.Value(\n tag='Losses/Combined', simple_value=loss),\n tf.compat.v1.Summary.Value(\n tag='Losses/Quantile', simple_value=quantile_loss),\n tf.compat.v1.Summary.Value(\n tag='Losses/Incoherence', simple_value=coherence_loss),\n tf.compat.v1.Summary.Value(\n tag='Losses/Orthogonality',\n simple_value=orthogonality_loss),\n ])\n elif self._use_ortho_loss and not self._use_cohe_loss:\n summary = tf.compat.v1.Summary(value=[\n tf.compat.v1.Summary.Value(\n tag='Losses/Combined', simple_value=loss),\n tf.compat.v1.Summary.Value(\n tag='Losses/Quantile', simple_value=quantile_loss),\n tf.compat.v1.Summary.Value(\n tag='Losses/Orthogonality', simple_value=orthogonality_loss),\n ])\n elif self._use_cohe_loss and not self._use_ortho_loss:\n summary = tf.compat.v1.Summary(value=[\n tf.compat.v1.Summary.Value(\n tag='Losses/Combined', simple_value=loss),\n tf.compat.v1.Summary.Value(\n tag='Losses/Quantile', simple_value=quantile_loss),\n tf.compat.v1.Summary.Value(\n tag='Losses/Incoherence', simple_value=coherence_loss),\n ])\n self.summary_writer.add_summary(summary, self.training_steps)\n if self.training_steps % self.target_update_period == 0:\n self._sync_weights()\n\n self.training_steps += 1", "def train_step(x_batch, y_batch):\n feed_dict = {\n cnn.input_x: x_batch,\n cnn.input_y: y_batch,\n cnn.dropout_keep_prob: self.cfg['dropout_keep_prob']\n }\n _, step, summaries, loss, accuracy = sess.run(\n [train_op, global_step, train_summary_op, cnn.loss, cnn.accuracy],\n feed_dict)\n time_str = datetime.datetime.now().isoformat()\n self.logger.debug(\"{}: step {}, loss {:g}, acc {:g}\".format(time_str, step, loss, accuracy))\n train_summary_writer.add_summary(summaries, step)", "def train(self):\n\t\tself.model.fit(self.training_data, self.training_labels)", "def train_step(x_batch, pos_batch, neg_batch):\n feed_dict = {\n rnn.input_x: x_batch,\n rnn.input_xpos: pos_batch,\n rnn.input_xneg: neg_batch,\n rnn.real_len_x: real_len(x_batch),\n rnn.real_len_xpos: real_len(pos_batch),\n rnn.real_len_xneg: real_len(neg_batch),\n rnn.dropout_keep_prob: FLAGS.dropout_keep_prob,\n rnn.batch_size: len(x_batch),\n }\n _, step, summaries, loss, accuracy = sess.run(\n [train_op, global_step, train_summary_op, rnn.loss, rnn.accuracy],\n feed_dict)\n time_str = datetime.datetime.now().isoformat()\n logger.info(\"{}: step {}, loss {:g}, acc {:g}\".format(time_str, step, loss, accuracy))\n train_summary_writer.add_summary(summaries, step)", "def train(self, batch_training=False):\n raise NotImplementedError", "def train_step(x_batch, y_batch):\n feed_dict = {\n cnn.input_x: x_batch,\n 
cnn.input_y: y_batch,\n cnn.dropout_keep_prob: FLAGS.dropout_keep_prob\n }\n _, step, summaries, loss, accuracy = sess.run(\n [train_op, global_step, train_summary_op, cnn.loss, cnn.accuracy],\n feed_dict)\n time_str = datetime.datetime.now().isoformat()\n print(\"{}: step {}, loss {:g}, acc {:g}\".format(time_str, step, loss, accuracy))\n train_summary_writer.add_summary(summaries, step)", "def train_test_model_batch():\n train=learning.Train_kmer_clf()\n train.run()", "def train_step(x_batch, y_batch):\n feed_dict = {\n cnn.input_x: x_batch,\n cnn.input_y: y_batch,\n cnn.dropout_keep_prob: FLAGS.dropout_keep_prob,\n }\n _, step, loss = sess.run([train_op, global_step, cnn.loss], feed_dict)", "def train_step(x_batch, y_batch):\n feed_dict = {\n cnn.input_x: x_batch,\n cnn.input_y: y_batch,\n cnn.dropout_keep_prob: opts[\"dropout_keep_prob\"]\n }\n _, step, loss, accuracy = sess.run(\n [train_op, global_step, cnn.loss, cnn.accuracy],\n feed_dict)", "def train(self, X, y):", "def train_step(input, target, model, loss_fn, optimizer, **unused):\r\n model.train()\r\n output = model(input)\r\n loss = loss_fn(output, target)\r\n optimizer.backward(loss)\r\n optimizer.step()", "def train():\n import trace\n trace.train()", "def train(self):\n raise NotImplemented()", "def train_step(x_batch, y_batch):\r\n feed_dict = {\r\n cnn.input_x: x_batch,\r\n cnn.input_y: y_batch,\r\n cnn.dropout_keep_prob: FLAGS.dropout_keep_prob\r\n }\r\n _, step, summaries, loss, accuracy = sess.run(\r\n [train_op, global_step, train_summary_op, cnn.loss, cnn.accuracy],\r\n feed_dict)\r\n time_str = datetime.datetime.now().isoformat()\r\n print(\"{}: step {}, loss {:g}, acc {:g}\".format(time_str, step, loss, accuracy))\r\n train_summary_writer.add_summary(summaries, step)", "def train_step(x_batch, y_batch, learning_rate):\n feed_dict = {\n nn.input_x: x_batch,\n nn.input_y: y_batch,\n nn.dropout_keep_prob: 0.8,\n nn.learning_rate: learning_rate\n }\n _, step, loss, accuracy = sess.run(\n [train_op, global_step, nn.loss, nn.accuracy], feed_dict)\n time_str = datetime.datetime.now().isoformat()\n if step % 100 == 0:\n print(\"{}: step {}, lr {:g}, loss {:g}, acc {:g}\".format(time_str, step, learning_rate, loss, accuracy))", "def on_train_end(self):", "def onTrainStepTaken(self, agent):\n pass", "def train(self, X):\n self.X = X", "def train(self, X, Y):\n if self.train_step:\n Model.__X__ = X\n Model.__Y__ = Y\n\n self.train_step.run(session=Model.session, feed_dict={Model.x: X, Model.y: Y})", "def train(self, X, y):\n pass", "def test(self):\n self.training = False", "def forward_train(self, *args, **kwargs):\n raise NotImplementedError('This interface should not be used in current training schedule. 
Please use `train_step` for training.')", "def train_step(x_batch, y_batch, len_batch):\n feed_dict = {\n cnn.input_x: x_batch,\n cnn.input_y: y_batch,\n cnn.doc_len: len_batch,\n cnn.dropout_keep_prob: dropout_keep_prob,\n }\n _, step, summaries, loss, accuracy, acc_max = sess.run(\n [train_op, global_step, train_summary_op, cnn.loss, cnn.accuracy_sigmoid, cnn.accuracy_max],\n feed_dict)\n time_str = datetime.datetime.now().isoformat()\n print((\"{}: step {}, loss {:g}, acc {:g}, acc_max {:g}\".format(time_str, step, loss, accuracy, acc_max)))\n train_summary_writer.add_summary(summaries, step)", "def on_predict_batch_begin(self, step, logs=None):", "def train_step(self):\n step_actions = self.get_step_actions()\n *_, dones, _ = tf.numpy_function(\n self.step_envs, [step_actions, True, True], self.batch_dtypes\n )\n for done_idx in tf.where(dones):\n gradient_steps = self.gradient_steps or self.episode_steps[done_idx[0]]\n self.update_weights(gradient_steps)\n self.episode_steps.assign(\n (self.episode_steps + self.step_increment) * (1 - dones)\n )", "def on_train_begin(self, logs=None):", "def on_train_begin(self, logs=None):", "def train_step(self, batch_sample, epoch_it):\n batch_x = batch_sample['waveform']\n data_type = batch_sample['data_type']\n batch_target = {\n 'ov': batch_sample['ov'],\n 'sed': batch_sample['sed_label'],\n 'doa': batch_sample['doa_label'],\n }\n if self.cuda:\n batch_x = batch_x.cuda(non_blocking=True)\n batch_target['sed'] = batch_target['sed'].cuda(non_blocking=True)\n batch_target['doa'] = batch_target['doa'].cuda(non_blocking=True)\n\n\n self.optimizer.zero_grad()\n self.af_extractor.train()\n self.model.train()\n\n (batch_x, batch_target) = self.af_extractor((batch_x, batch_target,'train', data_type))\n batch_x = (batch_x - self.mean) / self.std\n if self.cfg['training']['model'] == 'SELD_ATT' or self.cfg['training']['model'] == 'SELD_ATT_LIGHT':\n pred, pred_constraint = self.model(batch_x)\n if self.cfg['training']['model'] == 'EINV2':\n pred = self.model(batch_x)\n if self.cfg['training']['model'] == 'SELD_ATT' or self.cfg['training']['model'] == 'SELD_ATT_LIGHT':\n loss_dict = self.losses.calculate_attention(pred, pred_constraint,batch_target, epoch_it,self.model)\n if self.cfg['training']['model'] == 'EINV2':\n loss_dict = self.losses.calculate(pred, batch_target, epoch_it, self.model)\n\n loss_dict[self.cfg['training']['loss_type']].backward(retain_graph=False)\n self.optimizer.step()\n\n self.train_losses['train_loss_all'] += loss_dict['all'].item()\n self.train_losses['train_loss_sed'] += loss_dict['sed'].item()\n self.train_losses['train_loss_doa'] += loss_dict['doa'].item()\n\n if self.cfg['training']['weight_constraints']:\n self.train_losses['train_loss_weight_orthogonal'] += loss_dict['loss_weight_orthogonal'].item()\n\n if self.cfg['training']['weight_constraints_1']:\n self.train_losses['train_loss_weight_orthogonal_1'] += loss_dict['loss_weight_orthogonal_1'].item()\n\n if self.cfg['training']['layer_constraints']:\n self.train_losses['train_loss_layer_orthogonal'] += loss_dict['loss_layer_orthogonal'].item()\n\n if self.cfg['training']['layer_constraints_1']:\n self.train_losses['train_loss_layer_orthogonal_1'] += loss_dict['loss_layer_orthogonal_1'].item()\n\n if self.cfg['training']['smoothness_loss']:\n self.train_losses['train_loss_doa_smoothness'] += loss_dict['loss_doa_smoothness'].item()", "def train_one_epoch(self):\n\t\tself.model.train()\n\t\ttrain_loss = 0\n\n\t\tfor batch_idx, data in 
enumerate(self.data_loader.train_loader):\n\t\t\tInput = data[0].float().to(self.device)\n\t\t\tOutput = data[1].float().to(self.device)\n\n\t\t\tself.optimizer.zero_grad()\n\t\t\tloss = self.loss(self.model(Input)[:,0],Output)\n\t\t\ttrain_loss += loss.item()\n\t\t\tloss.backward()\n\t\t\tself.optimizer.step()\n\t\t\tself.current_iteration += 1\n\n\t\tself.summary_writer.add_scalar('training/loss', loss.item(), self.current_epoch)", "def training(self) -> None:\n self.compile_model()\n self.train_epoch()\n self.agent.save()", "def _training_before_hook(self):\n pass", "def train_step(self, params, train_op, train_summary_op, train_summary_writer):\n dkt = self.train_dkt\n sess = self.sess\n global_step = self.global_step\n\n feed_dict = {dkt.input_data: params['input_x'],\n dkt.target_id: params['target_id'],\n dkt.target_correctness: params['target_correctness'],\n dkt.max_steps: params['max_len'],\n dkt.sequence_len: params['seq_len'],\n dkt.keep_prob: self.config.modelConfig.dropout_keep_prob,\n dkt.batch_size: self.config.batch_size}\n\n _, step, summaries, loss, binary_pred, pred, target_correctness = sess.run(\n [train_op, global_step, train_summary_op, dkt.loss, dkt.binary_pred, dkt.pred, dkt.target_correctness],\n feed_dict)\n\n auc, accuracy = gen_metrics(params['seq_len'], binary_pred, pred, target_correctness)\n\n time_str = datetime.datetime.now().isoformat()\n print(\"train: {}: step {}, loss {}, acc {}, auc: {}\".format(time_str, step, loss, accuracy, auc))\n train_summary_writer.add_summary(summaries, step)" ]
[ "0.8901386", "0.88492393", "0.86054355", "0.83921003", "0.8233459", "0.8093186", "0.8093186", "0.8093186", "0.8093186", "0.8093186", "0.79063976", "0.78695196", "0.7857661", "0.7844479", "0.7824819", "0.78059494", "0.7790117", "0.7725741", "0.7722361", "0.7720089", "0.77101547", "0.7614437", "0.76128316", "0.7573674", "0.75181234", "0.7517958", "0.7502613", "0.74946576", "0.7485884", "0.7419394", "0.7408162", "0.7385572", "0.73767316", "0.73444766", "0.73392105", "0.7328757", "0.7319089", "0.7305216", "0.72920346", "0.7291782", "0.7284193", "0.72716326", "0.7266651", "0.72644603", "0.72638094", "0.72598886", "0.7256866", "0.72179145", "0.7202533", "0.7201847", "0.7181789", "0.71783614", "0.71770155", "0.71756923", "0.71656984", "0.7150644", "0.7148445", "0.7146288", "0.71336985", "0.7116938", "0.71115315", "0.71041423", "0.7095207", "0.7083441", "0.7081787", "0.70786285", "0.7074316", "0.7070292", "0.7065663", "0.70606613", "0.7057703", "0.70547724", "0.7047615", "0.7043731", "0.70366484", "0.7029205", "0.7028132", "0.7017181", "0.69984096", "0.699478", "0.6992905", "0.699059", "0.6980158", "0.6948694", "0.69418865", "0.69319844", "0.6917795", "0.69137514", "0.69094604", "0.6900487", "0.68910044", "0.6890741", "0.6877818", "0.6876663", "0.6876663", "0.6871473", "0.6867063", "0.6865395", "0.68529624", "0.683787" ]
0.7098244
62
Evaluates model on a dev set
def dev_step(x_batch, y_batch, writer=None):
    feed_dict = {
        cnn.x: x_batch,
        cnn.y_: y_batch,
        cnn.keep_prob: 1.0
    }
    step, summaries, loss, accuracy = sess.run(
        [global_step, dev_summary_op, cnn.cross_entropy, cnn.accuracy],
        feed_dict)
    time_str = datetime.datetime.now().isoformat()
    print("{}: step {}, loss {:g}, acc {:g}".format(time_str, step, loss, accuracy))
    if writer:
        writer.add_summary(summaries, step)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def evaluate():\n log.info('Loading dev data...')\n if args.version_2:\n dev_data = SQuAD('dev', version='2.0')\n else:\n dev_data = SQuAD('dev', version='1.1')\n (_, _), (data_file_name, _) \\\n = dev_data._data_file[dev_data._version][dev_data._segment]\n dev_data_path = os.path.join(dev_data._root, data_file_name)\n\n if args.debug:\n sampled_data = [dev_data[0], dev_data[1], dev_data[2]]\n dev_data = mx.gluon.data.SimpleDataset(sampled_data)\n log.info('Number of records in dev data: %d', len(dev_data))\n\n dev_data_features = preprocess_dataset(\n tokenizer, dev_data, vocab=vocab, max_seq_length=args.max_seq_length,\n doc_stride=args.doc_stride, num_workers=args.num_workers,\n max_query_length=args.max_query_length, load_from_pickle=args.load_pickle,\n feature_file=args.dev_dataset_file)\n\n dev_data_input = convert_full_features_to_input_features(dev_data_features)\n log.info('The number of examples after preprocessing: %d', len(dev_data_input))\n\n dev_dataloader = mx.gluon.data.DataLoader(dev_data_input, batchify_fn=batchify_fn,\n num_workers=4, batch_size=args.test_batch_size,\n shuffle=False, last_batch='keep')\n\n log.info('start prediction')\n\n all_results = collections.defaultdict(list)\n\n epoch_tic = time.time()\n total_num = 0\n for (batch_id, data) in enumerate(dev_dataloader):\n data_list = list(split_and_load(data, ctx))\n for splited_data in data_list:\n example_ids, inputs, token_types, valid_length, p_mask, _, _, _ = splited_data\n total_num += len(inputs)\n outputs = net_eval(inputs, token_types, valid_length, p_mask=p_mask)\n example_ids = example_ids.asnumpy().tolist()\n for c, example_ids in enumerate(example_ids):\n result = RawResultExtended(start_top_log_probs=outputs[0][c].asnumpy().tolist(),\n start_top_index=outputs[1][c].asnumpy().tolist(),\n end_top_log_probs=outputs[2][c].asnumpy().tolist(),\n end_top_index=outputs[3][c].asnumpy().tolist(),\n cls_logits=outputs[4][c].asnumpy().tolist())\n all_results[example_ids].append(result)\n if batch_id % args.log_interval == 0:\n log.info('Batch: %d/%d', batch_id + 1, len(dev_dataloader))\n\n epoch_toc = time.time()\n log.info('Time cost=%2f s, Thoughput=%.2f samples/s', epoch_toc - epoch_tic,\n total_num / (epoch_toc - epoch_tic))\n\n log.info('Get prediction results...')\n\n all_predictions = collections.OrderedDict()\n all_nbest_json = collections.OrderedDict()\n scores_diff_json = collections.OrderedDict()\n for features in dev_data_features:\n results = all_results[features[0].example_id]\n example_qas_id = features[0].qas_id\n score_diff, best_non_null_entry, nbest_json = predict_extended(\n features=features, results=results, n_best_size=args.n_best_size,\n max_answer_length=args.max_answer_length, start_n_top=args.start_top_n,\n end_n_top=args.end_top_n)\n scores_diff_json[example_qas_id] = score_diff\n all_predictions[example_qas_id] = best_non_null_entry\n all_nbest_json[example_qas_id] = nbest_json\n\n output_prediction_file = os.path.join(args.output_dir, 'predictions.json')\n output_nbest_file = os.path.join(args.output_dir, 'nbest_predictions.json')\n output_null_log_odds_file = os.path.join(args.output_dir, 'null_odds.json')\n\n with open(output_prediction_file, 'w') as writer:\n writer.write(json.dumps(all_predictions, indent=4) + '\\n')\n with open(output_nbest_file, 'w') as writer:\n writer.write(json.dumps(all_nbest_json, indent=4) + '\\n')\n with open(output_null_log_odds_file, 'w') as writer:\n writer.write(json.dumps(scores_diff_json, indent=4) + '\\n')\n\n if os.path.exists(sys.path[0] + 
'/evaluate-v2.0.py'):\n arguments = [\n dev_data_path, output_prediction_file, '--na-prob-thresh',\n str(args.null_score_diff_threshold)\n ]\n if args.version_2:\n arguments += ['--na-prob-file', output_null_log_odds_file]\n subprocess.call([sys.executable, sys.path[0] + '/evaluate-v2.0.py'] + arguments)\n else:\n log.info('Please download evaluate-v2.0.py to get evaluation results for SQuAD. '\n 'Check index.rst for the detail.')", "def eval(self):\n self.train(mode=False)", "def evaluate(model, g, val_nid, device):\n model.eval()\n nfeat = g.ndata['features']\n labels = g.ndata['labels']\n with th.no_grad():\n pred = model.module.inference(g, nfeat, device, args.batch_size, args.num_workers)\n model.train()\n test_acc = Accuracy()\n return test_acc(th.softmax(pred[val_nid], -1), labels[val_nid].to(pred.device))", "def evaluate(X_test, y_test):\n # batch size is 16 for evaluation\n batch_size = 16\n\n # Load Model\n model = load_model('model/model.h5')\n return model.evaluate(X_test, y_test, batch_size, verbose = 1)", "def evaluate_model(model, ds_valid):\n print(\"-- Evaluate Model:\")\n for features, labels in ds_valid:\n valid_step(model, features, labels)\n logs = \"\\nValid Loss: {}, Valid Accuracy: {}\"\n tf.print(tf.strings.format(logs, (valid_loss.result(), valid_metric.result())))\n valid_loss.reset_states()\n train_metric.reset_states()\n valid_metric.reset_states()", "def evaluate(self, output_dir, test_data, device, verbose_logging=False):\r\n tokenizer = self.tokenizer\r\n # device = torch.device(\"cuda:0\")\r\n model = self.model\r\n model.to(device)\r\n args = self.args\r\n\r\n # # reassgin unique_id for features to keep order for federated learning situation\r\n # unique_id = 1000000000\r\n # for feature in self.test_dl.features:\r\n # feature.unique_id = unique_id\r\n # unique_id += 1\r\n\r\n examples = test_data.examples\r\n features = test_data.features\r\n\r\n eval_loss = 0.0\r\n nb_eval_steps = 0\r\n model.eval()\r\n\r\n # if args.n_gpu > 1:\r\n # model = torch.nn.DataParallel(model)\r\n\r\n if self.args.fp16:\r\n from torch.cuda import amp\r\n\r\n all_results = []\r\n for batch in tqdm(test_data, disable=args.silent, desc=\"Running Evaluation\"):\r\n batch = tuple(t.to(device) for t in batch)\r\n\r\n with torch.no_grad():\r\n inputs = {\r\n \"input_ids\": batch[1],\r\n \"attention_mask\": batch[2],\r\n \"token_type_ids\": batch[3],\r\n }\r\n\r\n if self.args.model_type in [\r\n \"xlm\",\r\n \"roberta\",\r\n \"distilbert\",\r\n \"camembert\",\r\n \"electra\",\r\n \"xlmroberta\",\r\n \"bart\",\r\n ]:\r\n del inputs[\"token_type_ids\"]\r\n\r\n example_indices = batch[4]\r\n\r\n if args.model_type in [\"xlnet\", \"xlm\"]:\r\n inputs.update({\"cls_index\": batch[5], \"p_mask\": batch[6]})\r\n\r\n if self.args.fp16:\r\n with amp.autocast():\r\n outputs = model(**inputs)\r\n eval_loss += outputs[0].mean().item()\r\n else:\r\n outputs = model(**inputs)\r\n eval_loss += outputs[0].mean().item()\r\n begin_idx = len(all_results)\r\n for i, _ in enumerate(example_indices):\r\n eval_feature = features[begin_idx + i]\r\n unique_id = int(eval_feature.unique_id)\r\n if args.model_type in [\"xlnet\", \"xlm\"]:\r\n # XLNet uses a more complex post-processing procedure\r\n result = RawResultExtended(\r\n unique_id=unique_id,\r\n start_top_log_probs=to_list(outputs[0][i]),\r\n start_top_index=to_list(outputs[1][i]),\r\n end_top_log_probs=to_list(outputs[2][i]),\r\n end_top_index=to_list(outputs[3][i]),\r\n cls_logits=to_list(outputs[4][i]),\r\n )\r\n else:\r\n result = RawResult(\r\n 
unique_id=unique_id, start_logits=to_list(outputs[0][i]), end_logits=to_list(outputs[1][i]),\r\n )\r\n all_results.append(result)\r\n\r\n nb_eval_steps += 1\r\n\r\n eval_loss = eval_loss / nb_eval_steps\r\n\r\n prefix = \"test\"\r\n os.makedirs(output_dir, exist_ok=True)\r\n\r\n output_prediction_file = os.path.join(output_dir, \"predictions_{}.json\".format(prefix))\r\n output_nbest_file = os.path.join(output_dir, \"nbest_predictions_{}.json\".format(prefix))\r\n output_null_log_odds_file = os.path.join(output_dir, \"null_odds_{}.json\".format(prefix))\r\n\r\n if args.model_type in [\"xlnet\", \"xlm\"]:\r\n # XLNet uses a more complex post-processing procedure\r\n (all_predictions, all_nbest_json, scores_diff_json, out_eval,) = write_predictions_extended(\r\n examples,\r\n features,\r\n all_results,\r\n args.n_best_size,\r\n args.max_answer_length,\r\n output_prediction_file,\r\n output_nbest_file,\r\n output_null_log_odds_file,\r\n None,\r\n model.config.start_n_top,\r\n model.config.end_n_top,\r\n True,\r\n tokenizer,\r\n verbose_logging,\r\n )\r\n else:\r\n all_predictions, all_nbest_json, scores_diff_json = write_predictions(\r\n examples,\r\n features,\r\n all_results,\r\n args.n_best_size,\r\n args.max_answer_length,\r\n args.do_lower_case,\r\n output_prediction_file,\r\n output_nbest_file,\r\n output_null_log_odds_file,\r\n verbose_logging,\r\n True,\r\n args.null_score_diff_threshold,\r\n )\r\n\r\n return all_predictions, all_nbest_json, scores_diff_json, eval_loss", "def model_evaluate(model,x_train,n_y_array,x_val, vald_array):\n\n scores = model.evaluate(x_train, n_y_array, verbose=1)\n\n scores2 = model.evaluate(x_val, vald_array, verbose=1)\n\n\n print(\"for traininf set\")\n\n print(\"%s: %.2f%%\" % (model.metrics_names[1], scores[1]*100))\n\n print(\"%s: %.2f%%\" % (model.metrics_names[0], scores[0]))\n\n\n\n print(\"for validation set : \") \n\n print(\"%s: %.2f%%\" % (model.metrics_names[1], scores2[1]*100))\n\n print(\"%s: %.2f%%\" % (model.metrics_names[0], scores2[0]))", "def evaluate_golden_run_model(self):\n return self.mm.get_org_model().evaluate(self.batched_ds)", "def evaluate_model(model, testset):\n\n # Sort data by top level label to ease inspection\n testset = testset.sort_using_layer(-1, reverse=True)\n\n # Feed the samples to the model to obtain each layers' activations\n v = testset.get_layer(0)\n hs = model.transform(v)[1:]\n\n # Read model weights\n ws = [params['w'] for params in model.parameters]\n del params\n\n # Take the (hidden) labels from the data set\n ls = testset.get_layers()[1:]\n\n # In each layer, reorder and invert neurons to match best with the labels\n for i in range(len(ls)):\n hs[i], ws[i] = align_with_labels(ls[i], hs[i], ws[i])\n del i\n\n # Measure correlations, etcetera\n metrics = compare(ls, hs)\n\n # Simply return a dict with all used variables\n return locals()", "def _set_eval(self):\n\n if self.model.__dict__['training']:\n self.model.eval()", "def evaluate(self):\n\n\t\t## We should be evaluating on dev dataset as well, so commenting x_test\n\t\t#self.model_score = self.model.evaluate(self.x_test, self.y_test_oh, batch_size=2048)\n\t\tself.model_score = self.model.evaluate(self.x_dev, self.y_dev_oh, batch_size=2048)\n\t\tprint(\"%s score = %f\\n\" %(self.modelName, self.model_score[1]))\n\n\t\t##Saving atucal vs predicted predictions\n\t\t##np.argmax returns the index where it see's 1 in the row\n\t\t#y_pred = np.argmax(self.model.predict(self.x_test, batch_size=2048), axis=1)\n\t\ty_pred = 
np.argmax(self.model.predict(self.x_dev, batch_size=2048), axis=1)\n\n\t\t## vstack will stack them in 2 rows, so we use Trasnpose to get them in column stack\n\t\t#output_predict = np.vstack((np.argmax(self.y_test_oh, axis=1), y_pred)).T\n\t\toutput_predict = np.vstack((np.argmax(self.y_dev_oh, axis=1), y_pred)).T\n\t\toutputFile = self.resultDir + \"/outputPredict.csv\" \n\t\tnp.savetxt(outputFile, output_predict, fmt=\"%5.0f\", delimiter=\",\")\n\n\t\t##Error Analysis of the prediction\n\t\terrorAnalysis(outputFile)\n\n\t\treturn self.model_score", "def evaluate_model(sess, model, data_set):\n total_cost = 0.0\n total_r_cost = 0.0\n total_kl_cost = 0.0\n for batch in range(data_set.num_batches):\n unused_orig_x, x, s = data_set.get_batch(batch)\n feed = {model.input_data: x, model.sequence_lengths: s}\n (cost, r_cost,\n kl_cost) = sess.run([model.cost, model.r_cost, model.kl_cost], feed)\n total_cost += cost\n total_r_cost += r_cost\n total_kl_cost += kl_cost\n\n total_cost /= (data_set.num_batches)\n total_r_cost /= (data_set.num_batches)\n total_kl_cost /= (data_set.num_batches)\n return (total_cost, total_r_cost, total_kl_cost)", "def evaluate():\n\tmodel.eval()\n\tstddev = 1 # And mean=0\n\tfor batch_idx, (data, _) in enumerate(syn_test_loader):\n\t\tdata = data.cuda()\n\t\tif batch_idx == 0:\n\t\t\tnoise = torch.autograd.Variable(torch.randn(batch_size, bottleneck).cuda() * stddev)\n\t\t\tsample_representation(\"orig_nat\", data, noise)\n\t\t\tsample_representation(\"natural\", data, noise)\n\t\t\tsample_representation(\"orig_syn\", data, noise)\n\t\t\tsample_representation(\"synth\", data, noise)", "def evaluate(args, dev_dataset, model):\n\n if args.dynamic_batching:\n dev_sampler = CustomBatchSampler(dev_dataset, args.dev_batch_size)\n dev_dataloader = DataLoader(\n dev_dataset,\n batch_sampler=dev_sampler,\n num_workers=0,\n collate_fn=dynamic_padding_collate_fn\n )\n else:\n dev_sampler = SequentialSampler(dev_dataset)\n dev_dataloader = DataLoader(dev_dataset, sampler=dev_sampler,\n batch_size=args.dev_batch_size, num_workers=0)\n\n model.eval()\n loss_fn = nn.CrossEntropyLoss(ignore_index=0)\n iterator = tqdm(dev_dataloader, desc=\"Evaluation\", smoothing=0.05)\n loss_cum = None\n num_batch = 0\n for step, batch_cpu in enumerate(iterator):\n num_batch += 1\n\n batch = tuple(t.to(args.device) for t in batch_cpu)\n inputs = {\n \"input_ids\": batch[0],\n \"attention_mask\": batch[1],\n \"token_type_ids\": batch[2],\n }\n\n with torch.no_grad():\n outputs = model(**inputs)\n\n # Calculate loss of just the question part\n q_mask = (inputs['token_type_ids'] == 2)\n masked_labels = inputs['input_ids'].masked_fill(~q_mask, 0)\n shift_labels = masked_labels[..., 1:].contiguous()\n\n lm_logits = outputs[0]\n shift_logits = lm_logits[..., : -1, :].contiguous()\n loss = loss_fn(shift_logits.view(-1, shift_logits.size(-1)),\n shift_labels.view(-1))\n\n if loss_cum is None:\n loss_cum = loss\n else:\n loss_cum += loss\n\n model.train()\n\n return loss_cum.item() / num_batch", "def test_evaluate(self):\n # Check build does not raise errors\n dataset = KDDCupDataset()\n dataset.create_fixed_samples(\n *self.data, samples_num=1, partition_sizes=self.partition_sizes)\n dataset.set_current_sample(0)\n model = self.MODEL(dataset, **self.model_arguments)\n model.fit(training_epochs=50)\n metric = model.evaluate('test')\n self.assertLessEqual(0, metric)\n self.assertGreaterEqual(1, metric)", "def evaluate(x_dev, y_dev, sess):\n data_len = len(x_dev)\n batch_eval = 
dataHelper.batch_iter_eval(x_dev, y_dev)\n total_loss = 0.0\n total_acc = 0.0\n for x_batch_eval, y_batch_eval in batch_eval:\n batch_len = len(x_batch_eval)\n feed_dict = {\n model.input_x: x_batch_eval,\n model.input_y: y_batch_eval,\n model.dropout_keep_prob: 1.0\n }\n loss, accuracy = sess.run(\n [model.loss, model.accuracy],\n feed_dict)\n total_loss += loss * batch_len\n total_acc += accuracy * batch_len\n # time_str = datetime.datetime.now().isoformat()\n # print(\"{}: loss {:g}, acc {:g}\".format(time_str, total_loss / data_len, total_acc / data_len))\n return total_loss / data_len, total_acc / data_len", "def evaluate(self):\n self.training = False", "def evaluate(self, train_set=\"train_set\", test_set=\"test_set\", targets=\"targets\", k=10):\n\n test_set = self.cache.fetch(test_set) if isinstance(test_set, str) else test_set\n\n # Predict\n preds = self.run(dataset=train_set, targets=targets, k=k)\n\n # Evaluate model\n print(\"evaluating model ...\")\n score = evaluate(preds, test_set)\n print(\"MAP@{}: {:.5f}\\n\".format(k, score))\n\n return score", "def evaluate(cfg: DictConfig):\n\n # suppress TensorFlow and DALI warnings\n suppress_warnings()\n\n if cfg.USE_MULTI_GPUS.VALUE:\n # change number of visible gpus for evaluation\n set_gpus(cfg.USE_MULTI_GPUS.GPU_IDS)\n # update batch size according to available gpus\n data_generator.update_batch_size(cfg)\n\n if cfg.OPTIMIZATION.AMP:\n print(\"Enabling Automatic Mixed Precision(AMP) training\")\n policy = mixed_precision.Policy('mixed_float16')\n mixed_precision.set_global_policy(policy)\n\n if cfg.OPTIMIZATION.XLA:\n print(\"Enabling Automatic Mixed Precision(XLA) training\")\n tf.config.optimizer.set_jit(True)\n\n # create model\n strategy = None\n if cfg.USE_MULTI_GPUS.VALUE:\n # multi gpu training using tensorflow mirrored strategy\n strategy = tf.distribute.MirroredStrategy(\n cross_device_ops=tf.distribute.HierarchicalCopyAllReduce()\n )\n print('Number of visible gpu devices: {}'.format(strategy.num_replicas_in_sync))\n with strategy.scope():\n optimizer = tf.keras.optimizers.Adam(\n learning_rate=cfg.HYPER_PARAMETERS.LEARNING_RATE\n ) # optimizer\n if cfg.OPTIMIZATION.AMP:\n optimizer = mixed_precision.LossScaleOptimizer(\n optimizer,\n dynamic=True\n )\n dice_coef = DiceCoefficient(post_processed=True, classes=cfg.OUTPUT.CLASSES)\n dice_coef = tf.keras.metrics.MeanMetricWrapper(name=\"dice_coef\", fn=dice_coef)\n model = prepare_model(cfg, training=True)\n else:\n optimizer = tf.keras.optimizers.Adam(\n learning_rate=cfg.HYPER_PARAMETERS.LEARNING_RATE\n ) # optimizer\n if cfg.OPTIMIZATION.AMP:\n optimizer = mixed_precision.LossScaleOptimizer(\n optimizer,\n dynamic=True\n )\n dice_coef = DiceCoefficient(post_processed=True, classes=cfg.OUTPUT.CLASSES)\n dice_coef = tf.keras.metrics.MeanMetricWrapper(name=\"dice_coef\", fn=dice_coef)\n model = prepare_model(cfg, training=True)\n\n model.compile(\n optimizer=optimizer,\n loss=unet3p_hybrid_loss,\n metrics=[dice_coef],\n )\n\n # weights model path\n checkpoint_path = join_paths(\n cfg.WORK_DIR,\n cfg.CALLBACKS.MODEL_CHECKPOINT.PATH,\n f\"{cfg.MODEL.WEIGHTS_FILE_NAME}.hdf5\"\n )\n\n assert os.path.exists(checkpoint_path), \\\n f\"Model weight's file does not exist at \\n{checkpoint_path}\"\n\n # TODO: verify without augment it produces same results\n # load model weights\n model.load_weights(checkpoint_path, by_name=True, skip_mismatch=True)\n model.summary()\n\n # data generators\n val_generator = data_generator.get_data_generator(cfg, \"VAL\", strategy)\n 
validation_steps = data_generator.get_iterations(cfg, mode=\"VAL\")\n\n # evaluation metric\n evaluation_metric = \"dice_coef\"\n if len(model.outputs) > 1:\n evaluation_metric = f\"{model.output_names[0]}_dice_coef\"\n\n result = model.evaluate(\n x=val_generator,\n steps=validation_steps,\n workers=cfg.DATALOADER_WORKERS,\n return_dict=True,\n )\n\n # return computed loss, validation accuracy, and it's metric name\n return result, evaluation_metric", "def evaluate(cfg: DictConfig):\n\n experiments = cfg.get('experiment_type', f'{cfg.model.name}_only')\n fixed_t0 = cfg.get('fixed_t0', False)\n ext = '_fixedT0' if fixed_t0 else ''\n\n base_dir = cfg.device.root\n datasource = cfg.datasource.name\n\n if experiments == 'ablations':\n models = {\n 'FluxRGNN': ['final',\n 'final_without_encoder',\n 'final_without_boundary'],\n 'LocalLSTM': ['final']\n }\n elif experiments == 'final':\n models = {\n 'FluxRGNN': ['final'],\n 'GAM': ['final'],\n 'HA': ['final'],\n 'GBT': ['final']\n }\n else:\n m = cfg.model.name\n year = cfg.datasource.test_year\n\n # find all experiments available for this model, datasource and test year\n result_dir = osp.join(base_dir, 'results', datasource, m, f'test_{year}')\n models = {\n m : [ f.name for f in os.scandir(result_dir) if f.is_dir() ]\n }\n\n\n # thresholds for binary classification metrics\n if cfg.datasource.name == 'abm':\n thresholds = [0.0019, 0.0207]\n else:\n thresholds = [0, 10, 20]\n\n rmse_per_hour = []\n mae_per_hour = []\n pcc_per_hour = []\n bin_per_hour = []\n\n rmse_per_night = []\n mae_per_night = []\n\n output_dir = osp.join(base_dir, 'results', datasource, f'performance_evaluation{ext}', experiments)\n os.makedirs(output_dir, exist_ok=True)\n\n counter = 0\n\n for m, dirs in models.items():\n print(f'evaluate {m}')\n\n for d in dirs:\n result_dir = osp.join(base_dir, 'results', datasource, m, f'test_{cfg.datasource.test_year}', d)\n\n # check if directory exists\n if os.path.isdir(result_dir):\n results, model_cfg = load_cv_results(result_dir, trials=cfg.task.repeats, ext=ext)\n\n df_prep = pd.read_csv(osp.join(base_dir, 'data', 'preprocessed',\n f'{model_cfg[\"t_unit\"]}_{model_cfg[\"model\"][\"edge_type\"]}_ndummy={model_cfg[\"datasource\"][\"n_dummy_radars\"]}',\n datasource, cfg.season, str(cfg.datasource.test_year), 'dynamic_features.csv'))\n tidx2night = dict(zip(df_prep.tidx, df_prep.nightID))\n\n rmse_per_hour.append(compute_rmse(m, d, results, tidx2night, groupby=['horizon', 'trial'],\n threshold=0, km2=True, fixed_t0=fixed_t0))\n mae_per_hour.append(compute_mae(m, d, results, tidx2night, groupby=['horizon', 'trial'],\n threshold=0, km2=True, fixed_t0=fixed_t0))\n pcc_per_hour.append(compute_pcc(m, d, results, tidx2night, groupby=['horizon', 'trial'],\n threshold=0, km2=True, fixed_t0=fixed_t0))\n\n if fixed_t0:\n rmse_per_night.append(compute_rmse_per_night(m, d, results, tidx2night, groupby=['night_horizon', 'trial']))\n mae_per_night.append(compute_mae_per_night(m, d, results, tidx2night, groupby=['night_horizon', 'trial']))\n\n # compute binary classification measures\n for thr in thresholds:\n bin_per_hour.append(compute_bin(m, d, results, groupby=['horizon', 'trial'], threshold=thr, km2=True))\n\n counter += 1\n\n else:\n print(f'Experiment \"{d}\" for model \"{m}\" and datasource \"{datasource}\" is not available. 
'\n f'Use \"run_experiments.py model={m} datasource={datasource} +experiment={d}\" to run this experiment.')\n\n if counter > 0:\n rmse_per_hour = pd.concat(rmse_per_hour)\n rmse_per_hour.to_csv(osp.join(output_dir, f'rmse_per_hour.csv'))\n\n mae_per_hour = pd.concat(mae_per_hour)\n mae_per_hour.to_csv(osp.join(output_dir, f'mae_per_hour.csv'))\n\n pcc_per_hour = pd.concat(pcc_per_hour)\n pcc_per_hour.to_csv(osp.join(output_dir, f'pcc_per_hour.csv'))\n\n bin_per_hour = pd.concat(bin_per_hour)\n bin_per_hour.to_csv(osp.join(output_dir, f'bin_per_hour.csv'))\n\n if fixed_t0:\n rmse_per_night = pd.concat(rmse_per_night)\n rmse_per_night.to_csv(osp.join(output_dir, f'rmse_per_night.csv'))\n\n mae_per_night = pd.concat(mae_per_night)\n mae_per_night.to_csv(osp.join(output_dir, f'mae_per_night.csv'))", "def evaluate_model(args, eval_runs, warm_runs, metrics=['psnr', 'ssim', 'fps']):\n upsampler = Upsampler(args)\n if warm_runs > 0:\n print(\"Warming up for evaluation\")\n for i in range(warm_runs):\n print(\"Performing warm-up run\", str(i+1))\n for sequence in ['foliage', 'walk', 'calendar', 'city']:\n bix_dir = os.path.join(VID4_DIR, 'BIx4', sequence)\n upsampler.run_dir(bix_dir, reset=False)\n \n time = 0.\n psnrs = []\n ssims = []\n for i in range(eval_runs):\n run_psnrs = []\n run_ssims = []\n print(\"Performing evaluation run\", str(i+1))\n for sequence in ['foliage', 'walk', 'calendar', 'city']:\n bix_dir = os.path.join(VID4_DIR, 'BIx4', sequence)\n gt_dir = os.path.join(VID4_DIR, 'GT', sequence)\n print(\"Evaluating on\", bix_dir)\n time += upsampler.run_dir(bix_dir, reset=False)\n vid_psnrs, vid_ssims = _eval_sr_perf(os.path.join(bix_dir, 'up'), gt_dir)\n run_psnrs += vid_psnrs\n run_ssims += vid_ssims\n if i == eval_runs-1:\n with open(os.path.join(upsampler.get_model_dir(), \"psnr.txt\"), \"w\") as f:\n f.writelines(str(psnr) + '\\n' for psnr in run_psnrs)\n with open(os.path.join(upsampler.get_model_dir(), \"ssim.txt\"), \"w\") as f:\n f.writelines(str(ssim) + '\\n' for ssim in run_ssims)\n psnrs += run_psnrs\n ssims += run_ssims\n\n fps = VID4_LENGTH/ (time/eval_runs)\n return Performance(psnr=psnrs, ssim=ssims, fps=fps)", "def evaluate(self, test_data):\n result = self.model.run(test_data)\n self._save_result(result)", "def eval_model_on_valid(args):\n cfg, lbl = util.get_label_cfg_by_args(args)\n uid = cfg['uniqueid']\n print('We are playing with %s' % uid)\n outdir='models/%s/gate_expert' % uid\n outname='gate_expert_model.pt'\n if KLLOSS:\n outname = 'gate_expert_kldiv_model.pt'\n if args.warm:\n outname = outname.replace('.pt', '_warm.pt')\n mdl_path = os.path.join(outdir, outname)\n gate_expert = GateExpertNet(mdl_path, False)\n eval_fun = gate_expert.get_y\n\n valid_set = np.load(cfg['valid_path'])\n valid_x = valid_set[cfg['x_name']]\n valid_y = valid_set[cfg['y_name']]\n predy = eval_fun(valid_x)\n # dump output into some file\n valid_name = 'data/%s/gate_expert_valid_data.npz' % uid\n if KLLOSS:\n valid_name = valid_name.replace('_valid', '_kldiv_valid')\n if args.warm:\n valid_name = valid_name.replace('.npz', '_warm.npz')\n np.savez(valid_name, x=valid_x, y=predy)", "def set_eval(self):\n self.model.eval()", "def eval(self, logger=None):\n self.model.eval()\n self.model_DP.eval()\n logger.info(\"Successfully set the model eval mode\")", "def train_and_eval(self):\n self.__create_indexes()\n model = None\n model = None\n if self.model == 'OMult':\n model = OMult(self.kwargs)\n elif self.model == 'ConvO':\n model = ConvO(self.kwargs)\n elif self.model == 'QMult':\n model = 
QMult(self.kwargs)\n elif self.model == 'ConvQ':\n model = ConvQ(self.kwargs)\n elif self.model == 'OMultBatch':\n model = OMultBatch(self.kwargs)\n elif self.model == 'ConvOBatch':\n model = ConvOBatch(self.kwargs)\n elif self.model == 'QMultBatch':\n model = QMultBatch(self.kwargs)\n elif self.model == 'ConvQBatch':\n model = ConvQBatch(self.kwargs)\n else:\n print(self.model, ' is not valid name')\n raise ValueError\n\n self.train(model)\n self.eval(model)", "def evaluate_on_test_set(self, energy_threshold=80):\n losses = []\n accuracies = []\n codings = []\n codings_label = []\n losses_item = []\n test_loader = self.dm.get_test_set()\n N = 0\n gamma_sum = 0\n mu_sum = 0\n cov_mat_sum = 0\n\n # Change the model to evaluation mode\n self.model.eval()\n\n with torch.no_grad():\n # Create pytorch's train data_loader\n train_loader = self.dm.get_train_set()\n\n for i, data in enumerate(train_loader, 0):\n # transfer tensors to selected device\n train_inputs, _ = data[0].to(self.device), data[1].to(self.device)\n\n # forward pass\n code, x_hat, cosim, z, gamma = self.model(train_inputs)\n phi, mu, cov_mat = self.model.compute_params(z, gamma)\n\n batch_gamma_sum = gamma.sum(axis=0)\n\n gamma_sum += batch_gamma_sum\n mu_sum += mu * batch_gamma_sum.unsqueeze(-1) # keep sums of the numerator only\n cov_mat_sum += cov_mat * batch_gamma_sum.unsqueeze(-1).unsqueeze(-1) # keep sums of the numerator only\n\n N += train_inputs.shape[0]\n\n train_phi = gamma_sum / N\n train_mu = mu_sum / gamma_sum.unsqueeze(-1)\n train_cov = cov_mat_sum / gamma_sum.unsqueeze(-1).unsqueeze(-1)\n\n print(\"Train N:\", N)\n print(\"phi :\\n\", train_phi)\n print(\"mu :\\n\", train_mu)\n print(\"cov :\\n\", train_cov)\n\n # Calculate energy using estimated parameters\n\n train_energy = []\n train_labels = []\n train_z = []\n\n for i, data in enumerate(train_loader, 0):\n # transfer tensors to selected device\n train_inputs, train_inputs_labels = data[0].to(self.device), data[1]\n\n # forward pass\n code, x_hat, cosim, z, gamma = self.model(train_inputs)\n sample_energy, pen_cov_mat = self.model.estimate_sample_energy(z,\n train_phi,\n train_mu,\n train_cov,\n average_it=False,\n device=self.device)\n\n train_energy.append(sample_energy.cpu().numpy())\n train_z.append(z.cpu().numpy())\n train_labels.append(train_inputs_labels.numpy())\n\n train_energy = np.concatenate(train_energy, axis=0)\n train_z = np.concatenate(train_z, axis=0)\n train_labels = np.concatenate(train_labels, axis=0)\n\n test_energy = []\n test_labels = []\n test_z = []\n\n for data in test_loader:\n test_inputs, label_inputs = data[0].to(self.device), data[1]\n\n # forward pass\n code, x_hat, cosim, z, gamma = self.model(test_inputs)\n sample_energy, pen_cov_mat = self.model.estimate_sample_energy(z,\n train_phi,\n train_mu,\n train_cov,\n average_it=False,\n device=self.device)\n test_energy.append(sample_energy.cpu().numpy())\n test_z.append(z.cpu().numpy())\n test_labels.append(label_inputs.numpy())\n\n test_energy = np.concatenate(test_energy, axis=0)\n test_z = np.concatenate(test_z, axis=0)\n test_labels = np.concatenate(test_labels, axis=0)\n\n combined_energy = np.concatenate([train_energy, test_energy], axis=0)\n combined_labels = np.concatenate([train_labels, test_labels], axis=0)\n\n thresh = np.percentile(combined_energy, energy_threshold)\n print(\"Threshold :\", thresh)\n\n # Prediction using the threshold value\n pred = (test_energy > thresh).astype(int)\n groundtruth = test_labels.astype(int)\n\n accuracy = accuracy_score(groundtruth, 
pred)\n precision, recall, f_score, support = prf(groundtruth, pred, average='binary')\n\n print(f\"Accuracy:{accuracy}, \"\n f\"Precision:{precision}, \"\n f\"Recall:{recall}, \"\n f\"F-score:{f_score}, \"\n f\"\\nconfusion-matrix: {confusion_matrix(groundtruth, pred)}\")\n\n # switch back to train mode\n self.model.train()\n return accuracy, precision, recall, f_score, test_z, test_labels, combined_energy", "def evaluate():\n model.eval()\n with torch.no_grad():\n loss, n = 0, 0\n for xb, yb in valid_dl:\n n += len(xb)\n loss += loss_func(model(xb), yb) * len(xb)\n\n return loss/n", "def evaluate(args, model, tokenizer, eval_dataset, eval_dataloader, task_name, model_type, split, step):\n model.eval()\n processor = MoralStoriesProcessor()\n results = dict()\n softmax = torch.nn.Softmax(dim=1)\n\n # Eval!\n logger.info('***** Running evaluation on the validation / test set *****')\n logger.info(' Num examples = %d', len(eval_dataset))\n logger.info(' Batch size = %d', args.eval_batch_size)\n batch_losses = list()\n eval_loss = 0.0\n micro_loss, macro_loss = 0.0, 0.0\n num_batches, num_tokens = 0, 0\n preds = None\n soft_preds = None\n out_label_ids = None\n # Perform a single evaluation step\n for batch in tqdm(eval_dataloader, desc='Evaluating', mininterval=10, ncols=100):\n batch = tuple(t.to(args.device) for t in batch)\n with torch.no_grad():\n if 'gen' not in task_name:\n inputs = {'input_ids': batch[0],\n 'attention_mask': batch[1],\n 'token_type_ids': batch[2] if model_type == 'bert' else None,\n 'labels': batch[3]}\n else:\n inputs = {'input_ids': batch[0],\n 'attention_mask': batch[1],\n 'labels': batch[3]}\n if 'gpt2' not in model_type:\n # Prepare decoder inputs and labels for enc-dec models\n inputs['labels'] = batch[3][:, 1:].contiguous() # shift\n decoder_input_ids = batch[3][:, :-1].clone() # shift\n decoder_input_ids[decoder_input_ids == -100] = tokenizer.pad_token_id # remove masking\n inputs['decoder_input_ids'] = decoder_input_ids.contiguous()\n\n outputs = model(**inputs)\n\n tmp_eval_loss, logits = outputs[:2]\n soft_logits = softmax(logits)\n eval_loss += tmp_eval_loss.mean().item()\n batch_losses.append(tmp_eval_loss.item())\n\n if 'gen' not in task_name:\n if preds is None:\n preds = logits.detach().cpu().numpy()\n soft_preds = soft_logits.detach().cpu().numpy()\n out_label_ids = inputs['labels'].detach().cpu().numpy()\n else:\n preds = np.append(preds, logits.detach().cpu().numpy(), axis=0)\n soft_preds = np.append(soft_preds, soft_logits.detach().cpu().numpy(), axis=0)\n out_label_ids = np.append(out_label_ids, inputs['labels'].detach().cpu().numpy(), axis=0)\n else:\n # Obtain per-token loss for perplexity computation\n batch_loss = get_token_loss(args, logits, batch[3], batch[4], model_type=model_type)\n macro_loss += batch_loss.mean().item()\n micro_loss += batch_loss.sum().item()\n num_batches += 1\n num_tokens += batch_loss.view(-1).shape[0]\n\n # Compute and update evaluation metric values\n if 'gen' not in task_name:\n # Isolate model predictions\n preds = np.argmax(preds, axis=1)\n soft_preds = soft_preds.tolist()\n curr_result = compute_cls_metrics(preds, out_label_ids)\n else:\n macro_perplexity = torch.exp(torch.tensor(macro_loss / num_batches)).item()\n micro_perplexity = torch.exp(torch.tensor(micro_loss / num_tokens)).item()\n curr_result = {'macro_perplexity': macro_perplexity,\n 'micro_perplexity': micro_perplexity}\n\n if len(results.keys()) == 0:\n for k, v in curr_result.items():\n results[k] = [v]\n else:\n for k, v in curr_result.items():\n 
results[k].append(v)\n\n # Log metrics\n output_eval_file = os.path.join(args.output_dir, 'results_{}_{}.txt'.format(task_name, split))\n with open(output_eval_file, 'a') as writer:\n logger.info('***** Eval results *****')\n writer.write('STEP: {:s}\\n'.format(str(step)))\n for key in sorted(curr_result.keys()):\n logger.info(' %s = %s', key, str(curr_result[key]))\n writer.write('%s = %s\\n' % (key, str(curr_result[key])))\n\n # Log predictions\n if 'gen' not in task_name:\n output_pred_file = \\\n os.path.join(args.output_dir, 'predictions_{}_{}_{}.lst'.format(task_name, split, step))\n with open(output_pred_file, 'w') as writer:\n logger.info('***** Write predictions *****')\n for pred in preds:\n writer.write('{}\\n'.format(processor.get_labels()[pred]))\n\n # Maintain a single metrics file\n if os.path.exists(args.output_dir):\n with open(os.path.join(args.output_dir, 'metrics_{}_{}.json'.format(task_name, split)), 'w') as f:\n f.write(json.dumps(results))\n f.close()\n\n # Report mean dev loss\n mean_eval_loss = eval_loss / len(eval_dataloader)\n logging.info('\\n' + '*' * 10)\n logging.info('Mean development loss: {:.4f}'.format(mean_eval_loss))\n logging.info('*' * 10 + '\\n')\n\n return results, mean_eval_loss, preds, soft_preds", "def evaluate_model():\n\n # Get the processed data (in proper format to evaluate the NER model)\n data = get_json_from_file_path(PROCESSED_DATA_PATH)\n # Split the dataset for training and test as we did for training\n train_data, test_data = train_test_split(data, train_size=0.7, \n random_state=4)\n\n # Load the model trained\n try:\n ner_model = spacy.load(OUTPUT_MODEL_PATH)\n except Exception as err:\n msg = f'Could not load the model. Error: {err}'\n raise Exception(msg)\n\n # Compute evaluation scores\n print('Computing metrics...')\n scores = evaluate(ner_model, test_data)\n # General metrics of the model\n f_score = scores.get('ents_f')\n precision = scores.get('ents_p')\n recall = scores.get('ents_r')\n print('\\nScoring:')\n print(f'F-score: {f_score}')\n print(f'Precision: {precision}')\n print(f'Recall: {recall}')\n\n # Get the specific scores for each entity \n scores_per_entity = scores.get('ents_per_type')\n # Get the F-score of the entities\n f_scores_of_entities = []\n for entity_scores in scores_per_entity.values():\n f_scores_of_entities.append(entity_scores['f'])\n # Compute the macro averaged F-score\n macro_avg_f_score = sum(f_scores_of_entities)/len(f_scores_of_entities)\n print(f'Macro averaged F-score: {macro_avg_f_score}')\n \n print('\\nScores per entity;')\n print('{:<15} {:<10} {:<10} {:<10}'.format('Entity','F-score','Precision','Recall'))\n for key, value in scores_per_entity.items():\n entity = key\n f, p, r = value['f'], value['p'], value['r']\n print('{:<15} {:<10.2f} {:<10.2f} {:<10.2f}'.format(entity, f, p, r))", "def eval_model(net, val_iter):\n correct = 0\n total = 0\n cm = conf.ConfusionMatrix([0, 1])\n net.eval()\n with torch.no_grad():\n for batch in val_iter:\n total += batch.correct.size(0)\n prediction = predict_batch(net, batch)\n cm.add_entry(batch.correct.tolist(), prediction.tolist())\n correct += (prediction == batch.correct).sum().item()\n\n return correct/total, cm.get_f1()", "def evaluate(self, x_test, y_test, verbose=0):\n\n if self.model is None:\n raise StandardError('Model is not built. 
Run build method or load model before fitting')\n\n test_results = self.model.evaluate(x_test,\n y_test,\n batch_size=self.batch_size,\n verbose=verbose)\n self.val_history = test_results\n return test_results", "def evaluate(self, x_test, y_test, verbose=0):\n\n if self.model is None:\n raise StandardError('Model is not built. Run build method or load model before fitting')\n\n test_results = self.model.evaluate(x_test,\n y_test,\n batch_size=self.batch_size,\n verbose=verbose)\n self.val_history = test_results\n return test_results", "def evaluate(self, x_test, y_test, verbose=0):\n\n if self.model is None:\n raise StandardError('Model is not built. Run build method or load model before fitting')\n\n test_results = self.model.evaluate(x_test,\n y_test,\n batch_size=self.batch_size,\n verbose=verbose)\n self.val_history = test_results\n return test_results", "def evaluate_model(self, predictions, expected, bypass_data_to_eval):\n\n result = []\n for i, unique_id in enumerate(np.squeeze(expected[\"unique_ids\"])):\n start_logits = predictions['tf_electra_for_question_answering'][i]\n start_top_index = predictions['tf_electra_for_question_answering_1'\n ][i]\n end_logits = predictions['tf_electra_for_question_answering_2'][i]\n end_top_index = predictions['tf_electra_for_question_answering_3'][i\n ]\n cls_logits = predictions['tf_electra_for_question_answering_4'][i]\n\n result.append(\n SquadResult(\n unique_id,\n start_logits.tolist(),\n end_logits.tolist(),\n start_top_index=start_top_index.tolist(),\n end_top_index=end_top_index.tolist(),\n cls_logits=cls_logits.tolist(),\n )\n )\n\n dev_features = bypass_data_to_eval[\"dev_features\"]\n dev_examples = bypass_data_to_eval[\"dev_examples\"]\n\n answers, nbest_answers = get_answers(\n dev_examples, dev_features, result, self._args\n )\n\n output_prediction_file = os.path.join(\n self._args.output_dir, \"predictions.json\"\n )\n output_nbest_file = os.path.join(\n self._args.output_dir, \"nbest_predictions.json\"\n )\n\n with open(output_prediction_file, \"w\") as f:\n f.write(json.dumps(answers, indent=4) + \"\\n\")\n with open(output_nbest_file, \"w\") as f:\n f.write(json.dumps(nbest_answers, indent=4) + \"\\n\")\n\n if self._args.version_2_with_negative:\n dev_file = \"dev-v2.0.json\"\n eval_file = \"evaluate-v2.0.py\"\n else:\n dev_file = \"dev-v1.1.json\"\n eval_file = \"evaluate-v1.1.py\"\n\n command_str = (\n f\"{sys.executable} {os.path.join(self._args.data_dir, eval_file)} \"\n f\"{os.path.join(self._args.data_dir, dev_file)} \"\n f\"{output_prediction_file}\"\n )\n\n logging.debug(f\"\\nExecuting: `{command_str}`\\n\")\n\n eval_out = subprocess.check_output(shlex.split(command_str))\n\n # scores: {'exact_match': 87.06717123935667, 'f1': 92.78048326711645}\n scores = json.loads(eval_out.decode(\"UTF-8\").strip())\n\n logging.debug(\"scores:\", scores)\n\n metric_units = \"f1\"\n\n return scores[metric_units], metric_units", "def model_evaluate(self, test):\n features = {name: np.array(value) for name, value in test.items()}\n labels = {name: features.pop(name) for name in self.label_names}\n metrics = self.model.evaluate(x=features, y=labels, batch_size=5)\n return metrics", "def evaluate():\n with tf.Graph().as_default() as g:\n \n # Get hazy and clean images for SYNTHIA.\n val = FLAGS.val\n hazy_images, clean_images_ground_truth, _ = model_spec.input(val)\n\n # Build a Graph that computes the dehazed predictions from the\n # inference model.\n clean_images_predicted = model_spec.inference(hazy_images)\n\n # Calculate loss (only 
the data term).\n loss = model_spec.data_loss(clean_images_predicted, clean_images_ground_truth)\n\n # Restore the moving average version of the learned variables for eval.\n variable_averages = tf.train.ExponentialMovingAverage(\n model_spec.MOVING_AVERAGE_DECAY)\n variables_to_restore = variable_averages.variables_to_restore()\n saver = tf.train.Saver(variables_to_restore)\n\n # Build the summary operation based on the TF collection of Summaries.\n summary_op = tf.merge_all_summaries()\n\n summary_writer = tf.train.SummaryWriter(FLAGS.eval_dir, g)\n\n while True:\n eval_once(saver, summary_writer, loss, summary_op)\n if FLAGS.run_once:\n print('Finished one-off evaluation.')\n break\n time.sleep(FLAGS.eval_interval_secs)", "def evaluate(self, dataset):\n return self.model.evaluate(dataset.X_val, dataset.y_val)", "def train_eval(model, train_set):\n num_train_batch = len(train_set)\n train_loss = np.zeros((num_train_batch, 1), dtype=float)\n train_acc = np.zeros((num_train_batch, 1), dtype=float)\n shuffle(train_set)\n for ibatch, batch in enumerate(train_set):\n result = model.train_on_batch({'input':batch[0]}, {'fp1':batch[1], 'fp2':batch[1], 'fp3':batch[1], 'ave':batch[1]})\n train_loss[ibatch] = result[0]\n train_acc[ibatch] = result[-1]\n return np.mean(train_loss), np.mean(train_acc)", "def eval_all(self, sess, dev=False):\n lers = {}\n decode_fns = self._test_model.get_decode_fns()\n metrics = self.hparams.metrics.split(',')\n\n input_data = self._input_data_dev if dev else self._input_data_test\n if input_data is None:\n return None\n input_data.reset_iterator(sess)\n while True:\n try:\n ground_truth_labels, predicted_labels, ground_truth_len, predicted_len = self.eval(sess, dev)\n for acc_id, (gt_labels, p_labels, gt_len, p_len) in \\\n enumerate(zip(ground_truth_labels, predicted_labels, ground_truth_len, predicted_len)):\n if acc_id not in lers: lers[acc_id] = []\n for i in range(len(gt_labels)):\n ler, _, _ = ops_utils.evaluate(\n gt_labels[i],#[:gt_len[i]],\n p_labels[i],#[:p_len[i]],\n decode_fns[acc_id],\n metrics[acc_id])\n if ler is not None:\n lers[acc_id].append(ler)\n except tf.errors.OutOfRangeError:\n break\n\n return {acc_id: sum(lers[acc_id]) / len(lers[acc_id]) for acc_id in lers}", "def evaluate(self):\n # Method variables definition\n X_train, X_test, y_train, y_test = dm.reshape_y_set_split_data(self.datasetManager)\n featureScaleDependentVariables = self.datasetManager.params.featureScaleDependentVariables\n\n # Feature Scaling\n X_scaler, X_train = dm.do_feature_scaling(X_train)\n if featureScaleDependentVariables:\n y_scaler, y_train = dm.do_feature_scaling(y_train)\n else:\n y_scaler = None\n y_train = self.datasetManager.y_train\n \n self.X_scaler = X_scaler\n self.y_scaler = y_scaler\n\n # Training the SVR model on the training set\n regressor = SVR(kernel = 'rbf')\n regressor.fit(X_train, y_train.ravel())\n self.regressor = regressor\n\n # Predicting the Test set results\n self.y_pred = y_scaler.inverse_transform(regressor.predict(X_scaler.transform(X_test))) if featureScaleDependentVariables else regressor.predict(X_test)\n \n # Returning the process result : the regression type and the predicted dependent variables set\n return [\"Support Vector Regression\", self.get_r2_score(y_test, self.y_pred)]", "def do_eval(sess,model,valid,batch_size):\n valid_X,valid_y,valid_p=valid\n number_examples=valid_X.shape[0]\n if number_examples>10000:\n number_examples=validation_size\n print(\"do_eval.valid.number_examples:\",number_examples)\n if 
number_examples>validation_size: valid_X,valid_y,valid_p=valid_X[0:validation_size],valid_y[0:validation_size],valid_p[0:validation_size]\n eval_loss,eval_counter,eval_acc=0.0,0,0.0\n for start,end in zip(range(0,number_examples,batch_size),range(batch_size,number_examples,batch_size)):\n feed_dict = {model.x_mask_lm: valid_X[start:end],model.y_mask_lm: valid_y[start:end],model.p_mask_lm:valid_p[start:end],\n model.dropout_keep_prob: 1.0} # FLAGS.dropout_keep_prob\n curr_eval_loss, logits_lm, accuracy_lm= sess.run([model.loss_val_lm,model.logits_lm,model.accuracy_lm],feed_dict) # logits:[batch_size,label_size]\n eval_loss=eval_loss+curr_eval_loss\n eval_acc=eval_acc+accuracy_lm\n eval_counter=eval_counter+1\n return eval_loss/float(eval_counter+small_value), eval_acc/float(eval_counter+small_value)", "def evaluate(model, val_data, epoch):\n print('validating')\n\n # 设置为评估模式 \n model.eval() \n\n val_loss = []\n with torch.no_grad():\n DEVICE = config.DEVICE\n\n val_dataloader = DataLoader(dataset=val_data,\n batch_size=config.batch_size,\n shuffle=True,\n pin_memory=True, drop_last=True,\n collate_fn=collate_fn)\n\n for batch, data in enumerate(tqdm(val_dataloader)):\n\n x, y, x_len, y_len, oov, len_oovs = data\n\n if config.is_cuda:\n x = x.to(DEVICE)\n y = y.to(DEVICE)\n x_len = x_len.to(DEVICE)\n len_oovs = len_oovs.to(DEVICE)\n\n loss = model(x, x_len, y, len_oovs, batch=batch, \n num_batches=len(val_dataloader),\n teacher_forcing=True)\n\n val_loss.append(loss.item())\n\n return np.mean(val_loss)", "def evaluate_model():\n\n print '\\n\\tevaluate result'\n os.system('./conlleval.pl -d \\'\\t\\' < ' + encoded_test + ' >> ' + result_file)\n print '\\t--done\\n'", "def evaluate(self, dataset):\n\t\tpass", "def test_eval(model, test_set):\n num_test_batch = len(test_set)\n test_loss = np.zeros((num_test_batch, 1), dtype=float)\n test_acc = np.zeros((num_test_batch, 1), dtype=float)\n for ibatch, batch in enumerate(test_set):\n result = model.test_on_batch({'input':batch[0]}, {'fp1':batch[1], 'fp2':batch[1], 'fp3':batch[1], 'ave':batch[1]})\n test_loss[ibatch] = result[0]\n test_acc[ibatch] = result[-1]\n return np.mean(test_loss), np.mean(test_acc)", "def evaluate(self):\n predictions = self.model.predict(self.test[0])\n accuracy = accuracy_score(self.test[1], predictions)\n print(\"Accuracy:\", str(accuracy * 100) + \"%\")\n self.plot_results(predictions)", "def evaluate(hparams, summary_dir, num_gpus, model_type, eval_set, eval_size,\n eval_shard, data_dir, num_targets, dataset, validate, seed,\n shuffled, shift, pad, batch_size=100, checkpoint=None):\n output_dir = summary_dir\n load_dir = summary_dir + '/train/'\n summary_dir += '/eval/' + FLAGS.dataset + '/' + eval_set\n with tf.Graph().as_default():\n features = get_features(eval_set, batch_size, num_gpus, data_dir,\n num_targets, dataset, validate, evaluate=True,\n seed=seed, shuffled=shuffled, shift=shift,\n pad=pad, eval_shard=eval_shard)\n model = models[model_type](hparams)\n result, _ = model.multi_gpu(features, num_gpus)\n test_writer = tf.summary.FileWriter(summary_dir)\n seen_step = -1\n paused = 0\n while paused < 360:\n print('start evaluation, model defined')\n if checkpoint:\n step = extract_step(checkpoint)\n last_checkpoint = checkpoint\n else:\n step, last_checkpoint = find_checkpoint(load_dir, seen_step)\n if step == -1:\n time.sleep(60)\n paused += 1\n else:\n paused = 0\n seen_step = step\n run_experiment(load_eval, last_checkpoint, test_writer,\n eval_experiment, model, result,\n eval_size // batch_size, 
features=features,\n eval_set=eval_set, output_dir=output_dir,\n unsupervised=hparams.unsupervised,\n num_gpus=num_gpus)\n if checkpoint:\n break\n\n test_writer.close()", "def train_and_eval():\n # train_file_name = 'adult.data'\n # test_file_name = 'adult.test'\n train_file_name = 'poker-hand-testing.data'\n test_file_name = 'poker-hand-training-true.data'\n #test_file_name = maybe_download()\n df_train = pd.read_csv(\n tf.gfile.Open(\"/opt/tensor/race_result_clean.csv\"),\n names=COLUMNS,\n skipinitialspace=True,\n skiprows=1)\n df_test = pd.read_csv(\n tf.gfile.Open(\"/opt/tensor/race_result_clean.csv\"),\n names=COLUMNS,\n skipinitialspace=True,\n skiprows=1)\n\n #df_train[LABEL_COLUMN] = (df_train[\"CLASS_Poker_Hand\"].apply(lambda x: x>5)).astype(int)\n #df_test[LABEL_COLUMN] = (df_test[\"CLASS_Poker_Hand\"].apply(lambda x: x>5)).astype(int)\n\n model_dir = tempfile.mkdtemp() if not FLAGS.model_dir else FLAGS.model_dir\n print(\"model directory = %s\" % model_dir)\n m = build_estimator(model_dir)\n print(m)\n m.fit(input_fn=lambda: input_fn(df_train), steps=FLAGS.train_steps)\n results = m.evaluate(input_fn=lambda: input_fn(df_test), steps=1)\n for key in sorted(results):\n print(\"%s: %s\" % (key, results[key]))", "def evaluate(self):\n\n\t\tself.model_score = self.model.evaluate(self.x_test, self.y_test, batch_size=2048)\n\t\tprint(\"%s score = %f\\n\" %(self.modelName, self.model_score[1]))\n\t\treturn self.model_score", "def evaluate(model, eval_data, num_labels): \n # Turn on the evaluation state to ignore dropouts\n model.eval()\n results = [predict(model, x) for x, y in eval_data]\n f1_score, accuracy = get_metrics(np.array([y for x, y in eval_data]), results, num_labels)\n return f1_score, accuracy", "def train(self, session, train_examples, dev_examples, train_dir):\n\n # some free code to print out number of parameters in your model\n # it's always good to check!\n # you will also want to save your model parameters in train_dir\n # so that you can use your trained model to make predictions, or\n # even continue training\n\n tic = time.time()\n params = tf.trainable_variables()\n num_params = sum(map(lambda t: np.prod(tf.shape(t.value()).eval()), params))\n toc = time.time()\n logging.info(\"Number of params: %d (retreival took %f secs)\" % (num_params, toc - tic))\n\n if self.summary_flag:\n self.train_writer = tf.summary.FileWriter(self.summaries_dir + '/train', session.graph)\n\n logging.info(\"Train Loss File: {}\".format(self.train_loss_log))\n logging.info(\"Dev Loss File: {}\".format(self.dev_loss_log))\n best_score = 100000\n train_log = open(self.train_loss_log, \"w\")\n dev_log = open(self.dev_loss_log, \"w\")\n for epoch in range(self.n_epoch):\n print(\"Epoch {:} out of {:}\".format(epoch + 1, self.n_epoch))\n dev_score = self.run_epoch(session, train_examples, dev_examples, epoch, train_log)\n dev_log.write(\"{},{}\\n\".format(epoch + 1, dev_score))\n logging.info(\"Average Dev Cost: {}\".format(dev_score))\n logging.info(\"train F1 & EM\")\n f1, em = self.evaluate_answer(session, train_examples, self.rev_vocab, log = True)\n logging.info(\"Dev F1 & EM\")\n f1, em = self.evaluate_answer(session, dev_examples, self.rev_vocab, log = True)\n if dev_score < best_score:\n best_score = dev_score\n print(\"New best dev score! 
Saving model in {}\".format(train_dir + \"/\" + self.model_name))\n self.saver.save(session, train_dir + \"/\" + self.model_name)\n\n return best_score", "def model_evaluation(X_train, y_train, X_test, y_test, k=16):\n print(\">>>>>>> x.shape\", X_train.shape)\n p_matrix, X_reduce = dimension_reduction(X_train, k=k)\n print(\"model training ...\")\n bdt = AdaBoostClassifier(DecisionTreeClassifier(max_depth=2), n_estimators=30, learning_rate=1)\n bdt.fit(X_reduce, y_train)\n print(\"fit succeed\")\n\n X_test = np.dot(X_test, p_matrix)\n y_pred = bdt.predict(X_test)\n print(classification_report(y_test, y_pred, target_names=['benign', 'gafgyt', 'miari'], digits=4))", "def model_run(self, model, estimators):\n model.fit(self.X_train, self.y_train)\n y_score = model.predict(self.X_test)\n accu_train = np.sum(model.predict(self.X_train) == self.y_train) / self.y_train.size\n accu_test = np.sum(y_score == self.y_test) / self.y_test.size\n\n self.results.write(\"Model Results\\n\")\n self.results.write(\"Number of Estimators: \" + str(estimators) + \"\\n\")\n self.results.write(\"Accuracy on Train: \" + str(accu_train) + \"\\n\")\n self.results.write(\"Accuracy on Test: \" + str(accu_test) + \"\\n\")\n return model", "def evaluate_model(X_train, X_test, y_train, y_test, batch_size, nb_epoch):\n model = Sequential()\n model.add(Dense(512, input_shape=(784,)))\n model.add(Activation(\"relu\"))\n model.add(Dropout(0.2))\n model.add(Dense(512))\n model.add(Activation(\"relu\"))\n model.add(Dropout(0.2))\n model.add(Dense(10))\n model.add(Activation(\"softmax\"))\n model.compile(loss=\"categorical_crossentropy\",\n optimizer=RMSprop(),\n metrics=[\"accuracy\"])\n model.fit(X_train, y_train, batch_size=batch_size, nb_epoch=nb_epoch,\n verbose=1, validation_data=(X_test, y_test))\n results = model.evaluate(X_test, y_test, verbose=0)\n return results, model", "def set_eval(self):\n for m in self.models.values():\n m.eval()", "def evaluate(data_loader, model, device):\n\n\tmodel.eval()\n\ttotal_num_examples = 0\n\ttotal_error = 0\n\tfor idx, batch in enumerate(data_loader):\n\t\tquestion_feature_vec = batch['feature_vec'].to(device)\n\t\tquestion_len = batch['len'].to(device)\n\t\tlabels = batch['labels'].to(device)\n\n\t\t####Your code here ---\n\n\t\t# get the output from the model\n\t\tlogits = model(question_feature_vec, question_len)\n\n\t\t# get error, num_examples using accuracy_fn defined previously\n\t\terror, num_examples = accuracy_fn(logits, labels)\n\n\t\t# update total_error and total_num_examples\n\t\ttotal_error += error\n\t\ttotal_num_examples += num_examples\n\n\taccuracy = 1 - total_error / total_num_examples\n\treturn accuracy", "def fit(self,\n X_train,\n y_train, \n X_test, \n y_test,\n max_evals,\n **kwargs,\n ):\n \n self.max_evals = max_evals\n \n for key in self.models_dict.keys():\n \n path_model_dir = self.path_model_dirs[key]\n \n if self.verbose >=1: \n print('\\n----',key,'----')\n print('path_model_dir:',path_model_dir)\n \n model_dict = self.models_dict[key]\n model_type = str(type(model_dict['model']))\n \n if 'sklearn' in model_type or 'xgboost' in model_type:\n path_file = _os.path.join(path_model_dir,'model_dict.dill')\n elif 'Net' in key:\n path_file = _os.path.join(path_model_dir,'best_model.h5')\n \n if self.retrain or _os.path.isfile(path_file)==False:\n model_dict = self._single_model_BayesianSearchCV(key, \n model_dict, \n X_train, y_train, \n X_test, y_test,\n path_model_dir,\n **kwargs)\n self.models_dict[key] = model_dict\n \n\n else: #reload previously 
trained model\n if 'sklearn' in str(type(self.models_dict[key]['model'])):\n self.models_dict[key] = self.load('model_dict', 'dill', path_model_dir)\n elif 'Net' in key:\n #check kwargs for epochs\n epochs = 100\n for item in self.kwargs.items():\n if 'epochs' in item[0]: epochs = item[1]\n self.models_dict[key]['best_model'] = _NeuralNet.utils.load_model(\n _os.path.join(path_model_dir,'best_model.h5'))\n self.models_dict[key]['best_params'] = self.load('best_params', 'dill', path_model_dir)\n \n if 'Net' in key:\n y_pred = self.models_dict[key]['best_model'].predict(_np.array(X_test))\n else:\n y_pred = self.models_dict[key]['best_model'].predict(X_test)\n \n\n if 'Net' not in key:\n self.models_dict[key]['best_pred_score'] = self.models_dict[key]['best_model'].score(X_test, y_test)\n y_pred_proba = self.models_dict[key]['best_model'].predict_proba(X_test)[:,1]\n else:\n \n if 'crossentropy' in self.models_dict[key]['best_model'].loss:\n y_pred_proba = y_pred\n y_pred = (y_pred < 0.5).astype(int)\n \n self.models_dict[key]['best_pred_score'] = self.models_dict[key]['best_model'].evaluate(_np.array(X_test), \n _np.array(y_test),\n verbose =0)\n \n if self.verbose >=1:\n try:\n print('\\tbest_cv_score:',self.models_dict[key]['best_cv_score'])\n except Exception as e:\n print('Exception occurred for:'+str(e))\n try:\n print('\\tbest_pred_score:',self.models_dict[key]['best_pred_score'])\n except Exception as e:\n print('Exception occurred for:'+str(e))\n\n for metric_key in self.metrics.keys():\n if self.metrics[metric_key] !=None:\n try:\n if 'roc' in metric_key:\n self.models_dict[key][metric_key] = self.metrics[metric_key](y_test, y_pred_proba)\n else:\n self.models_dict[key][metric_key] = self.metrics[metric_key](y_test, y_pred)\n print('\\t',metric_key,':',self.models_dict[key][metric_key])\n except Exception as e:\n print('Exception occurred for',metric_key,':',str(e))\n\n if 'sklearn' in str(type(self.models_dict[key]['model'])):\n self.save(self.models_dict[key], 'model_dict', 'dill', path_model_dir)\n elif 'Net' in key:\n model_dict_subset = self.models_dict[key].copy()\n for key in self.models_dict[key].keys():\n if key not in ['y_test','y_pred','best_pred_score'] +list(self.metrics.keys()):\n model_dict_subset.pop(key)", "def validate(self):\n self.set_model_mode('eval')\n self.evaluator.reset()\n losses = MetricMeter()\n\n print('Do evaluation on {} set'.format('valid set'))\n data_loader = self.val_loader\n assert data_loader is not None\n for batch_idx, batch in enumerate(data_loader):\n input, label = self.parse_batch_test(batch)\n loss = self.forward_backward(batch, backprob=False)\n losses.update(loss)\n # total_loss += loss['loss']\n output = self.model_inference(input)\n self.evaluator.process(output, label)\n\n results = self.evaluator.evaluate()\n total_loss = losses.meters['loss_x'].avg\n\n for k, v in results.items():\n tag = '{}/{}'.format('validation', k)\n self.write_scalar(tag, v, self.epoch)\n # if full_results:\n return [total_loss,losses.dict_results(),results]\n # return total_loss", "def evaluate():\n sess = tf.Session()\n tf.logging.info(\"Building graph...\")\n\n embeddings = load_embeddings()\n tf_data = load_batched_dataset(False, embeddings)\n it = tf_data.make_initializable_iterator()\n features, labels = it.get_next()\n\n logits = predict(False, embeddings, features[\"premise\"],\n features[\"hypothesis\"])\n accuracy, update_ops = tf.metrics.accuracy(\n tf.argmax(logits, 1, output_type=tf.int32), tf.to_int32(labels))\n\n tf.logging.info(\"Running 
initializers...\")\n checkpoint_file = FLAGS.checkpoint_file\n if checkpoint_file is not None:\n saver = tf.train.Saver(tf.trainable_variables())\n tf.logging.info(\"Restoring from checkpoint: \" + checkpoint_file)\n saver.restore(sess, checkpoint_file)\n else:\n tf.logging.warning(\"No checkpoint given, evaling model with random weights\")\n sess.run(tf.global_variables_initializer())\n sess.run(tf.local_variables_initializer())\n sess.run(tf.tables_initializer())\n sess.run(it.initializer)\n\n tf.logging.info(\"Starting loop....\")\n while True:\n try:\n sess.run(update_ops)\n except tf.errors.OutOfRangeError:\n break\n tf.logging.info(\"Done\")\n\n accuracy = sess.run(accuracy)\n print(\"Accuracy: %f\" % accuracy)", "def evaluate(self, train_set, test_set, shuffle_batch=True,\n epochs=25, lr_decay=0.95, sqr_norm_lim=9,labels=None,model=None): \n cost = self.negative_log_likelihood(self.y) \n dropout_cost = self.dropout_negative_log_likelihood(self.y)\n # adadelta upgrades: dict of variable:delta\n grad_updates = self.sgd_updates_adadelta(dropout_cost, lr_decay, 1e-6, sqr_norm_lim)\n # shuffle dataset and assign to mini batches.\n # if dataset size is not a multiple of batch size, replicate \n # extra data (at random)\n np.random.seed(3435)\n batch_size = self.batch_size\n if train_set.shape[0] % batch_size > 0:\n extra_data_num = batch_size - train_set.shape[0] % batch_size\n #extra_data = train_set[np.random.choice(train_set.shape[0], extra_data_num)]\n perm_set = np.random.permutation(train_set) \n extra_data = perm_set[:extra_data_num]\n new_data = np.append(train_set, extra_data, axis=0)\n else:\n new_data = train_set\n \n shuffled_data = np.random.permutation(new_data) # Attardi\n n_batches = shuffled_data.shape[0]/batch_size\n # divide train set into 90% train, 10% validation sets\n n_train_batches = int(np.round(n_batches*0.8))\n n_val_batches = n_batches - n_train_batches\n train_set = shuffled_data[:n_train_batches*batch_size,:]\n val_set = shuffled_data[n_train_batches*batch_size:,:] \n # push data to gpu \n # the dataset has the format [word_indices,padding,user,label]\n train_set_x, train_set_y = shared_dataset(train_set[:,:-2], train_set[:,-1]) \n train_set_u = theano.shared(np.asarray(train_set[:,-2],dtype='int32')) \n # val_set_x = val_set[:,:-2]\n # val_set_u = val_set[:,-2]\n # val_set_y = val_set[:,-1]\n val_set_x, val_set_y = shared_dataset(val_set[:,:-2], val_set[:,-1])\n val_set_u = theano.shared(np.asarray(val_set[:,-2],dtype='int32')) \n test_set_x = test_set[:,:-2]\n test_set_u = test_set[:,-2]\n test_set_y = test_set[:,-1] \n batch_start = self.index * batch_size\n batch_end = batch_start + batch_size\n\n # compile Theano functions to get train/val/test errors\n \n \n test_y_pred = self.predict(test_set_x,test_set_u)\n test_error = T.mean(T.neq(test_y_pred, self.y))\n # errors on train set\n if self.Users is not None:\n train_model = theano.function([self.index], cost, updates=grad_updates,\n givens={\n self.x: train_set_x[batch_start:batch_end],\n self.y: train_set_y[batch_start:batch_end],\n self.u: train_set_u[batch_start:batch_end]\n },\n allow_input_downcast = True)\n\n train_error = theano.function([self.index], self.errors(self.y),\n givens={\n self.x: train_set_x[batch_start:batch_end],\n self.y: train_set_y[batch_start:batch_end],\n self.u: train_set_u[batch_start:batch_end]},\n allow_input_downcast=True)\n val_model = theano.function([self.index], self.errors(self.y),\n givens={\n self.x: val_set_x[batch_start:batch_end],\n self.y: 
val_set_y[batch_start:batch_end], \n self.u: val_set_u[batch_start:batch_end]},\n allow_input_downcast=True)\n test_model = theano.function([self.x, self.u, self.y], test_error, allow_input_downcast=True)\n else:\n train_model = theano.function([self.index], cost, updates=grad_updates,\n givens={\n self.x: train_set_x[batch_start:batch_end],\n self.y: train_set_y[batch_start:batch_end]},\n allow_input_downcast = True)\n\n train_error = theano.function([self.index], self.errors(self.y),\n givens={\n self.x: train_set_x[batch_start:batch_end],\n self.y: train_set_y[batch_start:batch_end]},\n allow_input_downcast=True)\n\n val_model = theano.function([self.index], self.errors(self.y),\n givens={\n self.x: val_set_x[batch_start:batch_end],\n self.y: val_set_y[batch_start:batch_end]},\n allow_input_downcast=True)\n test_model = theano.function([self.x, self.y], test_error, allow_input_downcast=True)\n\n # start training over mini-batches\n print 'training...' \n best_val_perf = 0\n test_perf = 0 \n patience = 5\n drops = 0\n prev_val_perf = 0 \n for epoch in xrange(epochs):\n start_time = time.time()\n # FIXME: should permute whole set rather than minibatch indexes\n if shuffle_batch:\n for minibatch_index in np.random.permutation(range(n_train_batches)):\n cost_epoch = train_model(minibatch_index)\n self.set_zero(self.zero_vec) # CHECKME: Why?\n else:\n for minibatch_index in xrange(n_train_batches):\n cost_epoch = train_model(minibatch_index) \n self.set_zero(self.zero_vec)\n train_losses = [train_error(i) for i in xrange(n_train_batches)]\n train_perf = 1 - np.mean(train_losses)\n val_losses = [val_model(i) for i in xrange(n_val_batches)]\n val_perf = 1 - np.mean(val_losses) \n info = 'epoch: %i\\%i (%.2f secs) train acc: %.2f %% | val acc: %.2f %%' % (\n epoch,epochs, time.time()-start_time, train_perf * 100., val_perf*100.) \n # from ipdb import set_trace; set_trace()\n if val_perf > prev_val_perf: \n drops=0\n if val_perf >= best_val_perf:\n best_val_perf = val_perf\n info+= \" **\"\n if model:\n # print \"save model\"\n self.save(model)\n if self.Users is not None:\n test_loss = test_model(test_set_x, test_set_u, test_set_y)\n else:\n test_loss = test_model(test_set_x, test_set_y)\n test_perf = 1 - test_loss \n else: \n drops+=1\n if drops >= patience:\n print \"Ran out of patience...\"\n break\n prev_val_perf = val_perf\n print info\n # set_trace() \n return test_perf", "def evaluate_model(model, X_test, y_test):\n # run prediction with test data\n y_pred = model.predict(X_test)\n\n # print precision, recall and f1-score\n i = 0\n for col in y_test:\n print('Evaluation for \"{}\": \\n {} \\n\\n'.format(col, classification_report(y_test[col], y_pred[:,i])))\n i += 1", "def evaluate_model(model, X_train, y_train, X_test, y_test):\n model = model\n model.fit(X_train, y_train)\n\n y_pred = model.predict(X_test)\n\n report = classificationreport(y_test, y_pred, target_names= [\"0\", \"1\"], output_dict=True)\n\n return report", "def run(self):\n\n # In case there's no exogenous\n if len(self.demand.columns) == 1:\n forecast, model, error = self.run_model()\n best = pd.DataFrame([[forecast, model, error, None]],\n columns=['forecast', 'model', 'error_predictions','additional_features'])\n\n else:\n # Define external variables to be iteratively included\n # Possibilities: 'sales', 'marketing', 'macroeconomics', 'stock',...\n exogenous = self.demand.drop(columns=\"y\")\n features_list = list(exogenous.columns)\n\n # Build all possible combinations of external features. 
Order is not relevant\n features_combinations = []\n for i in range(1, len(features_list) + 1):\n features_combinations += list(itertools.combinations(features_list, i))\n\n # Create a model per set of external features\n results = list()\n for f in features_combinations:\n forecast, model, error = self.run_model(additional_features=f)\n results.append((forecast, model, error, '_'.join(f)))\n\n results_df = pd.DataFrame(results)\n results_df.columns = ['forecast', 'model', 'error_predictions','additional_features']\n best = self.check_best(results_df)\n\n return best.iloc[0], best['error_predictions'].values[0] < self.error_target", "def _evaluate_during_fit(self, test_loader, epoch):", "def evaluate_from_featurizations(self, sess, featurizations, y):\n feed_dict = {self.featurizations: featurizations, self.y: y}\n loss, acc = sess.run([self.loss, self.accuracy], feed_dict = feed_dict)\n self.logger.info(\"Model was evaluated from featurizations\")\n return loss, acc", "def evaluateModel(model, val_data, abs_idx2word, device, batch_size):\n #modify abs_idx2word by removing pad tokens so as to correctly calculate Rouge scores\n abs_idx2word[0] = ''\n\n #data setup\n val_data.move_to(torch.device('cpu')) #keep data on cpu\n val_dataloader = data.DataLoader(val_data, batch_size=batch_size, shuffle=True, num_workers=0)\n #model instantiation\n model = model.to(device=device)\n #evaluation\n logger.debug(f'\\tModel eval on validation data...')\n r1, r2, rl = evaluate.evaluate_model(model, val_dataloader, abs_idx2word, device, print_example=True)\n logger.debug(f'\\nRouge-1 is {r1:.4f}, Rouge-2 is {r2:.4f}, and Rouge-l is {rl:.4f}')", "def test_evaluate():\n X_train, X_test, y_train, y_test = src.load()\n clf, score = src.train(X_train, y_train)\n test_score = src.evaluate(clf, X_test, y_test)\n assert isinstance(test_score, float)", "def evaluate(self,**kwargs):\n # setup model\n self.optimizer = SGD(lr = 0,momentum=0,decay = 0)\n self.createModel()\n self.setGenerators()\n self.printParameters()\n output = {}\n\n if kwargs['validationOnly'] != None:\n if kwargs['validationOnly'] == True:\n valOnly = True\n else:\n valOnly = False\n else:\n valOnly = False\n\n if valOnly == False:\n trainOutput = self.model.evaluate_generator(\n generator = self.trainGen,\n steps=self.steps_per_epoch,\n use_multiprocessing=True,\n verbose=1\n )\n output['loss'] = trainOutput[0]\n for i in range(len(self.metricsAsString)):\n output[self.metricsAsString[i]] = trainOutput[i+1]\n\n print(\"loss : \" + str(output['loss']))\n for i in range(len(self.metricsAsString)):\n tmp = self.metricsAsString[i] \n print(tmp + \" : \" + str(output[tmp])) \n\n validationOutput = self.model.evaluate_generator(\n generator = self.validateGen,\n steps=self.validation_steps, \n use_multiprocessing=True, \n verbose=1)\n \n output['val_loss'] = validationOutput[0]\n for i in range(len(self.metricsAsString)):\n output[\"val_\" + self.metricsAsString[i]] = validationOutput[i+1]\n \n\n print(\"val_loss : \" + str(output['val_loss']))\n for i in range(len(self.metricsAsString)):\n tmp = \"val_\" + self.metricsAsString[i] \n print(tmp + \" : \" + str(output[tmp]))", "def evaluate(net, dev, batcher): \n def accuracy(outputs, labels):\n correct = 0\n total = 0\n misclassified = []\n for (i, output) in enumerate(outputs):\n total += 1\n if labels[i] == output.argmax():\n correct += 1 \n return correct, total, misclassified\n val_loader = batcher(dev, 128)\n total_val_loss = 0\n correct = 0\n total = 0\n misclassified = []\n loss = 
torch.nn.CrossEntropyLoss() \n for data in val_loader:\n inputs = data[:,1:]\n labels = torch.clamp(data[:,0], min=0).long()\n\n val_outputs = net(inputs) \n val_loss_size = loss(val_outputs, labels)\n\n correct_inc, total_inc, misclassified_inc = accuracy(val_outputs, \n labels)\n correct += correct_inc\n total += total_inc\n misclassified += misclassified_inc\n total_val_loss += val_loss_size.data.item()\n return correct/total, misclassified", "def evaluate_model(predictions_table: pd.DataFrame, ratings_table: pd.DataFrame,\n test_table: pd.DataFrame, viewed_table: pd.DataFrame,\n all_predictions_table: pd.DataFrame) -> Dict:\n\n viewed = viewed_table.to_numpy()\n predictions = predictions_table.to_numpy()\n all_predictions = all_predictions_table.to_numpy()\n ratings = ratings_table.to_numpy()\n test = test_table.to_numpy(copy=True)\n test_demeaned = test - np.mean(test, axis=1).reshape(-1, 1)\n test_set = test_table.apply(lambda x: x != 0.0).astype(int).to_numpy()\n\n result = dict()\n\n # Test set evaluation\n test_predictions = np.multiply(all_predictions, test_set)\n diff = np.subtract(test_predictions, test_demeaned)\n rmse = np.sqrt(np.mean(np.square(diff)))\n result.update({\"RMSE\": rmse})\n mae = np.mean(np.absolute(diff))\n result.update({\"MAE\": mae})\n\n # overall evaluation\n movies_recommended = set()\n i = 0\n for index, row in predictions_table.iterrows():\n if i > 10000: break\n else: i += 1\n\n labels = row.nlargest(20).keys().tolist()\n for movie in labels:\n movies_recommended.update(movie)\n\n result.update({\"Coverage\": len(movies_recommended)})\n\n return result", "def evaluate_model(valp):\n\n a = valp.predict(data_inputs[\"Test\"], [], new=True)[0]\n\n m2e = np.mean(mse(a[\"o0\"], data_outputs[\"Test\"][\"o0\"]))\n acc = 1 - acc_err(a[\"o1\"][:, 0], np.argmax(data_outputs[\"Test\"][\"o1\"], axis=1))\n i_d = 50-np.mean(inception_score(a[\"o2\"][:100]))\n\n return np.array([m2e, acc, i_d])", "def eval(self):\n\n # parameters initialize\n torch = import_optional_dependency(\"torch\")\n eval_total = 0\n eval_correct = 0\n eval_loss = 0\n self._set_eval()\n\n # display the information\n if self.info:\n print(f\"\\rEvaluating...\", end=\"\")\n\n # start eval part\n for i, (source, target) in enumerate(self.eval_dataset):\n # send data to device\n source = source.to(self.device)\n target = target.to(self.device)\n\n result = self.model(source)\n eval_loss += self.criterion(result, target).item()\n _, predicted = torch.max(result.data, 1)\n eval_total += target.size(0)\n eval_correct += (predicted == target).sum().item()\n\n accuracy = eval_correct / eval_total\n eval_loss = eval_loss / eval_total\n\n if self.info:\n print(f\"\\rEvaluation loss: { eval_loss } | Accuracy: { accuracy }\")\n\n return eval_loss, accuracy", "def evaluate_model(self, test_data, test_labels,verbose=2):\n test_loss, test_acc = self.model.evaluate(test_data, test_labels, verbose=verbose)\n return test_loss, test_acc", "def evaluate_model(model, model_name, X_train, Y_train, X_test, ground_truth):\n\tprint(\"\t\tModel [\" + model_name + \"]\")\n\tmodel.fit(X_train, Y_train)\n\tY_pred = model.predict(X_test).astype(int)\n\tregression = np.sqrt(metrics.mean_squared_error(ground_truth, Y_pred))\n\treturn regression", "def print_eval(trainset, testset, exptypes=EXPTYPES, semantic=False, savemodels=False, loadmodels=False, deprep=False, externals=True, predict=True):\n system_pairs = []\n print \"== cleaning lsts ==\"\n cleanupnonespanexpressions(testset)\n cleanholdercandidates(testset)\n 
cleanholders(testset)\n cleanupnonespanexpressions(trainset)\n cleanholdercandidates(trainset)\n cleanholders(trainset)\n \n print \"== train ==\"\n ev = evaluate()\n features, labels, stats = getfeaturesandlabels(trainset, semantic=semantic, predict=False)\n print counters, '\\n'\n\n print \"== test ==\"\n counters.clear()\n ftest, ltest, stest = getfeaturesandlabels(testset, semantic=semantic, predict=predict)\n print counters\n for exp in exptypes:\n vec, X, y = create_matrix(features[exp], labels[exp])\n if externals:\n vecw, Xw, yw = create_matrix(features[exp + 'w'], labels[exp + 'w'])\n vecimp, Ximp, yimp = create_matrix(features[exp + 'w'], labels[exp + 'implicit'])\n if loadmodels:\n clf = read_model(loadmodels + exp)\n else:\n clf = create_model(X, y)\n if externals:\n clfw = create_model(Xw, yw)\n clfimp = create_model(Ximp, yimp)\n if savemodels:\n write_model(clf, savemodels + exp)\n print \"== eval ==\"\n if deprep:\n print \"== {} ==\".format(deprep)\n Xt, yt = transform_to_matrix(ftest[exp], ltest[exp], vec)\n if externals:\n Xtw, ytw = transform_to_matrix(ftest[exp + 'w'], ltest[exp + 'w'], vecw)\n Xtimp, ytimp = transform_to_matrix(ftest[exp + 'w'], ltest[exp + 'implicit'], vecimp)\n results = clf.predict_proba(Xt)\n s_p_w = False\n s_p_imp = False\n gold_p1 = ev.get_unique_exp(copy.deepcopy(stest['positions'][exp + 'w']), exp, count=False)\n gold_p2 = copy.deepcopy(gold_p1)\n gold_p3 = copy.deepcopy(gold_p1)\n if clfw:\n resultsw = clfw.predict_proba(Xtw)\n s_p_w=ev.get_system_pairs_prob(stest['positions'][exp + 'w'], resultsw, gold_p1)\n counters['s_p_w' + exp] = len(s_p_w)\n if DEBUG:\n print \"RESULTSW\"\n print resultsw\n if clfimp:\n resultsimp = clfimp.predict_proba(Xtimp)\n s_p_imp=ev.get_system_pairs_prob(stest['positions'][exp + 'implicit'], resultsimp, gold_p2)\n counters['s_p_imp' + exp] = len(s_p_imp)\n if DEBUG:\n print \"RESULTSIMP\"\n print resultsimp\n s_p_int=ev.get_system_pairs_prob(stest['positions'][exp], results, gold_p3)\n counters['s_p_int' + exp] = len(s_p_int)\n system_pairs_exp = ev.merge_system_pairs(s_p_int, s_p_imp=s_p_imp, s_p_w=s_p_w)\n counters['system_pairs_all' + exp] = len(system_pairs_exp)\n for pair in system_pairs_exp:\n if 'confidence' in pair and pair['confidence'] > 0:\n counters['system_pairs' + exp] += 1\n if predict:\n ssc_exp = ev.spansetcoverage_o_p(system_pairs_exp, exptype=exp)\n print \"system exp - {}:\\n{}\".format(exp, prf_prettystring(ssc_exp))\n else:\n ssc_exp = ev.spansetcoverage_o_p(system_pairs_exp, exptype=exp)\n print \"gold exp - {}:\\n{}\".format(exp, prf_prettystring(ssc_exp))\n system_pairs.extend(system_pairs_exp)\n if predict:\n ssc = ev.spansetcoverage_o_p(system_pairs)\n print \"system exp - all:\\n\", prf_prettystring(ssc)\n else:\n ssc = ev.spansetcoverage_o_p(system_pairs)\n print \"gold exp - all: \\n\", prf_prettystring(ssc)\n \n for k,v in sorted(counters.items(), key=lambda x: x[0]):\n print k, v\n if isinstance(deprep, basestring):\n dump_jsonfile(system_pairs, 'system_pairs-' + deprep + '.json')\n return {'stats': stest, 'system_pairs': system_pairs}", "def train_and_validate(trnK, trnY, valK, valY, Cs):\n models = []\n trn_error = []\n val_error = []\n sup_vect = []\n\n for C in Cs:\n #Training\n model = train(trnK, trnY, C)\n trn_error.append((100 - evaluate(trnK, trnY, model)) / 100)\n sup_vect.append(len(model.get_SV()))\n models.append(model)\n #Evaluate\n val_error.append((100 - evaluate(valK, valY, model)) / 100)\n return(models, trn_error, val_error, sup_vect)", "def 
evaluate_model(model_name, y_true, y_pred):\n\n # Calculate performance metrics\n rmse_eval = evaluate_rmse(y_true, y_pred)\n mae_eval = evaluate_mae(y_true, y_pred) \n r2_eval = evaluate_r2(y_true, y_pred)\n\n # Print results\n print_evaluation(model_name, mae_eval, rmse_eval, r2_eval)", "def _evaluate(model):\n _recompile(model)\n if isinstance(eval_dataset, tuple):\n eval_images, eval_labels = eval_dataset\n return model.evaluate(\n eval_images, eval_labels, verbose=verbose, return_dict=True)\n else:\n return model.evaluate(eval_dataset, verbose=verbose, return_dict=True)", "def evaluate(model, datagen, X_test, Y_test, batch_size, save_folder_path=None):\n\n print(\"[INFO] Evaluating model...\")\n\n scores = model.evaluate_generator(\n datagen.flow(X_test, Y_test, batch_size=batch_size),\n verbose=1)\n \n print(\"[INFO] Evaluation results:\\n{0}: {1:.2f}\\n{2}: {3:.2f}\".format(model.metrics_names[0], scores[0]*100, model.metrics_names[1], scores[1]*100))\n \n if save_folder_path is not None:\n # Write results to path\n assert os.path.isdir(save_folder_path) == True, \"Unable to save evaluation results, save_folder_path is not a folder\"\n eval_results_path = save_folder_path + \"/eval_results.txt\"\n eval_handle = open(eval_results_path, 'w')\n eval_handle.write(\"Model name: {}\\n\\n\".format(MODEL_NAME))\n eval_handle.write(\"Evaluation results:\\n{0}: {1:.2f}\\n{2}: {3:.2f}\".format(model.metrics_names[0], scores[0]*100, model.metrics_names[1], scores[1]*100))\n eval_handle.close()", "def run(self, data, training=False):\n # Set mode\n if training:\n self._model.train()\n else:\n self._model.eval()\n # Compute\n return self._model(data)", "def train_all(X_train_fuse, Y_train, X_dev_fuse, Y_dev, R_train, R_dev, hyperparams):", "def evaluate(func, dset_path, model_path):\n dset = load_dataset(dset_path, 'trva', False)\n\n \"\"\"\n average class-based zero-shot accuracy\n \"\"\"\n scores = func(dset['Xte_unseen'], dset['Ste_unseen_gt'], model_path)\n preds = np.argmax(scores, 1)\n preds = dset['Cte_unseen'][preds]\n acc_zsl = compute_acc(dset['Lte_unseen'], preds)\n\n \"\"\"\n average class-based generalized zsl accuracy on seen test classes\n \"\"\"\n scores = func(dset['Xte_seen'], dset['Sall_gt'], model_path)\n preds = np.argmax(scores, 1)\n preds = dset['Call'][preds]\n acc_gzsl_seen = compute_acc(dset['Lte_seen'], preds)\n\n \"\"\"\n average class-based generalized zsl accuracy on unseen test classes\n \"\"\"\n scores = func(dset['Xte_unseen'], dset['Sall_gt'], model_path)\n preds = np.argmax(scores, 1)\n preds = dset['Call'][preds]\n acc_gzsl_unseen = compute_acc(dset['Lte_unseen'], preds)\n\n print 'ZSL accuracy: ', acc_zsl\n print 'Generalized ZSL accuracy on seen classes: ', acc_gzsl_seen\n print 'Generalized ZSL accuracy on unseen classes: ', acc_gzsl_unseen", "def evaluate(self, X_test, y_test):\n self.run(self)\n self.y_pred = self.pipeline.predict(X_test)\n self.rmse = compute_rmse(self.y_pred, y_test)", "def evaluate(weights: fl.common.Weights) -> Optional[Tuple[float, float]]:\n model = models.load_model(glb.MODEL)\n model.set_weights(weights)\n model.to(DEVICE)\n testloader = torch.utils.data.DataLoader(testset, batch_size=32, shuffle=False)\n # using pytorch for central evaluation, can be tensorflow as well\n return modules.pt_test(model, testloader, device=DEVICE)", "def evaluator(test_config: TestConfig, criterion: nn.Module, model: nn.Module,\n device: torch.device) -> Engine:\n metrics, eval_metric, *_ = test_config\n metrics['loss'] = Loss(criterion,\n 
output_transform=lambda data: (data[0], data[1]))\n val_evaluator = create_supervised_evaluator(model, metrics, device,\n prepare_batch=prepare_batch)\n return val_evaluator", "def _evaluate_model(\n run_id: str, dataset_filename: str, dataset_sampling_column: str = None\n):\n fix_multiprocessing_with_keras_on_macos()\n\n run = _get_run(run_id)\n hyperparameters = run.config\n\n # no need to run this on a gpu since it's 1 epoch\n os.environ[\"CUDA_VISIBLE_DEVICES\"] = \"-1\"\n\n with ModelBestH5File(run) as model_h5_filepath:\n model = _load_untrainable_model(hyperparameters, model_h5_filepath)\n\n model_name = run.config[\"model_name\"]\n x, y = _get_prepared_dataset(\n model_name, hyperparameters, dataset_filename, dataset_sampling_column\n )\n\n wandb.init(\n config={\n \"run_id\": run_id,\n \"dataset_filename\": dataset_filename,\n \"dataset_sampling_column\": dataset_sampling_column,\n },\n tags=[\"model-evaluation\"],\n )\n\n batch_size = hyperparameters[\"batch_size\"]\n label_scale_factor_mmhg = hyperparameters[\"label_scale_factor_mmhg\"]\n acceptable_error_mg_l = hyperparameters[\"acceptable_error_mg_l\"]\n acceptable_fraction_outside_error = hyperparameters[\n \"acceptable_fraction_outside_error\"\n ]\n\n # we're using fit() instead of evaluate() to get the functionality of these callbacks\n # training performance in the results should be ignored, as it can be affected by some\n # training-only layers such as dropout\n model.fit(\n x,\n y,\n batch_size=batch_size,\n epochs=1,\n verbose=2,\n validation_data=(x, y),\n callbacks=[\n ThresholdValMeanAbsoluteErrorOnCustomMetric(\n acceptable_fraction_outside_error=acceptable_fraction_outside_error,\n acceptable_error_mg_l=acceptable_error_mg_l,\n ),\n WandbCallback(verbose=1, monitor=\"val_adjusted_mean_absolute_error\"),\n LogPredictionsAndWeights(\n metric=\"val_adjusted_mean_absolute_error\",\n dataset=([], [], x, y),\n label_scale_factor_mmhg=label_scale_factor_mmhg,\n ),\n ],\n )\n\n # returning model and dataset for use in jupyter notebooks\n return model, (x, y)", "def evaluate(eval_ds, model, task):\n\n print('==========EVAL==========')\n # Testing contrastive accuracy\n if task['name'] == 'contrastive_accuracy':\n ds = eval_ds.map(data_utils.pretrain_preprocess)\n ds = ds.batch(128)\n test_contrast_acc = tf.keras.metrics.Accuracy(name='test_constrastive_accuracy')\n for x in ds:\n image = x['image']\n image = tf.transpose(image, [1, 0, 2, 3, 4])\n image = tf.reshape(\n image, \n (image.shape[0]*image.shape[1], image.shape[2], image.shape[3], image.shape[4])\n )\n out = model(image, mode='unsupervised', training=False)\n metrics.update_contrastive_accuracy2(test_contrast_acc, out, TEMP)\n print('test contrastive accuracy')\n print(test_contrast_acc.result())\n return \n\n # Testing classification accuracy \n ds = eval_ds.filter(lambda x: x['label'] != task['excluded_label'])\n ds = ds.map(data_utils.eval_preprocess)\n ds = ds.batch(FLAGS.eval_bs)\n test_class_acc = tf.keras.metrics.Accuracy(name='test_class_accuracy')\n for x in ds:\n image = x['image']\n labels = x[task['name']]\n if task['name'] == 'extr':\n out = model(image, mode='eval', sup_layers=2, training=False)\n else:\n out = model(image, mode='eval', sup_layers=1, training=False)\n metrics.update_supervised_accuracy(test_class_acc, labels, out)\n \n if FLAGS.debug:\n print(tf.math.argmax(out, axis=-1))\n print('test classification accuracy')\n print(test_class_acc.result())", "def evaluate_model(model, X_test, Y_test, category_names):\n\n print(\"Testing 
Performance\")\n print(classification_report(Y_test, model.predict(X_test), target_names=category_names))\n\n #Todo cat names", "def set_models_eval(self):\n raise NotImplementedError", "def evaluate(embed_model, model, pt, dataset, batch_size):\n\n embed_model.eval()\n model.eval()\n lossf = nn.NLLLoss(size_average=False)\n\n correct = 0\n total = 0\n total_loss = 0\n print('Start Evaluating!')\n print('Validation Size: {}'.format(len(dataset)))\n\n threshold = 0.3\n\n data_iter = iter(pt.batch_iter(dataset, batch_size))\n\n for i in range(len(dataset) // batch_size):\n\n # catch the data\n p1_idx, p2_idx, _, _, label = next(data_iter)\n\n p1_idx = Variable(p1_idx)\n p2_idx = Variable(p2_idx)\n label = Variable(label)\n\n if use_cuda:\n p1_idx = p1_idx.cuda()\n p2_idx = p2_idx.cuda()\n label = label.cuda()\n\n # Feed to the network\n p1_emb, p2_emb = embed_model(p1_idx, p2_idx)\n out = model(p1_emb, p2_emb)\n\n # print(label)\n # print(out)\n\n loss = lossf(out, label)\n total_loss += loss.data[0]\n\n prob = torch.exp(out)\n predicted = Variable(torch.LongTensor([1 if l[1].data[0] >= threshold else 0 for l in prob]))\n\n # _, predicted = torch.max(out, dim=1)\n total += p1_idx.size()[0]\n\n # print(predicted)\n\n correct += torch.sum((label == predicted), dim=0).data[0]\n\n print('Correct Labels: {}/{}'.format(correct, (i + 1) * batch_size))\n\n print('Valid Loss: {}, Acc: {}'.format(total_loss / float(total),\n correct / float(total)))", "def evaluate_model(model, item_valid_input, valid_labels,num_items, topK): \n hits, ndcgs = [],[]\n target_oh = to_categorical(valid_labels, num_classes=num_items)\n input_oh = to_categorical(item_valid_input, num_classes=num_items) \n input_oh = np.expand_dims(input_oh, axis=1)\n\n predictions = model.predict(input_oh, batch_size=args.batch_size)\n #predictions = model.predict([np.array(user_valid_input), np.array(item_valid_input)], batch_size=args.batch_size, verbose=0)\n topk_ind = predictions.argsort()[:,::-1][:,:topK]\n for item in zip(topk_ind,valid_labels):\n hr = ProjectUtility.getHitRatio(item[0], item[1])\n ndcg = ProjectUtility.getNDCG(item[0], item[1])\n hits.append(hr)\n ndcgs.append(ndcg)\n hits.append(hr)\n ndcgs.append(ndcg) \n return (hits, ndcgs)", "def eval_model(self, model):\n evaluation = model.evaluate(x=self.xt_test, y=self.yt_test)\n print(\"loss : \" + str(round(evaluation[0]*100, 2)) + \"%\")\n print(\"accuracy: \" + str(round(evaluation[1]*100, 2)) + \"%\")", "def _evaluate(self, train_x, train_y, test_x, test_y, n_targets, name):\n r_temp = {}\n for metric_name in self.metrics:\n r_temp.update({f\"{metric_name}_Model\": name, f\"{metric_name}_Sum\": 0,\n f\"{metric_name}_Min\": 1000000, f\"{metric_name}_Max\": 0})\n\n for i in range(self.repetitions):\n is_nan = True\n while (is_nan):\n model = self.get_model(train_x.shape[1], n_targets)\n model.fit(train_x, train_y, **self.fit_kwargs)\n result = model.predict(test_x)\n is_nan = np.any(np.isnan(result))\n del model\n\n for metric_name in self.metrics:\n metric = self.get_metrics(metric_name)\n value = metric(result, test_y)\n r_temp[f\"{metric_name}_Sum\"] += value\n if r_temp[f\"{metric_name}_Min\"] > value:\n r_temp[f\"{metric_name}_Min\"] = value\n if r_temp[f\"{metric_name}_Max\"] < value:\n r_temp[f\"{metric_name}_Max\"] = value\n keras.backend.clear_session()\n for metric_name in self.metrics:\n r_temp[f\"{metric_name}_Mean\"] = r_temp[f\"{metric_name}_Sum\"] / self.repetitions\n return r_temp", "def evaluate(model, test_files):\n print(\"Running predictions.\")\n 
models = load_model(model)\n predictions = predict(models, test_files)\n\n # # write predictions to file\n # write_predictions(\"evaluate_out.json\",predictions)\n evaluate_individual(predictions, test_files, models)\n evaluate_overall(predictions)", "def eval(self): \n inputs,enc_input_weights, outputs, dec_input_weights = self.get_batch()\n predicted_ids = self.model.step(self.sess, inputs, enc_input_weights) \n print(\"=\"*20)\n for i in range(FLAGS.batch_size):\n print(\"* %dth sample target: %s\" % (i,str(outputs[i,1:]-2)))\n for predict in predicted_ids[i]:\n print(\"prediction: \"+str(predict)) \n print(\"=\"*20)", "def eval(self, model, data_iterators, key=\"val\"):\n assert key in (\"val\", \"test\")\n assert not (data_iterators[key] is None)\n criterion = self.criterion\n weight = self.weight\n device = self.device\n\n return evaluator.evaluate(\n model,\n device,\n data_iterators[key],\n self.target_labels,\n criterion,\n weight,\n labeled=True,\n )", "def evaluate_model(self, t, scaling_parameters, system_parameters):\n raise NotImplementedError", "def run(self, date):\n\n out = self.nanoutput()\n\n beh = date.glm()\n expl = beh.explained()\n for cellgroup in expl:\n out['devexp_%s' % cellgroup] = expl[cellgroup]\n\n return out", "def evaluate_model(self):\r\n self.model.eval() # sets layers to eval mode (e.g. norm, dropout)\r\n with torch.no_grad(): # deactivates autograd engine\r\n\r\n # generate graphs required for model evaluation\r\n # note that evaluation of the generated graphs happens in\r\n # `generate_graphs()`, and molecules are saved as `self` attributes\r\n self.generate_graphs(n_samples=self.C.n_samples, evaluation=True)\r\n\r\n print(\"* Evaluating model.\", flush=True)\r\n anal.evaluate_model(valid_dataloader=self.valid_dataloader,\r\n train_dataloader=self.train_dataloader,\r\n nll_per_action=self.nll_per_action,\r\n model=self.model)\r\n\r\n self.nll_per_action = None # don't need anymore\r\n\r\n print(f\"* Saving model state at Epoch {self.current_epoch}.\", flush=True)\r\n\r\n # `pickle.HIGHEST_PROTOCOL` good for large objects\r\n model_path_and_filename = (self.C.job_dir + f\"model_restart_{self.current_epoch}.pth\")\r\n torch.save(obj=self.model,\r\n f=model_path_and_filename,\r\n pickle_protocol=pickle.HIGHEST_PROTOCOL)", "def evaluate_model(model, train_input, train_target, test_input, test_target, loss, save_plot, mname=None):\n # Evaluate Model in train set\n epochs_number = len(loss)\n output = model.forward(train_input)\n train_loss = model.compute_loss(output, train_target).item()\n train_error = compute_number_error(output, train_target).item()\n\n print(\"\\nTraining Loss: \", train_loss)\n print(\"Training Number of errors: \", train_error)\n\n id_class_train = output.argmax(dim=1)\n if save_plot:\n plot_result(train_input, train_target, id_class_train, fname=mname)\n plot_loss(range(0, epochs_number), loss, fname=mname)\n\n # Deactivate dropout to test models\n model.enable_dropout(False)\n \n # Evaluate Model in test set\n output = model.forward(test_input)\n test_loss = model.compute_loss(output, test_target).item()\n test_error = compute_number_error(output, test_target).item()\n\n print(\"\\nTest Loss: \", test_loss)\n print(\"Test Number of errors: \", test_error)\n\n\n id_class_test = output.argmax(dim=1)\n if save_plot:\n plot_result(test_input, test_target, id_class_test, train=False, fname=mname)\n \n return [train_loss, train_error, test_loss, test_error]" ]
[ "0.71378905", "0.6952982", "0.6932083", "0.6838927", "0.6826608", "0.68036103", "0.6793033", "0.67818105", "0.67691225", "0.67658705", "0.67651814", "0.6759437", "0.6751383", "0.6677337", "0.6666897", "0.6656296", "0.6651581", "0.65554637", "0.6541575", "0.6536319", "0.65075076", "0.6486906", "0.64857537", "0.64689285", "0.64446497", "0.64439523", "0.6416318", "0.6414502", "0.6405916", "0.63966215", "0.6393871", "0.6390256", "0.6390256", "0.6390256", "0.63767827", "0.63698024", "0.63659614", "0.63575995", "0.6348727", "0.63472354", "0.63454866", "0.63437355", "0.63424397", "0.6337702", "0.63355136", "0.632363", "0.6321809", "0.63174635", "0.6316466", "0.63138837", "0.6293964", "0.62932116", "0.62898254", "0.6287008", "0.6284808", "0.62824696", "0.6269858", "0.6260592", "0.62546486", "0.624736", "0.6234864", "0.6222875", "0.62175196", "0.6216319", "0.6209414", "0.6203955", "0.6202705", "0.6195024", "0.61931646", "0.61924404", "0.61770767", "0.6171829", "0.61635286", "0.61597097", "0.6150906", "0.61487395", "0.61445975", "0.61421573", "0.6138958", "0.6138438", "0.6137149", "0.6133659", "0.6126636", "0.61261487", "0.6122558", "0.61138964", "0.6106173", "0.61048293", "0.61025256", "0.6102276", "0.6099936", "0.6099139", "0.6091133", "0.6090032", "0.6087604", "0.60858077", "0.6084754", "0.6083785", "0.6078879", "0.6066502", "0.60635823" ]
0.0
-1
The constructor for creating variables for each movie instance
def __init__(self, movie_title, movie_storyline, poster_image, trailer_youtube, imdb):
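The document above records only the constructor signature. As an illustrative sketch (not part of the dataset), a body consistent with the query would assign each parameter to an instance attribute; the attribute names used here (title, storyline, poster_image_url, trailer_youtube_url, imdb_url) are assumptions modeled on the naming conventions that recur in the negatives below:

def __init__(self, movie_title, movie_storyline, poster_image, trailer_youtube, imdb):
    # Hypothetical body: store each constructor argument on the instance.
    # Attribute names are assumed, mirroring the negatives in this record.
    self.title = movie_title
    self.storyline = movie_storyline
    self.poster_image_url = poster_image
    self.trailer_youtube_url = trailer_youtube
    self.imdb_url = imdb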
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def __init__(self, movie_title, poster_image, trailer_youtube, movie_release_data, genre):\n # initialize instance of class Movie\n self.title = movie_title\n self.poster_image_url = poster_image\n self.trailer_youtube_url = trailer_youtube\n self.movie_release = movie_release_data\n self.movie_genre = genre", "def __init__(\n self,\n movie_title,\n movie_storyLine,\n movie_posterImage,\n movie_trailerlink,\n movie_actor,\n movie_director):\n self.title = movie_title\n self.storyLine = movie_storyLine\n self.poster_image_url = movie_posterImage\n self.trailer_youtube_url = movie_trailerlink\n self.actor=movie_actor\n self.director=movie_director", "def __init__(self, \n movie_title,\n movie_storyline,\n movie_poster,\n movie_trailer):\n self.title = movie_title\n self.storyline = movie_storyline\n self.poster_image_url = movie_poster\n self.trailer_youtube_url = movie_trailer", "def __init__(self, movie_title, movie_storyline, movie_poster_image_url, movie_trailer):\n\t\tself.title = movie_title\n\t\tself.storyline = movie_storyline\n\t\tself.poster_image_url = movie_poster_image_url\n\t\tself.trailer_youtube_url = movie_trailer", "def __init__(self, name, storyline, trailer, poster):\n # Assigning the values of the instances to the class variables\n self.title = name\n self.mov_story = storyline\n self.trailer_youtube_url = trailer\n self.poster_image_url = poster", "def __init__(self, movie_title, movie_storyline, poster_image, trailer_youtube):\n self.title = movie_title\n self.storyline = movie_storyline,\n self.poster_image_url = poster_image\n self.trailer_youtube_url = trailer_youtube", "def __init__(self, movie_title, movie_storyline, poster_image, trailer_youtube):\n\t\tself.title = movie_title\n\t\tself.storyline = movie_storyline\n \t\tself.poster_image_url = poster_image\n\t\tself.trailer_youtube_url = trailer_youtube", "def __init__(self, movie_title, movie_storyline, poster_image, trailer):\r\n self.title = movie_title\r\n self.storyline = movie_storyline\r\n self.poster_image_url = poster_image\r\n self.trailer_youtube_url = trailer", "def __init__(self, movie_title, movie_storyline, poster_img, trailer_youtube):\n self.title = movie_title\n self.storyline = movie_storyline\n self.poster_image_url = poster_img\n self.trailer_youtube_url = trailer_youtube", "def __init__(self,movie_title,movie_storyline,poster_image,youtube_trailer,release_date):\n self.title = movie_title\n self.storyline = movie_storyline\n self.poster_image_url = poster_image\n self.trailer_youtube_url = youtube_trailer\n self.release_date = release_date", "def __init__(self, movie_title, movie_certification, movie_genre,\n poster_image, trailer_youtube):\n\n self.title = movie_title\n self.certification = movie_certification\n self.genre = movie_genre\n self.poster_image_url = poster_image\n self.trailer_youtube_url = trailer_youtube", "def __init__(\r\n self, movie_title, movie_storyline, poster_image, \r\n trailer_youtube): \r\n self.title = movie_title\r\n self.storyline = movie_storyline\r\n self.poster_image_url = poster_image\r\n self.trailer_youtube_url = trailer_youtube", "def __init__(self, movie_title, poster_image, trailer_youtube):\n self.title = movie_title\n self.poster_image_url = poster_image\n self.trailer_youtube_url = trailer_youtube", "def __init__(self, movie_title, poster_image, trailer_youtube, review_imdb):\n self.title = movie_title\n self.poster_image_url = poster_image\n self.trailer_youtube_url = trailer_youtube\n self.imdb_url = review_imdb", "def __init__(self, 
movie_title, movie_storyline, poster_image,\r\n trailer_youtube):\r\n self.title = movie_title\r\n self.storyline = movie_storyline\r\n self.poster_image_url = poster_image\r\n self.trailer_youtube_url = trailer_youtube", "def __init__(self, movie_title, movie_storyline,\n poster_image, trailer_youtube):\n self.title = movie_title\n self.storyline = movie_storyline\n self.poster_image_url = poster_image\n self.trailer_youtube_url = trailer_youtube", "def __init__(self, movie_title, movie_storyline, poster_image,\n trailer_youtube):\n self.title = movie_title\n self.storyline = movie_storyline\n self.poster_image_url = poster_image\n self.trailer_youtube_url = trailer_youtube", "def __init__(self, movie_title, poster_image_url, trailer_youtube_url):", "def __init__(self, movie_title, release_date, movie_storyline, poster_image,\n trailer_youtube, more_link):\n\n self.title = movie_title\n self.date = release_date\n self.storyline = movie_storyline\n self.poster_image_url = poster_image\n self.trailer_youtube_url = trailer_youtube\n self.more_url = more_link", "def __init__(self, id_movie, title, tags, ratings):\n\n self.id_movie = id_movie\n self.title = title\n self.tags = tags\n self.ratings = ratings", "def __init__(self, id, movie_data):\n self.id = id\n self.info = movie_data['film']\n self.casts = movie_data['cast']", "def __init__(self, movie_title, movie_storyline, poster_image_url, trailer_youtube, rating):\n Video.__init__(self, movie_title, movie_storyline, poster_image_url)\n self.trailer_youtube_url = trailer_youtube\n self.rating = rating", "def __init__(self, film_id, genre_id):\n self.film_id = film_id\n self.genre_id = genre_id", "def __init__(self, title, image, movie_tagline=\"\", trailer_url=\"\"):\n self.title = title\n self.poster_image_url = image\n self.trailer_youtube_url = trailer_url\n self.storyline = movie_tagline", "def __init__(self, title):\n # will hit the TMDB API on every instantiation\n search = tmdb.Search()\n response = search.movie({'query': title})\n\n # if there are any results to querying for the title, take the first result\n if len(search.results) > 0:\n self.ID = uuid.uuid4()\n self.TMDB_ID = search.results[0]['id']\n movie = tmdb.Movies(self.TMDB_ID).info() # get all the information available\n\n # save off a few interesting attributes\n self.title = movie['title']\n self.release_date = movie['release_date']\n self.popularity = movie['popularity']\n self.overview = movie['overview']\n else:\n self.initialize()\n print \" ##### Warning: could not find any matches for %s\" % title", "def __init__(self):\n self.movies = []", "def __init__(self, title, poster, trailer):\n\t\tself.title = title\n\t\tself.poster_image_url = poster\n\t\tself.trailer_youtube_url = trailer", "def __init__(self,title, imdb, format):\n self.title = title\n self.imdbID = imdb\n self.format = format\n\n #Ratings format: { site: rating,}\n self.ratings = {}\n\n #Links format: { store: link, }\n self.links = {}\n\n #Prices format: { store: price, }\n self.prices = {}", "def __init__(self, title, year,story, poster_url, trailer_url):\n self.title = title\n self.year = year\n self.story = story\n self.poster_url = poster_url\n self.trailer_url = trailer_url", "def __init__(self, title, image_url, trailer_url):\n self.title = title\n self.trailer_youtube_url = trailer_url\n self.poster_image_url = image_url", "def __init__(self, movie_list, handler):\n self = self\n self.movie_list = movie_list\n self.handler = handler", "def __init__(self, name, typing, reflection, year):#Taking 
in parameters\n self.n = name#Assigning variables\n self.t = typing\n self.r = reflection\n self.y = year", "def __init__(self, title, storyline, poster_image, trailer_youtube):\n\n self.title = title\n self.storyine = storyline\n self.poster_image_url = poster_image\n self.trailer_youtube_url = trailer_youtube", "def __init__(self,\n title, trailer_youtube_url, poster_image_url, trailer_youtube_id):\n # Set the title of the class to the one passed into the function\n self.title = title\n # Set the youtube trailer url of the class to the one passed into the function\n self.trailer_youtube_url = trailer_youtube_url\n # Set the poster image url of the class to the one passed into the function\n self.poster_image_url = poster_image_url\n # Set the youtube trailer id of the class to the one passed into the function\n self.trailer_youtube_id = trailer_youtube_id", "def __init__(self, title, poster_image_url, trailer_youtube_id):\n\t\tself.title = title\n\t\tself.poster_image_url = poster_image_url\n\t\tself.trailer_youtube_url = trailer_youtube_id", "def __init__(self, movie_title, storyline, poster_link, trailer_link):\n self.title = movie_title\n self.storyline = storyline\n self.poster_image_url = poster_link\n self.trailer_youtube_url = trailer_link\n\n \"\"\"The instance method show_trailer is defined\"\"\"", "def __init__(self, title, storyline, poster_image_url, trailer_youtube_url):\n self.title = title\n self.storyline = storyline\n self.poster_image_url = poster_image_url\n self.trailer_youtube_url = trailer_youtube_url", "def ready_movies():\n troy = Movie(movie_title=\"Troy\",\n poster_image=\"https://upload.wikimedia.org/wikipedia/en/b/b8/Troy2004Poster.jpg\",\n trailer_youtube=\"https://www.youtube.com/watch?v=znTLzRJimeY\")\n\n kingdom_of_heaven = Movie(movie_title=\"Kingdom of Heaven\",\n poster_image=\"https://upload.wikimedia.org/wikipedia/en/9/9e/KoHposter.jpg\",\n trailer_youtube=\"https://www.youtube.com/watch?v=moNH4N44D28\")\n\n warrior = Movie(movie_title=\"Warrior\",\n poster_image=\"https://upload.wikimedia.org/wikipedia/en/e/e3/Warrior_Poster.jpg\",\n trailer_youtube=\"https://www.youtube.com/watch?v=I5kzcwcQA1Q\")\n\n pulp_fiction = Movie(movie_title=\"Pulp Fiction\",\n poster_image=\"https://upload.wikimedia.org/wikipedia/en/3/3b/Pulp_Fiction_%281994%29_poster.jpg\",\n trailer_youtube=\"https://www.youtube.com/watch?v=s7EdQ4FqbhY\")\n\n fight_club = Movie(movie_title=\"Fight Club\",\n poster_image=\"https://upload.wikimedia.org/wikipedia/en/f/fc/Fight_Club_poster.jpg\",\n trailer_youtube=\"https://www.youtube.com/watch?v=SUXWAEX2jlg\")\n\n the_matrix = Movie(movie_title=\"The Matrix\",\n poster_image=\"https://upload.wikimedia.org/wikipedia/en/c/c1/The_Matrix_Poster.jpg\",\n trailer_youtube=\"https://www.youtube.com/watch?v=vKQi3bBA1y8\")\n\n the_dark_knight = Movie(movie_title=\"The Dark Knight\",\n poster_image=\"https://upload.wikimedia.org/wikipedia/en/8/8a/Dark_Knight.jpg\",\n trailer_youtube=\"https://www.youtube.com/watch?v=vKQi3bBA1y8\")\n\n whiplash = Movie(movie_title=\"Whiplash\",\n poster_image=\"https://upload.wikimedia.org/wikipedia/en/0/01/Whiplash_poster.jpg\",\n trailer_youtube=\"https://www.youtube.com/watch?v=7d_jQycdQGo\")\n\n dredd = Movie(movie_title=\"Dredd\",\n poster_image=\"https://upload.wikimedia.org/wikipedia/en/1/16/Dredd2012Poster.jpg\",\n trailer_youtube=\"https://www.youtube.com/watch?v=qv-6dNqqnMA\")\n\n ai_artificial_intelligence = Movie(movie_title=\"A.I. 
Artificial Intelligence\",\n poster_image=\"https://upload.wikimedia.org/wikipedia/en/e/e6/AI_Poster.jpg\",\n trailer_youtube=\"https://www.youtube.com/watch?v=_19pRsZRiz4\")\n\n watchmen = Movie(movie_title=\"Watchmen\",\n poster_image=\"https://upload.wikimedia.org/wikipedia/en/b/bc/Watchmen_film_poster.jpg\",\n trailer_youtube=\"https://www.youtube.com/watch?v=PVjA0y78_EQ\")\n\n the_rock = Movie(movie_title=\"The Rock\",\n poster_image=\"https://upload.wikimedia.org/wikipedia/en/8/82/The_Rock_%28movie%29.jpg\",\n trailer_youtube=\"https://www.youtube.com/watch?v=jGVJx5mOtL8\")\n\n movies = [troy, kingdom_of_heaven, warrior, pulp_fiction, fight_club, the_matrix,\n the_dark_knight, whiplash, dredd, ai_artificial_intelligence,\n watchmen, the_rock]\n\n return movies", "def __init__(self: object) -> None:\n self.empty: bool = True\n self.episode_broadcast: str = \"\"\n self.episode_id: int = 0\n self.episode_inspectors: str = \"\"\n self.episode_name: str = \"\"\n self.episode_sequence: str = \"\"\n self.episode_url: str = \"\"\n self.episode_year: int = 0", "def __init__(self):\n super().__init__()\n self.data_set_loc = conf.config_section_mapper(\"filePath\").get(\"data_set_loc\")\n self.data_extractor = DataExtractor(self.data_set_loc)\n self.sim_act_diff_mov_tf = SimilarActorsFromDiffMovies()", "def __init__(self):\n super().__init__()\n self.data_set_loc = conf.config_section_mapper(\"filePath\").get(\"data_set_loc\")\n self.data_extractor = DataExtractor(self.data_set_loc)\n self.sim_act_diff_mov_tf = SimilarActorsFromDiffMovies()", "def __init__(self, title, plot, trailer_youtube_url,\n poster_image_url, movie_data=None):\n\n self.title = title\n self.plot = plot\n self.trailer_youtube_url = trailer_youtube_url\n self.poster_image_url = poster_image_url\n self.movie_data = movie_data", "def __init__(self, name: str) -> None:\n\n self._name = name\n self._episode_lines: Dict[str, List[str]] = {}\n self._unique_words: List[str] = []\n self._scene_appearance_dict: Dict[str, int] = {}\n self._num_scenes = 0", "def __init__(self):\n self.movie_reviews = []", "def __init__(self, movie_title, movie_storyline, poster_image, trailer_youtube):\n\n self.title = movie_title\n self.storyline = movie_storyline\n self.poster_image_url = poster_image\n self.trailer_youtube_url = trailer_youtube\n\n # Define a function to show the movie trailer\n # when a user clicks on the movie poster on the site\n\n \"\"\"\n Initializing instance for opening the youtube video\n\n :return: webbrowser to play thriller\n \"\"\"", "def __init__(self, variables, vid, vtype, vname, init, status, timestamp, prec):\n super().__init__()\n self._id = vid\n self._init = init\n self._last_edited = timestamp\n self._last_update = now()\n self._last_changed = now()\n self._name = vname\n self._prec = prec\n self._status = status\n self._type = vtype\n self._variables = variables\n self.isy = variables.isy\n self.status_events = EventEmitter()", "def config_movie_objects():\n movies = []\n movie_title_list=[\"Back to the Future\",\"Blazing Saddles\",\"The Dark Knight\",\n \"Deadpool\",\"Die Hard\",\"Fear and Loathing in Las Vegas\",\n \"Forbidden Zone\",\"Guardians of the Galaxy\",\n \"The Last Dragon\",\"Pulp Fiction\",\"They Live\"]\n # For every movie title gather info and add to list\n for movie_title in movie_title_list:\n imdb_data = query_imdb(movie_title)\n trailer_url = query_youtube(movie_title)\n\n if imdb_data[\"Response\"] == \"True\": # Dictionary contains movie info\n 
movies.append(media.Movie(imdb_data[\"Title\"],\n imdb_data[\"Year\"],\n imdb_data[\"Plot\"],\n imdb_data[\"Poster\"],\n trailer_url,\n imdb_data[\"Rated\"],\n imdb_data[\"Runtime\"]))\n\n # Send the list of movie objects to the open_movies_page method\n # Generates and opens the fresh_tomatoes website\n fresh_tomatoes.open_movies_page(movies)", "def __init__(self, api_key):\n self.api_key = api_key\n self.BASE_URL = 'https://api.themoviedb.org/3/movie/'\n self.language = 'en-US'\n self.IMAGE_URL = 'https://image.tmdb.org/t/p/w500'", "def __init__(self):\n self._tyrannosaurus = []\n self._triceratops = []", "def __init__(self):\n self.variables = [] # List of all variables in certain scope.\n self.field_id = 0 # Id of next field variable.\n self.argumen_id = 0 # Id of next argument variable.\n self.local_id = 0 # Id of next local variable.\n self.static_id = 0 # Id of next static variable.", "def __init__(self, **kwargs):\n self.paused = self._get('paused', **kwargs)\n self.title = self._get('title', **kwargs)\n self.artist = self._get('artist', **kwargs)\n self.album = self._get('album', **kwargs)\n self.total_time = self._get('total_time', **kwargs)\n self.position = self._get('position', **kwargs)\n self.mediakind = self._get('mediakind', **kwargs)\n self.playstatus = self._get('playstatus', **kwargs)", "def init_attributes(self):\n\n # Creates two lists: one for x-values, one for y-values\n self.xValues = []\n self.yValues = []\n self.colors = []\n\n # Colors of the graph\n self.saveColor = \"limegreen\"\n self.currentColor = \"r\"\n self.unsavedColor = \"royalblue\"\n\n # Initialization of some other attributes\n self.filename = None\n self.mouseY = 0\n\n self.startIndex = 0\n self.current_position = 0\n self.position_index = 0\n\n self.savedRecently = False\n self.videoOpened = False\n\n self.animation = None\n self.curve = None\n self.k = 25\n self.dt = self.k", "def __init__(self, name, plays, number):\n self.name = name\n self.plays = plays\n self.number = number", "def __init__(self, **kwargs):\n self.identifier = kwargs.get(\"identifier\")\n self.playback_state = kwargs.get(\"playback_state\")\n self.title = kwargs.get(\"title\")\n self.series_name = kwargs.get(\"series_name\")\n self.artist = kwargs.get(\"artist\")\n self.album = kwargs.get(\"album\")\n self.genre = kwargs.get(\"genre\")\n self.total_time = kwargs.get(\"total_time\")\n self.position = kwargs.get(\"position\")\n self.season_number = kwargs.get(\"season_number\")\n self.episode_number = kwargs.get(\"episode_number\")\n self.repeat = kwargs.get(\"repeat\")\n self.shuffle = kwargs.get(\"shuffle\")\n self.media_type = kwargs.get(\"media_type\")\n self.playback_rate = kwargs.get(\"playback_rate\")\n self.supported_commands = kwargs.get(\"supported_commands\")\n self.artwork = kwargs.get(\"artwork\")\n self.artwork_identifier = kwargs.get(\"artwork_identifier\")\n self.artwork_mimetype = kwargs.get(\"artwork_mimetype\")\n self.artwork_width = kwargs.get(\"artwork_width\")\n self.artwork_height = kwargs.get(\"artwork_height\")\n self.skip_time = kwargs.get(\"skip_time\")\n self.app_name = kwargs.get(\"app_name\")\n self.content_identifier = kwargs.get(\"content_identifier\")", "def __init__(self, title, storyline, image):\n self.title = title\n self.storyline = storyline\n self.poster_image_url = image", "def __init__(self):\n self.state_shape = None\n self.action_shape = None\n self.number_turns = 0\n self.td_loss_history = []\n self.moving_average_loss = []\n self.reward_history = []\n self.moving_average_rewards = 
[]\n self._episode_reward = 0", "def __init__(self, name, race, sex, age):\n self.Race = race\n self.Sex = sex\n self.Age = age\n self.Name = name", "def __init__(self):\n # initialize a bird to default values.\n self.set_instance_vars()\n\n # randomize some parameters, such as starting height\n self.pos_y = self.random_height()\n\n # tag each bird\n\n self.identifier = Bird.num_birds\n\n # create ai net for each bird\n self.initialize_ai()\n\n # increment Bird counter\n Bird.num_birds += 1\n\n # remember time of birth\n self.birth_time = 0", "def __init__(self):\n self.born = None\n self.sex = None\n self.dnp = None\n self.grid_queue = None\n self.name = None\n \n self.time_of_infection = np.Inf \n self.last_match = -np.Inf\n self.attributes = {}", "def __init__(self):\n self.name = '{0} {1}'.format(choice(stars), choice(self.__class__.planets))\n self.casteOrder = (list(self.__class__.castes))\n shuffle(self.casteOrder)\n self.tech = choice(self.__class__.techTiers)\n self.genesis = choice(self.__class__.genesisReasons)\n self.description = ''\n self.attributes = '{0} ~ ruled by {1} ~ founded to {2}'.format(self.tech, self.casteOrder[0], self.genesis)", "def __init__(self):\n\n self.dialogue_ids = self.__load_dialogue_ids(\"data/dialogue_ids.txt\")\n self.class_dict = self.__load_class_representation(\"data/class_vectors.txt\")", "def __init__(self, vars=None):\n if vars is None:\n vars = {}\n self.vars = vars", "def setUp(self):\n self.new_movie = Movie(1, 'Python is awesome', 'A whole new World', 'khsjha27hbs', 9, 777)", "def __init__(self):\n self.dataset_path = input('Enter the path to the root directory of your dataset:\\n')\n self.classes = [c.lower() for c in os.listdir(self.dataset_path)]\n self.year = str(datetime.datetime.now().year)\n self.kit_path = input(\"Enter the path ot your VOCdevkit directory:\\n\")\n self.annotation_path = self.kit_path + '/VOC' + self.year + '/Annotations'\n self.renamer = data_renamer.DataRenamer(self.dataset_path, self.year)\n self.data_splitter = data_splitter.DataSplitter(self.dataset_path, self.classes, self.year, self.kit_path)\n self.annotation_maker = annotation_maker.AnnotationMaker(self.dataset_path, self.kit_path, self.year,\n self.annotation_path)", "def __init__(self):\n super().__init__(SCREEN_WIDTH, SCREEN_HEIGHT, SCREEN_TITLE)\n self.player_count: int = None\n self.player_hand_0: arcade.SpriteList = None\n self.player_hand_1: arcade.SpriteList = None\n self.deck: arcade.SpriteList = None\n self.pile: arcade.SpriteList = None", "def __init__(self, *args, in_out=\"N\", rate=0):\n for arg in args:\n details=[]\n details.append(in_out)\n details.append(rate)\n self.videos[arg]=details", "def __init__(self):\n #MdvData. __init__(self,model.target_fragments)\n #self.mdv = {}\n self.mdvtc ={}\n self.mode = \"timecourse\"", "def __init__(self, variables):\n self._variables = variables", "def create_movielist():\n # Create the list of movies - let's pick 6\n movielist = []\n # title, box_art, url\n movielist.append(MovieMetadata(\"Toy Story\", \\\n \"I'm from Mattel. 
Well, I'm not really from Mattel, I'm actually \" \\\n \"from a smaller company that was purchased by Mattel in a leveraged \" \\\n \"buyout.\", \\\n \"http://ia.media-imdb.com/images/M/MV5BMTgwMjI4MzU5N15BMl5BanBnXkFtZ\" \\\n \"TcwMTMyNTk3OA@@._V1_SY317_CR12,0,214,317_AL_.jpg\", \\\n 'https://www.youtube.com/watch?v=KYz2wyBy3kc'))\n movielist.append(MovieMetadata(\"Avatar\", \\\n \"I was hoping for some kind of tactical plan that didn't involve \" \\\n \"martyrdom\", \\\n 'http://ia.media-imdb.com/images/M/MV5BMTYwOTEwNjAzMl5BMl5BanBnXk' \\\n 'FtZTcwODc5MTUwMw@@._V1_SY317_CR0,0,214,317_AL_.jpg', \\\n 'https://www.youtube.com/watch?v=cRdxXPV9GNQ'))\n movielist.append(MovieMetadata(\"The Princess Bride\", \\\n \"When I was your age, television was called books. And this is a \" \\\n \"special book. It was the book my father used to read to me when I \" \\\n \"was sick, and I used to read it to your father. And today I'm gonna\" \\\n \" read it to you.\", \\\n 'http://ia.media-imdb.com/images/M/MV5BMTkzMDgyNjQwM15BMl5BanBnXkFtZ' \\\n 'TgwNTg2Mjc1MDE@._V1_SY317_CR0,0,214,317_AL_.jpg', \\\n 'https://www.youtube.com/watch?v=GNvy61LOqY0'))\n movielist.append(MovieMetadata(\"Serenity\", \\\n \"Shiny. Let's be bad guys.\", \\\n 'http://ia.media-imdb.com/images/M/MV5BMTI0NTY1MzY4NV5BMl5BanBnXkFtZ' \\\n 'TcwNTczODAzMQ@@._V1_SY317_CR0,0,214,317_AL_.jpg', \\\n 'https://www.youtube.com/watch?v=JY3u7bB7dZk'))\n movielist.append(MovieMetadata(\"The Wizard of Speed and Time\", \\\n \"Miss Belair, if you feel compelled to grab part of my body and \" \\\n \"shake it before you can even be friendly, you've got far worse \" \\\n \"problems than you think I have.\", \\\n 'http://ia.media-imdb.com/images/M/MV5BODc3MzA3MDQyN15BMl5BanBnXkFtZ' \\\n 'TYwMzE2MTk5._V1_SX214_AL_.jpg', \\\n 'https://www.youtube.com/watch?v=3ldOTw60Ozg'))\n movielist.append(MovieMetadata(\"Inside Out\", \\\n \"Take her to the moon for me. 
Okay?\", \\\n 'http://ia.media-imdb.com/images/M/MV5BOTgxMDQwMDk0OF5BMl5BanBnXkFtZ' \\\n 'TgwNjU5OTg2NDE@._V1_SX214_AL_.jpg', \\\n 'https://www.youtube.com/watch?v=yRUAzGQ3nSY'))\n\n return movielist", "def __init__(self, name, typing, reflection, year):\n self.name = name\n self.typing = typing\n self.reflection = reflection\n self.year = year", "def __init__(self,card_name):\n self.mw_card=Cards.Card(card_name)\n self.info=self.mw_card.info\n for key,value in self.info.items():\n self[key]=value\n\n dict.__init__(self.info)\n self.check_info()\n #assign special value\n self.nb_event=int(self.info['mw_run']['2'])\n self.nb_card=self.number_of_P_run()\n self.check_condor()\n self.name=self.take_run_name()\n self.P_listdir,self.MW_listdir=self.detect_SubProcess()\n self.init_run_opt()\n self.def_actif_param()", "def __init__(self, n, t, s):\n\n self.name = n\n self.target = t\n self.start = s\n self.keyframes = []", "def __init__(self, **kwargs):\n\t\tself.vars = kwargs\n\t\tself.old_vars = None", "def __init__(self, init_month, init_day, init_year):\n # add the necessary assignment statements below\n self.month = init_month\n self.day = init_day\n self.year = init_year", "def __init__(self):\n self.monsters_images = pg.sprite.Group()\n self.font_23 = pg.font.Font(prepare.FONTS['Timeless-Bold'], 23)\n self.font_20 = pg.font.Font(prepare.FONTS['Timeless'], 20)\n self.font_18 = pg.font.Font(prepare.FONTS['Timeless'], 18)\n self.bold_font = pg.font.Font(prepare.FONTS['Timeless-Bold'], 17)\n self.font_15 = pg.font.Font(prepare.FONTS['Timeless'], 15)\n\n self.init_left_zone()\n self.init_middle_zone()\n self.init_right_zone()", "def __init__(self, player_name, player_number, player_position):\n self.name = player_name\n self.number = player_number\n self.position = player_position", "def __init__(self,typing,reflection,year):\n self.name = str(self)\n self.typing = typing\n self.reflection = reflection\n self.year = year", "def __init__(self):\n\t\tcherrylog (\"Created Media Object of type: \" + str(self.__class__.__name__))\n\t\tself.mongo_connection = Connection() #This gets closed by the parent destructor\n\t\tself.bettermedia = self.mongo_connection['BetterMedia'] # `BetterMedia` database\n\t\tself.image_collection = self.bettermedia.image # `image` collection\n\t\tself.scene_collection = self.bettermedia.scene\n\t\tself.my_collection = self.bettermedia.video # `video` collection\n\t\tself.attributes = {} #Holds the important data for this object. 
This will be persisted to the Mongo DB.\n\t\tself.doc_id = \"\" #Holds a reference to the mongo document ID", "def __init__(self, observations, actions, rewards):\n self.observations = observations\n self.actions = actions\n self.rewards = rewards", "def __init__(self):\n self.nombre_roues = 4\n self.nombre_fauteils = 1\n self.moteur = False\n self.volant = True", "def __init__(self):\n self.score = None\n self.avg_score = None\n self.std_dev = None\n self.scores = [] # list containing scores from each episode\n self.avg_scores = [] # list containing average scores after each episode\n self.scores_window = deque(maxlen=100) # last 100 scores\n self.best_avg_score = -np.Inf # best score for a single episode\n self.time_start = time.time() # track cumulative wall time\n self.total_steps = 0 # track cumulative steps taken\n self.writer = SummaryWriter(\"../results/\") # this is where tensorboard results are stored", "def __init__(self, name, isbn, director, actor, tags=None):\n super().__init__(name, isbn, tags)\n self.director = director\n self.actor = actor\n self.resource_type = \"DVD\"", "def __init__(self, name, variable, variable_info):\n self._name = name\n self.var_id = variable\n self.var_period = variable_info[0]\n self.var_type = variable_info[1]\n self.var_detail = variable_info[2]\n self.var_units = variable_info[3]\n self.var_icon = variable_info[4]\n self.var_state = None", "def __init__(self, video_id, base_path, path, rating=Video.P):\n self.video_id = video_id\n self.base_path = base_path\n self.file_path = path\n self.rating = rating", "def __init__(self, maxSize = 100):\n self.data = [None] * maxSize\n self.size = 0\n self.maxSize = maxSize\n\n self._ratingsAdded = [] # A 'private' property to store the ratings of movies already added", "def setMovieDetails(self, mov_dict, soup):\n\n def getPeopleNames(the_list):\n \"\"\" Retrieve a single name or list of names from Director/Writer/Actor in JSON file\n\n :param the_list: List: contains dictionary of persons or others\n :return: List: returns a list of String containing only names.\n \"\"\"\n new_list = []\n if type(the_list) == list:\n for person in the_list:\n if person['@type'] == \"Person\":\n new_list.append(person['name'])\n else:\n new_list.append(the_list['name'])\n return new_list\n\n def getCountry(soup):\n \"\"\" Extracts the country of origin for film by finding it in Details section of page\n\n :param soup: BeautifulSoup object: contains html code for web page\n :return: List: returns a list containing Strings of country/countries the film was produced in\n \"\"\"\n title_details = self.getAdditionalDetails(soup)\n pattern = r'country_of_origin.*?>(.*?)<'\n country = re.findall(pattern, str(title_details))\n return country\n\n self.title = mov_dict['name']\n\n ##### Date published is not film date released #####\n self.date = mov_dict['datePublished']\n self.year = self.date[0:4]\n\n self.duration = mov_dict['duration']\n\n self.age_rating = mov_dict['contentRating']\n\n self.genre = mov_dict['genre']\n\n self.country = getCountry(soup)\n\n directors = mov_dict['director']\n self.director = getPeopleNames(directors)\n\n writers = mov_dict['creator']\n self.writer = getPeopleNames(writers)\n\n actors = mov_dict['actor']\n self.actor = getPeopleNames(actors)\n\n self.imdb_rating = mov_dict['aggregateRating']['ratingValue']\n\n self.num_rates = mov_dict['aggregateRating']['ratingCount']\n\n self.description = mov_dict['description']\n self.keywords = mov_dict['keywords'].split(\",\")", "def __init__(self, env: 
gym.Env, eval_episodes: int, render_freq: int, \n fps: int, verbose=0):\n super().__init__(verbose=verbose)\n self.env = env\n self.eval_episodes = eval_episodes\n self.render_freq = render_freq\n self.fps = fps", "def __init__(self, name=\"team_name\", races=0, wins=0, drivers_championships_years=[], constructors_championships_years=[]):\n self.name = name\n self.races = races\n self.wins = wins\n self.drivers_championships_years = drivers_championships_years\n self.constructors_championships_years = constructors_championships_years", "def __init__(self, song_name, lyrics, valence):\n self.name = song_name\n self.lyrics = lyrics\n self.valence = valence\n self.lexD = None\n self.emotion = None\n self.sentiment = None\n self.color = None\n self.gloom = None", "def __init__(self, motors, sound_object):\n \n \n self.m = motors\n self.so = sound_object", "def __init__(self):\n\n # Load embeddings index\n self.embeddings = self.load()\n self.console = Console()", "def __init__(self):\n super().__init__()\n self._active = False\n # Counter, used in the animation\n self._time = 0\n # Store the current image id, initially it's 'default'\n self._image = 'default'", "def __init__(self):\n\n self.points = None\n self.centroid_activation_frames = None\n self.noiseless_frames = None\n self.frames = None", "def __init__(self):\n self.x = {}\n self.len = 0\n self.annotations = {}", "def __init__(self, path):\n self.path = path\n self.episode_id = []\n self.episode_set = []\n\n # Track existing dataset if it exists.\n color_path = os.path.join(self.path, 'color')\n if os.path.exists(color_path):\n for fname in sorted(os.listdir(color_path)):\n if '.pkl' in fname:\n num_samples = int(fname[(fname.find('-') + 1):-4])\n self.episode_id += [self.num_episodes] * num_samples\n\n self._cache = dict()\n\n # Only for goal-conditioned Transporters, if we want more goal images.\n self.subsample_goals = False", "def __init__(self, player):\n self.player = player\n player.career.seasons.append(self)\n self.team = player.team\n self.league = self.team.league\n self.year = self.team.cosmos.year", "def __init__(self, make, model, year):\r\n self.make = make\r\n self.model = model\r\n self.year = year", "def movie(movie_title, year=None):\n\n movie_data = Movie.get_movie_data(movie_title, year)\n\n title = movie_title\n plot = movie_data['plot']\n trailer_youtube_url = movie_data['youtube_url']\n poster_image_url = movie_data['poster']\n\n return Movie(title, plot, trailer_youtube_url,\n poster_image_url, movie_data)", "def __init__(self, storer, series, i):\n super(EpisodeWorker, self).__init__()\n self.storer = storer\n self.series = series # All series\n self.i = i", "def __init__(self, **variables):\n vars(self).update(variables)" ]
[ "0.7917893", "0.77630234", "0.768874", "0.766575", "0.7634527", "0.75942045", "0.7591259", "0.758567", "0.75574154", "0.75566876", "0.7539566", "0.75183886", "0.7495097", "0.7468395", "0.7448158", "0.74143714", "0.7385076", "0.73837626", "0.7375816", "0.7372074", "0.7233329", "0.7206931", "0.7127418", "0.70950556", "0.6972417", "0.69668716", "0.6906537", "0.6886535", "0.6880138", "0.6879678", "0.684578", "0.6811509", "0.6801796", "0.6796403", "0.67467636", "0.6699481", "0.66954386", "0.66478366", "0.65559626", "0.6483758", "0.6483758", "0.6474074", "0.6461593", "0.6425597", "0.63487506", "0.6285186", "0.6202022", "0.6199268", "0.6192823", "0.6191053", "0.6182104", "0.6167955", "0.6161975", "0.6134201", "0.6109898", "0.61086994", "0.6089238", "0.608774", "0.6080465", "0.6031", "0.6028011", "0.6019284", "0.6018595", "0.60166144", "0.6006931", "0.59985644", "0.59935606", "0.5964005", "0.59624046", "0.5954321", "0.5942925", "0.5930681", "0.5922383", "0.5919002", "0.59182286", "0.5911214", "0.59048754", "0.58980477", "0.5896664", "0.5891816", "0.58879316", "0.5885337", "0.5884277", "0.5875788", "0.58755714", "0.5873078", "0.5862447", "0.58591604", "0.5857004", "0.585003", "0.58490473", "0.58452433", "0.583337", "0.5827033", "0.5823081", "0.581935", "0.58171374", "0.5816867", "0.580745", "0.5803318" ]
0.77427244
2
Method for opening the trailer in the user's browser using the 'webbrowser' module.
def show_trailer(self):
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def open_browser(self):\n\n webbrowser.open(self.trailer_youtube_url)", "def show_trailer():\n webbrowser.open(self.trailer_url)", "def show_trailer(self):\n webbrowser.open(self.trailer_url)", "def show_trailer(self):\r\n webbrowser.open(self.trailer_youtube_url)", "def show_trailer(self):\n webbrowser.open(self.trailerlink)", "def show_trailer(self):\r\n\r\n webbrowser.open(self.trailer_youtube_url)", "def show_trailer(self):\r\n webbrowser.open(self.trailer_youtube_url)", "def show_trailer(self):\r\n webbrowser.open(self.trailer_youtube_url)", "def show_trailer(self):\r\n webbrowser.open(self.trailer_youtube_url)", "def show_trailer(self):\n webbrowser.open(self.trailer_youtube_url) # Open trailer in webbrowser", "def show_trailer(self):\n\t\twebbrowser.open(self.trailer_youtube_url)", "def show_trailer(self):\n\t\twebbrowser.open(self.trailer_youtube_url)", "def show_trailer(self):\n\t\twebbrowser.open(self.trailer_youtube_url)", "def show_trailer(self):\n webbrowser.open(self.trailer_youtube_url)", "def show_trailer(self):\n webbrowser.open(self.trailer_youtube_url)", "def show_trailer(self):\n webbrowser.open(self.trailer_youtube_url)", "def show_trailer(self):\n webbrowser.open(self.trailer_youtube_url)", "def show_trailer(self):\n webbrowser.open(self.trailer_youtube_url)", "def show_trailer(self):\n webbrowser.open(self.trailer_youtube_url)", "def show_trailer(self):\n webbrowser.open(self.trailer_youtube_url)", "def show_trailer(self):\n webbrowser.open(self.trailer_youtube_url)", "def show_trailer(self):\n webbrowser.open(self.trailer_youtube_url)", "def show_trailer(self):\n webbrowser.open(self.trailer_youtube_url)", "def show_trailer(self):\n webbrowser.open(self.trailer_youtube_url)", "def show_trailer(self):\n webbrowser.open(self.trailer_youtube_url)", "def show_trailer(self):\n webbrowser.open(self.trailer_youtube_url)", "def show_trailer(self):\n webbrowser.open(self.trailer_youtube_url)", "def show_trailer (self):\n webbrowser.open (self.trailer_youtube_url)", "def play_trailer(self):\n webbrowser.open(self.youtube_trailer_url)", "def show_trailer(self):\n\n webbrowser.open(self.trailer_youtube_url)", "def open_in_browser(self):\n webbrowser.open(self.url)", "def browse( self ):\n webbrowser.open(self.url())", "def open(url):\r\n webbrowser.open(url)", "def open_browser(url):\n import webbrowser\n webbrowser.open_new(url)", "def open_web_browser(url: str):\n Popen(web_browser + [url], stdout=DEVNULL, stderr=DEVNULL)", "def open(webpage_url):\n\twith youtube_dl.YoutubeDL(dict(forceurl=True)) as ydl:\n\t\tr = ydl.extract_info(webpage_url, download=False)\n\t\tmedia_url = r['formats'][-1]['url']\n\twebbrowser.open('googlechromes://' + media_url[8:] )", "def run(self, url=''):\n if url:\n webbrowser.open(url)", "def open_webpage(browser, url, case, version, package):\n browser_obj = Browser(browser, version, case, package, url)\n if browser == \"firefox\":\n firefox(browser_obj)\n elif browser == \"opera\":\n opera(browser_obj)\n elif package == \"chromium\":\n chromium(browser_obj)\n elif browser == \"ie\":\n iexplorer(browser_obj)\n elif browser == \"edge\":\n edge(browser_obj)", "def browser_open(story_id, arguments):\r\n\r\n story = load_story(story_id, arguments)\r\n\r\n webbrowser.open(story.url)", "def openurl(url):\n\n # Open the URL\n webbrowser.open(url)", "def browser_open(url):\n FNULL = open(os.devnull, 'w')\n subprocess.Popen([udata.browser, url], stdout=FNULL, stderr=subprocess.STDOUT )", "def web_view(self):\n try:\n 
webbrowser.open(\"https://editor.openeo.org/?server={}\".format(self.backend.url))\n except:\n pass\n # QWebEngineView, QWebView...", "def newwindow(url):\n\n # Open the URL\n webbrowser.open_new(url)", "def open_page(movies, output):\n\n # Output the file\n output_file = open(output, 'w')\n output_file.write(create_page_content(movies))\n output_file.close()\n\n # open the output file in the browser (in a new tab, if possible)\n url = os.path.abspath(output_file.name)\n webbrowser.open('file://' + url, new = 2)", "def _open_browser(self, single_doc_html):\n url = os.path.join(\"file://\", DOC_PATH, \"build\", \"html\", single_doc_html)\n webbrowser.open(url, new=2)", "def _open_browser(self, single_doc_html):\n url = os.path.join(\n \"file://\", DOC_PATH, \"build\", \"html\", single_doc_html\n )\n webbrowser.open(url, new=2)", "def newtab(url):\n\n # Open the URL\n webbrowser.open_new_tab(url)", "def open_doi(doi):\n webbrowser.open_new_tab(DOI_URL % doi)", "def browse():\n rino.browse.open()", "def preview():\n url = \"http://{}:{}\".format(_hostname, _port)\n webbrowser.open(url)", "def open_in_browser(filename):\n subprocess.call([\"firefox\", filename])", "def followlink(self, event):\n webbrowser.open(self.url)", "def __init__(self, movie_title, movie_storyline, poster_image, trailer_youtube):\n\n self.title = movie_title\n self.storyline = movie_storyline\n self.poster_image_url = poster_image\n self.trailer_youtube_url = trailer_youtube\n\n # Define a function to show the movie trailer\n # when a user clicks on the movie poster on the site\n\n \"\"\"\n Initializing instance for opening the youtube video\n\n :return: webbrowser to play thriller\n \"\"\"", "def on_OpenExplorer_clicked(self):\n # TODO: not implemented yet\n #raise NotImplementedError\n\n url=\"http://kfc.matrix.io\"\n\n self.browser.openurl(url)\n self.OnlyDisplay(f\"start {url}\")\n #MATRIXWebutil.open_new(url)\n #MATRIXWebutil.open_new_tab(url)", "def open_web_crawler_window(self, event):\n self.gui.open_web_crawler_window(self.root)", "def open_browser():\n def _open_browser():\n webbrowser.open('http://localhost:%s/%s' % (PORT, FILE))\n pass\n thread = threading.Timer(0.5, _open_browser)\n thread.start()", "def open_link(self) -> None:\n\n webbrowser.open_new(self.link)", "def open_url(name):\n url = localReadConfig.get_webServer(name)\n browser = open_browser()\n browser.get(url)\n return browser", "def open_browser():\n def _open_browser():\n if AIPS_WEBSERVER_HOST == \"localhost\":\n webbrowser.open(WEBSERVER_URL + '/%s' % FILE)\n thread = threading.Timer(0.5, _open_browser)\n thread.start()", "def show_browser(self) -> None:\n\n # set delete = False to avoid early delete when user open multiple plots.\n with NamedTemporaryFile(suffix=\".html\", delete=False) as tmpf:\n save(\n self.to_render,\n filename=tmpf.name,\n resources=CDN,\n title=\"DataPrep.EDA Report\",\n )\n webbrowser.open_new_tab(f\"file://{tmpf.name}\")", "def show(self):\n webopen(str(self))", "def visit_page_with_browser(visit_url):\n logger.warning(\n \"Opening an authorization web page in your browser; if it does not open, \"\n \"please open this URL: %s\",\n visit_url,\n )\n webbrowser.open(visit_url, new=1)", "def open_webpage_classifier_window(self, event):\n self.gui.open_webpage_classifier_window(self.root)", "def handle_new_window(event):\n url = event.GetURL()\n webbrowser.open(url)", "def open_chrome(url,chrome_path):\r\n webbrowser.register('chrome', None,webbrowser.BackgroundBrowser(chrome_path))\r\n 
webbrowser.get('chrome').open(url)", "def open_in_explorer(file):\n webbrowser.open(os.path.dirname(p['paths'][file]))", "def open(self):\n self.browser = self._browser()\n\n return self.browser", "def aboutThermAP():\n import webbrowser\n url = os.path.join(progpath, \"ThermAP_presentation.pdf\")\n if platform.system() == \"Darwin\":\n webbrowser._browsers['safari'][1].open(url)\n else:\n webbrowser.open(url)", "def open_web_browser(whac_config: WhacConfig) -> None:\n if whac_config.open_web_browser:\n browser = webbrowser.get('chrome')\n browser.open('http://localhost:' + str(whac_config.host_port), new=2, autoraise=True)", "def gotoWeb(self,page:str)->None:\n if page=='repo':\n webbrowser.open('http://github.com/ivan866/readTobiiGlasses')\n elif page=='wiki':\n webbrowser.open('http://github.com/ivan866/readTobiiGlasses/wiki')\n elif page=='glasses2API':\n webbrowser.open('http://tobiipro.com/product-listing/tobii-pro-glasses-2-sdk/')\n elif page=='coordSys':\n webbrowser.open('http://developer.tobiipro.com/commonconcepts.html')", "def webview_file(self, fileid, urlbase=None):\n\n import webbrowser\n\n if urlbase:\n path = urlbase + \"/\" + fileid\n else:\n full = self.root + \"/\" + fileid\n full = re.sub(r\"\\\\\", \"/\", full)\n if \"/childes/\" in full.lower():\n # Discard /data-xml/ if present\n path = re.findall(r\"(?i)/childes(?:/data-xml)?/(.*)\\.xml\", full)[0]\n elif \"eng-usa\" in full.lower():\n path = \"Eng-USA/\" + re.findall(r\"/(?i)Eng-USA/(.*)\\.xml\", full)[0]\n else:\n path = fileid\n\n # Strip \".xml\" and add \".cha\", as necessary:\n if path.endswith(\".xml\"):\n path = path[:-4]\n\n if not path.endswith(\".cha\"):\n path = path + \".cha\"\n\n url = self.childes_url_base + path\n\n webbrowser.open_new_tab(url)\n print(\"Opening in browser:\", url)\n # Pausing is a good idea, but it's up to the user...\n # raw_input(\"Hit Return to continue\")", "def show_poster(self):\n\t\twebbrowser.open(self.poster_image_url)", "def open_in_web(self):\n self._client_api._open_in_web(url=self.platform_url)", "def open_news_url(self, url):\n\n try:\n if not webbrowser.open_new_tab(url):\n raise webbrowser.Error\n except webbrowser.Error:\n print('Unable to open a web browser, try accessing this URL manually instead:\\n{0}'.format(url))", "def mainWebActions(self, **kwargs):\n # If the dictionary item value is the required opens the webpage\n if kwargs['button']=='docs':\n # Only 1 click at every 5 seconds\n self.docs_Button.setDown(True)\n QTimer.singleShot(5000, lambda: self.docs_Button.setDown(False))\n webbrowser.open('https://italorenan.gitbook.io/roc/')", "def open_file_browser(path: str):\n call(file_browser + [path])", "def open_link(self):\n try:\n # webbrowser.open(self.url) # if you are on Windows OS\n webbrowser.get('safari').open_new_tab(self.url) # if you are on Mac OS\n except(AttributeError):\n self.ids.label.text = self.error_msg", "def webBrowser(*args, annotation: Union[AnyStr, bool]=\"\", back: bool=True, backgroundColor:\n Union[List[float, float, float], bool]=None, defineTemplate: AnyStr=\"\", docTag:\n Union[AnyStr, bool]=\"\", dragCallback: Script=None, dropCallback: Script=None,\n enable: bool=True, enableBackground: bool=True, enableKeyboardFocus: bool=True,\n exists: bool=True, find: AnyStr=\"\", forward: bool=True, fullPathName: bool=True,\n height: Union[int, bool]=0, highlightColor: Union[List[float, float, float],\n bool]=None, home: bool=True, isObscured: bool=True, manage: bool=True,\n matchCase: bool=True, matchWholeWorld: bool=True, noBackground: 
bool=True,\n numberOfPopupMenus: bool=True, openURL: Union[AnyStr, bool]=\"\", parent:\n Union[AnyStr, bool]=\"\", popupMenuArray: bool=True, preventOverride: bool=True,\n reload: bool=True, searchForward: bool=True, statusBarMessage: AnyStr=\"\", stop:\n bool=True, urlChangedCb: AnyStr=\"\", useTemplate: AnyStr=\"\", visible: bool=True,\n visibleChangeCommand: Union[Script, bool]=None, width: Union[int, bool]=0, wrap:\n bool=True, q=True, query=True, e=True, edit=True, **kwargs)->Union[AnyStr, Any]:\n pass", "def open(self, index):\n\n index = int(index.strip())\n index -= 1\n section = self.program.state.last_viewed\n storyid = getattr(self.program.state, section)[index]\n data = self.program.state.stories[storyid]\n webbrowser.open(data['url'])", "def browser(self):\n return", "def open_movies_page(movies):\n # Create or overwrite the output file\n output_file = open('fresh_tomatoes.html', 'w')\n directors_list = create_dropdown_list_directors(movies)\n years_list = create_dropdown_list_years(movies)\n movies.sort(key=lambda x: x.rating, reverse=True)\n # Replace the movie tiles placeholder generated content\n content = create_movie_tiles_content(movies)\n rendered_content = main_page_content.format(\n movie_tiles= content,\n years = years_list,\n directors = directors_list)\n\n \n # Output the file\n output_file.write(main_page_head + rendered_content+main_page_cont)\n output_file.close()\n\n # open the output file in the browser (in a new tab, if possible)\n url = os.path.abspath(output_file.name)\n webbrowser.open('file://' + url, new=2)", "def _about_dialogue(self):\n webbrowser.open('https://github.com/ldrumm/yubikey-totp-gui')", "def open_browser_window(url):\n logger.debug('about to open url \"{url}\" in browser \"{browser}\"'.format(url=url, browser=BROWSER_NAME))\n browser = webbrowser.get(BROWSER_NAME)\n browser.open(url, new=(1 if BROWSER_NEW_WINDOW else 2))", "def open_url(url):\n logger.debug('Opening %s', url)\n _stderr = os.dup(2)\n os.close(2)\n _stdout = os.dup(1)\n os.close(1)\n fd = os.open(os.devnull, os.O_RDWR)\n os.dup2(fd, 2)\n os.dup2(fd, 1)\n try:\n webbrowser.open(url)\n finally:\n os.close(fd)\n os.dup2(_stderr, 2)\n os.dup2(_stdout, 1)", "def show_browser(self) -> None:\n\n # set delete = False to avoid early delete when user open multiple plots.\n with NamedTemporaryFile(suffix=\".html\", delete=False) as tmpf:\n pass\n with open(tmpf.name, \"w\") as file:\n file.write(self.template_base.render(context=self.context))\n webbrowser.open_new_tab(f\"file://{tmpf.name}\")", "def open(self, event=None, url=None):\n url = url or self.server.url\n try:\n import webbrowser\n webbrowser.open(url)\n except ImportError: # pre-webbrowser.py compatibility\n if sys.platform == 'win32':\n os.system('start \"%s\"' % url)\n elif sys.platform == 'mac':\n try:\n import ic\n ic.launchurl(url)\n except ImportError: pass\n else:\n rc = os.system('netscape -remote \"openURL(%s)\" &' % url)\n if rc: os.system('netscape \"%s\" &' % url)", "def openSite(url):\n\timport webbrowser\n\twebbrowser.open('http://www.' 
+ url + '.com', 2)", "def cli(repo, milestone):\n webbrowser.open_new(repo.milestone(milestone).data[\"html_url\"])", "def createWindow(self, type):\n # this = Browser(self.url())\n # this.show()\n\n self.popup = SequanixQWebView(**self.kwargs)\n self.popup.setObjectName(\"web_content\")\n self.popup.setWindowTitle(\"Sequana browser\")\n self.popup.page().windowCloseRequested.connect(self.popup.close)\n self.popup.show()\n return self.popup", "def browse(self):\n print(\"browse \" + self.tb_url.text())\n url = QtCore.QUrl.fromUserInput(self.tb_url.text())\n print(str(url))\n# self.html.setUrl(url)\n self.html.load(url)", "def view_file(self, hash):\n path = self.tree.find(hash)\n if sys.platform == 'darwin':\n # The webrowser module uses Preview for pdf files and Preview sets\n # the quarantine xattr whenever it is opens a file. So far, it\n # seems to work to just clear the quarantine xattr before opening\n # the file.\n subprocess.call(['xattr', '-c', path])\n webbrowser.open_new_tab('file://%s'%path)", "def open_htm(self):\n import webbrowser\n\n html, *_ = self.simulation_dir.files(\"*.htm\")\n\n webbrowser.open(html.abspath())", "def go_to_url(self, url):\n if self.browser is not None:\n self.browser.get(url)\n else:\n print('Browser is not running')", "def setup(self, url, browser_config):\n\n # navigate to the front page\n browser.open_url(url)", "def open_link(self):\n try:\n webbrowser.open(self.url)\n except:\n self.ids.link.text=self.link_message", "def open_browser():\n browser = webdriver.Chrome()\n # maximize the window\n browser.maximize_window()\n return browser", "def runBrowser(driver, url):\n\tdriver.get(url)\n\ttime.sleep(3) #REACT app need to sleep and wait app load.\n\tall_links=driver.execute_script('all_links = []; links = document.querySelectorAll(\".style-module--action--1Avvt>a\"); links.forEach(url => all_links.push(url.href)); return all_links');\n\tbar = IncrementalBar('📥 Icons Downloaded', max = len(all_links))\n\t\n\tfor i, link in enumerate(all_links):\n\t\tdriver.execute_script('''window.open(\"'''+link+'''\",\"_blank\");''')\n\t\tbar.next()\n\tprint('\\n')\n\tdriver.close()\n\tMessage.success('🎉 Download done!')", "def button1_press(self):\n\n ext = nuke_link(str(self.lineEdit.text()))\n url = 'https://learn.foundry.com/nuke/developers/70/pythonreference/{}'.format(ext)\n webbrowser.open(url)", "def open_movies_page(movies):\n # Replace the placeholder for the movie tiles with the actual dynamically generated content\n movie_tiles = create_movie_tiles_content(movies)\n\n # Wrap the header and footer content around the movie tiles\n with open('templates/header.html', 'r') as header, open('templates/footer.html', 'r') as footer:\n content = header.read() + movie_tiles + footer.read()\n\n # Create or overwrite the output file\n with open('index.html', 'w') as output_file:\n output_file.write(content)\n\n # open the output file in the browser\n url = os.path.abspath(output_file.name)\n webbrowser.open('file://' + url, new=2) # open in a new tab, if possible", "def perform_as(self, the_actor: Actor) -> None:\n browser = the_actor.ability_to(BrowseTheWeb).browser\n if self.target is None:\n browser.switch_to.default_content()\n else:\n browser.switch_to.frame(self.target.found_by(the_actor))", "def __init__(self, newbrowser=None):\n # Initialize Cookies\n CHandler = urllib2.HTTPCookieProcessor(cookielib.CookieJar())\n self.newbrowser = urllib2.build_opener(CHandler)\n self.newbrowser.addheaders = [\n ('User-agent', 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:19.0)
Gecko/20100101 Firefox/19.0')]\n urllib2.install_opener(self.newbrowser)\n self.error_dict={} # to be returned by get_html if any thing goes wrong" ]
[ "0.81188226", "0.79440135", "0.7848126", "0.7815112", "0.77381235", "0.7584801", "0.754971", "0.754971", "0.754971", "0.75377965", "0.7463172", "0.7463172", "0.7463172", "0.7461554", "0.7461554", "0.7461554", "0.7461554", "0.7461554", "0.7461554", "0.7461554", "0.7461554", "0.7461554", "0.7461554", "0.7461554", "0.7461554", "0.7461554", "0.7461554", "0.74502826", "0.7416435", "0.7398521", "0.71061224", "0.7092521", "0.70571053", "0.68372864", "0.67472315", "0.67287034", "0.6572051", "0.6536375", "0.65328586", "0.651573", "0.6459828", "0.6354419", "0.6317341", "0.62198246", "0.6219379", "0.61938614", "0.619036", "0.61699057", "0.61084837", "0.6105659", "0.60900897", "0.60884035", "0.6064078", "0.6040811", "0.59897673", "0.5976517", "0.5936283", "0.5915765", "0.58957654", "0.5892107", "0.5889887", "0.58842283", "0.5877391", "0.58710486", "0.5866525", "0.5843475", "0.58383626", "0.58121777", "0.580337", "0.5803035", "0.5778521", "0.5754821", "0.57348394", "0.57175285", "0.5716547", "0.5707277", "0.56868917", "0.5667401", "0.565487", "0.56302184", "0.56272644", "0.561164", "0.5589592", "0.5589188", "0.5588398", "0.5554671", "0.55399835", "0.5537873", "0.55253464", "0.5525291", "0.55220616", "0.5519966", "0.54638356", "0.5462013", "0.54518944", "0.54406136", "0.5436121", "0.54238784", "0.54158074", "0.541025", "0.5409577" ]
0.0
-1
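
For context on the show_trailer record above: a minimal, self-contained sketch of the pattern its query describes — opening a stored trailer URL with Python's standard-library webbrowser module. The Movie class name and the trailer_youtube_url attribute are assumptions drawn from the negative snippets, not from the gold document itself, which contains only the function signature.

import webbrowser


class Movie:
    """Minimal movie container holding only what the trailer demo needs."""

    def __init__(self, title, trailer_youtube_url):
        self.title = title
        # assumed attribute name, mirroring the negative snippets
        self.trailer_youtube_url = trailer_youtube_url

    def show_trailer(self):
        # open the trailer in the user's default browser
        webbrowser.open(self.trailer_youtube_url)


if __name__ == "__main__":
    movie = Movie("Toy Story", "https://www.youtube.com/watch?v=KYz2wyBy3kc")
    movie.show_trailer()
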
Returns the source that matches the user-provided source_id or display_name.
def ExtractMatchingSourceFromResponse(response, args):
    for source in response:
        if ((args.source and source.name.endswith(args.source)) or
                (args.source_display_name and
                 source.displayName == args.source_display_name)):
            return source
    raise core_exceptions.Error(
        "Source: %s not found." %
        (args.source if args.source is not None else args.source_display_name))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def find_source(self, name):\n t = filter( lambda x: x.name==name, self.point_sources+self.extended_sources)\n return t[0] if len(t)==1 else None", "def get_source(source_name):\n if source_name == \"SCHOLAR_CENSUS\":\n from mec_data.source.scholar import ScholarSource\n\n return ScholarSource()\n elif source_name == \"UNIVERSITY_CENSUS\":\n from mec_data.source.university import UniversitySource\n\n return UniversitySource()", "def getSourceForId(context, identifier):\n nearest = getattr(context, identifier, None)\n if IExternalSource.providedBy(nearest):\n return nearest\n return None", "def get_source_name(self, source_id: str) -> str:\n if not self._source_list_map:\n return \"\"\n if source_id.upper() == DIGITAL_TV.upper():\n source_id = \"dtv\"\n for map_value in self._source_list_map:\n map_id = map_value.get(\"id\")\n if map_id and map_id == source_id:\n return map_value.get(\"name\", \"\")\n return \"\"", "def source(self):\n for source in self.coordinator.data.sources:\n if source.SourceID == self.zone.SourceID:\n return source.Name\n return None", "def by_source(self, source):\n return self.filter(source_object=source)", "def source_id(self) -> Optional[str]:\n return pulumi.get(self, \"source_id\")", "def get_source(self, name):\n return self._sources[name]", "def get_source(self) -> Optional[SourceIdentifierParameter]:\n return self.get_param_by_type(SourceIdentifierParameter)", "def get_source(self, source_name: str) -> Optional[Tag]:\n # sanitize the name, this will also add index if there isn't one\n source_name, *_ = Tags._sanitize_tag(source_name, 0, 0)\n return next(self.tags.filter(name=source_name, valid=None), None)", "def _get_source(self, uri: str) -> Optional[_Source]:\n\n for source in self._sources:\n if uri == source.uri:\n return source\n\n return None", "def source(self) -> Optional[str]:\n return pulumi.get(self, \"source\")", "def source():\n\n source = models.Source(name=u\"Joe's Funerals.com\", url=u\"http://www.joesfunerals.com\")\n return source", "def get_source(self) -> Optional[str]:\n return self._source", "def find_data_source_url(a_name, url_prefs):\n for row in url_prefs:\n if row[0] == a_name:\n return row[1]\n return None", "def get_media_source_id(self, source_name):\n\t\tvalidation.required(source_name, 'source_name')\n\n\t\treturn self.media_sources.get(source_name, 1)", "def _get_source(self, source_id):\n logging.debug(\"Getting entity for source_id %s\", source_id)\n if self.trace.has_item(source_id):\n return self.trace.get_item(source_id)\n\n source_components = source_id.split('/')\n if len(source_components) not in [2, 4]:\n logging.error(\n \"Expecting source with either 2 or 4 components, got %s\",\n source_id)\n return None\n\n if not re.match(\"[0-9]+\", source_components[0]):\n logging.error(\n \"Expecting source beginning with item ID, got %s\",\n source_components[0])\n return None\n\n if len(source_components) == 2:\n source_item_id = source_components[0]\n well = source_components[1]\n\n pattern = r\"\\[\\[([0-9]+),[ \\t]*([0-9]+)\\]\\]\"\n match = re.match(pattern, source_components[1])\n if match:\n well = well_coordinates(int(match[1]), int(match[2]))\n\n elif len(source_components) == 4:\n source_item_id = source_components[1]\n well = source_components[3]\n\n source_item_entity = self.factory.get_item(item_id=source_item_id)\n\n if not source_item_entity.is_collection():\n msg = \"Ignoring source part %s from non-collection %s\"\n logging.info(msg, well, source_item_id)\n return source_item_entity\n\n 
source_part_entity = self.factory.get_part(\n collection=source_item_entity,\n well=well\n )\n\n return source_part_entity", "def getSourceName(self, instance):\n mapping = IAnnotations(instance).setdefault(\n 'collective.table',\n PersistentMapping()\n )\n return mapping.get('source_name', self.defaultSourceName)", "def data_source_display_name(self, data_source_display_name):\n\n self._data_source_display_name = data_source_display_name", "def get_source(self, source, driver_name=None):\n if not driver_name:\n driver_name = self.driver_name\n driver = ogr.GetDriverByName(driver_name)\n return driver.Open(source, 0)", "def get_source(self) -> CopyPasteSource:\n mode = self.new_or_existing.currentIndex()\n\n if mode == 0:\n name = self.name_widget.name.text()\n\n description = self.name_widget.description.text()\n if not description:\n description = None\n\n return self.source.create_source(self.context, name, description)\n\n sources = self.table.get_selected_sources()\n\n return sources[0]", "async def async_select_source(self, source):\n source = next((g[\"id\"] for g in self._galleries if g[\"name\"] == source), None)\n if source is None:\n _LOGGER.warning(\"Source %s not found\", source)\n await self.local_meural.send_change_gallery(source)", "def show_source_page(sourceid=None):\n uuid = request.args.get(\"uuid\", sourceid)\n if not uuid:\n return redirect(url_for(\"virhesivu\", code=1, text=\"Missing Source key\"))\n u_context = UserContext(user_session, current_user, request)\n try:\n with SourceReader(\"read\", u_context) as service:\n # reader = SourceReader(readservice, u_context)\n res = service.get_source_with_references(uuid, u_context)\n\n if res[\"status\"] == Status.NOT_FOUND:\n msg = res.get(\"statustext\", _(\"No objects found\"))\n flash(msg, \"error\")\n if res[\"status\"] != Status.OK:\n flash(f'{res.get(\"statustext\", _(\"error\"))}', \"error\")\n\n stk_logger(\n u_context, f\"-> bp.scene.routes.show_source_page n={len(res['citations'])}\"\n )\n\n except KeyError as e:\n msg = f\"bp.scene.routes.show_source_page: {e.__class__.__name__} {e}\"\n flash(f'{ _(\"Program error\")}', \"error\")\n logger.error(msg)\n\n # for c in res.citations:\n # for i in c.citators:\n # if i.id[0] == \"F\": print(f'{c} – family {i} {i.clearname}')\n # else: print(f'{c} – person {i} {i.sortname}')\n return render_template(\n \"/scene/source_events.html\",\n source=res[\"item\"],\n citations=res[\"citations\"],\n user_context=u_context,\n )", "def get_source(self):\n return self.source", "def source(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"source\")", "def source_id(self) -> str:\n return self._source_id", "def source_id(self) -> str:\n return self._source_id", "def grab_external_id(stix_object, source_name):\n for external_reference in stix_object.get(\"external_references\", []):\n if external_reference.get(\"source_name\") == source_name:\n return external_reference[\"external_id\"]", "def _fetch_source_objects(source_objects, source_type, name=None):\n try:\n nodes = source_objects[0].nodes\n for node in nodes:\n if node.get(\"nodes\", []):\n nodes.extend(node[\"nodes\"])\n else:\n if node[\"protectionSource\"][\"vmWareProtectionSource\"][\n \"type\"] == source_type:\n obj_name = node[\"protectionSource\"][\"name\"]\n if not name:\n return node[\"protectionSource\"][\"id\"]\n elif name and name == obj_name:\n return node[\"protectionSource\"][\"id\"]\n except APIException as err:\n return str(err)", "def source_identifier(self) -> str:\n return pulumi.get(self, 
\"source_identifier\")", "def source_identifier(self) -> str:\n return pulumi.get(self, \"source_identifier\")", "def sourcename(user_id):\n return SourceIndex.instance().name(user_id)", "def source_id(self):\n return self._source_id", "def _single_data_source(self) -> DataSource:\n data_source = None\n for meta_column in self._meta_columns:\n if data_source is None:\n data_source = meta_column.data_source\n elif data_source is not meta_column.data_source:\n raise SomeError('Mixed data sources are not supported')\n if data_source is None:\n raise SomeError('The column list provides no data source')\n return data_source", "def give_source(self):\n has_src, src_sobj = self.get_sobj().ReferencedObject()\n if has_src:\n return self.__class__(self._std, self._bld, src_sobj.GetID())", "def get_from_sources(self,index,doc_type,document_id):\n return self.sources.get(index, {}).get(doc_type, {}).get(document_id,{})", "def source_resource_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"source_resource_id\")", "def source_resource_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"source_resource_id\")", "def get_source(self):\n\t\treturn self.source.get_source()", "def get_source_class_id(self, class_id, source):\n info = self.class_info[class_id]\n assert info['source'] == source\n return info['id']", "def get_source_class_id(self, class_id, source):\n info = self.class_info[class_id]\n assert info['source'] == source\n return info['id']", "def source_type(self) -> Optional[str]:\n return pulumi.get(self, \"source_type\")", "def card_selSource(self, **kwargs):\n if \"cardsrc\" in kwargs:\n if kwargs[\"cardsrc\"] == \"hand\":\n selectfrom = self.piles[Piles.HAND]\n elif kwargs[\"cardsrc\"] == \"played\":\n selectfrom = self.piles[Piles.PLAYED]\n elif kwargs[\"cardsrc\"] == \"discard\":\n selectfrom = self.piles[Piles.DISCARD]\n else:\n selectfrom = kwargs[\"cardsrc\"]\n else:\n selectfrom = self.piles[Piles.HAND]\n return selectfrom", "def Source(self):\r\n\t\treturn self._get_attribute('source')", "def source(self) -> str:\n return pulumi.get(self, \"source\")", "def source(self) -> str:\n return pulumi.get(self, \"source\")", "def source(self) -> str:\n return pulumi.get(self, \"source\")", "def source(self) -> str:\n return pulumi.get(self, \"source\")", "def source(self) -> str:\n return pulumi.get(self, \"source\")", "def source(self) -> str:\n return pulumi.get(self, \"source\")", "def get_source_po(self,name):\n sources = self._source_POs()\n\n for po in sources:\n if name in po.params():\n return po\n\n\n raise AttributeError(self._attr_err_msg(name,sources))", "def by_external_id_and_provider(cls, external_id, provider_name, db_session=None):\n db_session = get_db_session(db_session)\n query = db_session.query(cls.model)\n query = query.filter(cls.model.external_id == external_id)\n query = query.filter(cls.model.provider_name == provider_name)\n return query.first()", "def getSourceForRecipesByUser(cls, user=None): \n\n if user:\n sources = (db.session.query(Recipe.source).join(RecipeUser).\\\n filter(Recipe.recipe_id == RecipeUser.recipe_fk).\\\n filter(RecipeUser.user_fk == user).\\\n filter(Recipe.source != None).\\\n distinct().order_by(Recipe.source))\n else:\n sources = (db.session.query(Recipe.source).\\\n filter(Recipe.source != None).\\\n distinct().order_by(Recipe.source))\n\n \n return sources", "def process_source(self):\n source_col = getattr(self.model_cls, self.source)\n return source_col", "def load_data_source(data_source):\n 
source_module = __import__('source_'+data_source)\n get_source = getattr(source_module, 'get_source')\n return get_source()", "def friendly_source_name(name):\n known_names = dict(\n blastprodom=\"BlastProDom\",\n fprintscan=\"FPrintScan\",\n gene3d=\"Gene3D\",\n hamap=\"HAMAP\",\n hmmpir=\"HMMPIR\",\n hmmpanther=\"HMMPanther\",\n hmmpfam=\"HMMPfam\",\n hmmsmart=\"HMMSmart\",\n hmmtigr=\"HMMTIGR\",\n patternscan=\"PatternScan\",\n profilescan=\"ProfileScan\",\n superfamily=\"SUPERFAMILY\"\n )\n return known_names.get(name.lower(), name)", "def get_virtualsource(self, name):\n return self._auraliser.get_object(name)", "def get_user_by_display_name(self, display_name: str) -> typing.Optional[User]:\n query_params = {\n \"$select\": \",\".join(\n [\"displayName\", \"id\", \"mail\", \"department\", \"companyName\"]\n ),\n \"$search\": f'\"displayName:{display_name}\"',\n \"$top\": 1,\n \"$orderby\": \",\".join([\"displayName\"]),\n }\n headers = {\n \"ConsistencyLevel\": \"eventual\",\n }\n\n request = self._prepare_request(\n method=\"get\",\n resource_path=\"users\",\n query_params=query_params,\n headers=headers,\n )\n with requests.Session() as session:\n response = session.send(request=request)\n try:\n response.raise_for_status()\n except requests.exceptions.HTTPError as exception:\n if response.status_code == 400:\n return None\n raise exception\n users = response.json()\n\n return User.from_dict(**users[\"value\"][0]) if users.get(\"value\") else None", "def source(self) -> Optional[pulumi.Input[Union['BuildResultUserSourceInfoArgs', 'CustomContainerUserSourceInfoArgs', 'JarUploadedUserSourceInfoArgs', 'NetCoreZipUploadedUserSourceInfoArgs', 'SourceUploadedUserSourceInfoArgs', 'UploadedUserSourceInfoArgs']]]:\n return pulumi.get(self, \"source\")", "def _GetVisitSource(self, visit_identifier, cache, database):\n sync_cache_results = cache.GetResults('sync')\n if not sync_cache_results:\n result_set = database.Query(self._SYNC_CACHE_QUERY)\n\n cache.CacheQueryResults(result_set, 'sync', 'id', ('source',))\n sync_cache_results = cache.GetResults('sync')\n\n if sync_cache_results and visit_identifier:\n results = sync_cache_results.get(visit_identifier, None)\n if results:\n return results[0]\n\n return None", "def get_user_readable(user_id, display_format):\n s = ''\n try:\n user = User.objects.get(id=user_id)\n\n if display_format == 'full_name':\n s = \"{} {}\".format(user.first_name, user.last_name)\n elif display_format == 'username':\n s = user.username\n elif display_format == 'first_name':\n s = user.first_name\n elif display_format == '*':\n s = '{} {} ({})'.format(user.first_name, user.last_name, user.username)\n except User.DoesNotExist:\n pass\n\n return s", "def get_source_type(import_file, source_type=''):\n\n # TODO: move source_type to a database lookup. 
Right now it is hard coded\n source_type_str = getattr(import_file, 'source_type', '') or ''\n source_type_str = source_type or source_type_str\n source_type_str = source_type_str.upper().replace(' ', '_')\n\n return getattr(models, source_type_str, ASSESSED_RAW)", "def source_owner(self) -> str:\n return pulumi.get(self, \"source_owner\")", "def source_owner(self) -> str:\n return pulumi.get(self, \"source_owner\")", "def _extract_first_from(name, sources):\n for i, source in enumerate(sources):\n if not source:\n continue\n if name in source:\n return (i, source[name])\n raise KeyError(name)", "def user_by_external_id_and_provider(\n cls, external_id, provider_name, db_session=None\n ):\n db_session = get_db_session(db_session)\n query = db_session.query(cls.models_proxy.User)\n query = query.filter(cls.model.external_id == external_id)\n query = query.filter(cls.model.provider_name == provider_name)\n query = query.filter(cls.models_proxy.User.id == cls.model.local_user_id)\n return query.first()", "def source(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"source\")", "def external_id_source(self):\n return self._external_id_source", "def getUserShow(self, **kwargs):\n screen_name = kwargs.get('screen_name')\n user_id = kwargs.get('user_id')\n include_entities = kwargs.get('include_entities')\n \n return self.getUsersLookup(screen_name=screen_name, user_id=user_id, include_entities=include_entities)", "def set_source(self, source_name):\n self.source = source_name", "def getNewsSourceUri(self, sourceName: str, dataType: Union[str, List[str]] = [\"news\", \"pr\", \"blog\"]):\n matches = self.suggestNewsSources(sourceName, dataType = dataType)\n if matches != None and isinstance(matches, list) and len(matches) > 0 and \"uri\" in matches[0]:\n return matches[0][\"uri\"]\n return None", "def get_subsource(self, name):\n return self._auraliser.get_object(name)", "def external_source(self):\n if \"externalSource\" in self._prop_dict:\n if isinstance(self._prop_dict[\"externalSource\"], OneDriveObjectBase):\n return self._prop_dict[\"externalSource\"]\n else :\n self._prop_dict[\"externalSource\"] = EducationExternalSource(self._prop_dict[\"externalSource\"])\n return self._prop_dict[\"externalSource\"]\n\n return None", "def sources(source):\n\n source2 = models.Source(name=u\"Bob's Funerals.com\", url=u\"http://www.bobsfunerals.com\")\n source3 = models.Source(name=u\"Jim's Funerals.com\", url=u\"http://www.jimsfunerals.com\")\n return (source, source2, source3)", "def get(self, cls, name, data_source, **attr):\n #ds = self._default_DataSource if data_source is None else data_source\n if data_source is None:\n cache = self._cache.setdefault(cls, {})\n else:\n cache = self._cache.setdefault(data_source._id, {}).setdefault(cls, {})\n\n try:\n return cache[name]\n except KeyError:\n if cls in ['Neuron', 'NeuronFragment', 'NeuronAndFragment', 'Synapse', 'InferredSynapse']:\n q = self._find(cls, data_source, uname = name)\n else:\n q = self._find(cls, data_source, name = name)\n if len(q) == 1:\n obj = q.node_objs[0]\n if data_source is None:\n tmp = q.owned_by(cls = 'DataSource', cols = '@rid')\n if len(tmp) == 1:\n ds_rid = list(tmp.nodes)[0].oRecordData['rid'].get_hash()\n self.set(cls, name, obj, ds_rid)\n elif len(tmp) > 1:\n raise ValueError('unexpected more than 1 DataSource found')\n else:\n self.set(cls, name, obj, None)\n else:\n self.set(cls, name, obj, None)\n elif len(q) > 1:\n raise DuplicateNodeError('Hit more than one instance of {} with name {} in database.'.format(cls, 
name))\n else:\n raise RecordNotFoundError('{} {} not found in database.'.format(cls, name))\n return obj", "def get(cls, external_id, local_user_id, provider_name, db_session=None):\n db_session = get_db_session(db_session)\n return db_session.query(cls.model).get(\n [external_id, local_user_id, provider_name]\n )", "def image_reference(self, image_id):\n info = self.image_info[image_id]\n if info['source'] == 'local':\n return info['source']\n else:\n super(self.__class__).image_reference(self, image_id)", "def data_source(self, label):\r\n return datasource.Datasource(self.apikey_or_username, label)", "def source(self):\n return some.dap.source(\"<string>\")", "def getSource():", "def getSubmitter(self, source):\n submitterStart = source.find('user?id=')\n realSubmitterStart = source.find('=', submitterStart) + 1\n submitterEnd = source.find('\"', realSubmitterStart)\n return source[realSubmitterStart:submitterEnd]", "def _source_filter(self):\n param_id = self._detect_source_params()\n cls_str = self._detect_source_param_class(param_id)\n if cls_str is None:\n raise ProfileError(\"parameter '%s' isn't defined in config\" %\n param_id)\n else:\n self.logger.debug(\"==> source objects class is '%s'\" % cls_str)\n cls = globals()[cls_str]\n instance = cls(param_id, \"$\", self.config[\"source\"][\"objects\"])\n return instance.get_pattern()", "def get_source_player(\n self, playertype: Type[PlayerType]) -> Optional[PlayerType]:\n player: Any = self._source_player\n return (player if isinstance(player, playertype) and player.exists()\n else None)", "def source(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"source\")", "def key_source(self) -> Optional[str]:\n return pulumi.get(self, \"key_source\")", "def lookupUser_byID(self, user_id):\n sql = \"SELECT * FROM Users WHERE id='%s'\"\\\n % (user_id)\n res = self.execute(sql)\n reslist = res.fetchall()\n if reslist == []:\n return None\n else:\n return reslist[0]", "def setSourceName(self, instance, value):\n mapping = IAnnotations(instance).setdefault(\n 'collective.table',\n PersistentMapping()\n )\n mapping['source_name'] = value", "def _source(self, namespace):\n if not namespace:\n source = self._default_source\n else:\n source = self._sources.get(namespace)\n if not source:\n raise GroupResolverSourceError(namespace or \"<default>\")\n return source", "def model_for_source_url(self, url):\n if 'cities_reduced_fat/city/' in url:\n return City\n elif 'cities_reduced_fat/country/' in url:\n return Country", "def getSource(self):\n return self.source", "def __get_connector(source: str):\n lower_source = source.lower()\n mapping = {\n 'cortx': cortx_connector,\n 'fhir': fhir_connector\n }\n\n if lower_source not in mapping.keys():\n abort(400, 'Unknown source - only CORTX and FHIR available')\n\n return mapping[lower_source]", "def AcquisitionSource(self, default={}):\n tmp = self.data.get('metadata', {}).get('acquisition_source', default)\n return HEP.AcquisitionSourceObject(tmp)", "def get_order_source_by_id(self, order_source_id, **kwargs):\n\n all_params = ['order_source_id']\n all_params.append('callback')\n\n params = locals()\n for key, val in iteritems(params['kwargs']):\n if key not in all_params:\n raise TypeError(\n \"Got an unexpected keyword argument '%s'\"\n \" to method get_order_source_by_id\" % key\n )\n params[key] = val\n del params['kwargs']\n\n # verify the required parameter 'order_source_id' is set\n if ('order_source_id' not in params) or (params['order_source_id'] is None):\n raise 
ValueError(\"Missing the required parameter `order_source_id` when calling `get_order_source_by_id`\")\n\n resource_path = '/beta/orderSource/{orderSourceId}'.replace('{format}', 'json')\n path_params = {}\n if 'order_source_id' in params:\n path_params['orderSourceId'] = params['order_source_id']\n\n query_params = {}\n\n header_params = {}\n\n form_params = []\n local_var_files = {}\n\n body_params = None\n\n # HTTP header `Accept`\n header_params['Accept'] = self.api_client.\\\n select_header_accept(['application/json'])\n if not header_params['Accept']:\n del header_params['Accept']\n\n # HTTP header `Content-Type`\n header_params['Content-Type'] = self.api_client.\\\n select_header_content_type([])\n\n # Authentication setting\n auth_settings = ['api_key']\n\n response = self.api_client.call_api(resource_path, 'GET',\n path_params,\n query_params,\n header_params,\n body=body_params,\n post_params=form_params,\n files=local_var_files,\n response_type='OrderSource',\n auth_settings=auth_settings,\n callback=params.get('callback'))\n return response", "def source_uri(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"source_uri\")", "def service_by_source(self,sourcefile,servicename):\n\t\tif (sourcefile,servicename) in self.services:\n\t\t\treturn self.services[(sourcefile,servicename)]\n\t\treturn None", "def is_datasource_for(cls, **kwargs):\n if 'source' in kwargs.keys():\n if kwargs.get('source', ''):\n return kwargs.get('source', '').lower().startswith(cls._source)\n if 'meta' in kwargs.keys():\n return kwargs['meta'].get('TELESCOP', '').startswith('GOES')", "def source(self):\n if self._source not in ['Idle', 'Network']:\n return self._source\n else:\n return None", "def find_source(alt_az, lat_lon=local_latlong,\n minute=minute, hour=hour,\n day=day, month=month, year=year, tz_offset=5,\n return_all_sources=True):\n ra_dec = altaz_to_radec(alt_az, pos=lat_lon,\n minute=minute, hour=hour, day=day,\n month=month, year=year, tz_offset=5)\n\n coords = coordinates.SkyCoord(ra=ra_dec[0], dec=ra_dec[1],\n unit=(u.deg, u.deg), frame='icrs')\n # Get the actual results\n # For some reason, if this goes too big it stops seeing the actual source.\n r = 500 * u.arcminute\n results = Vizier.query_region(coords, radius=r, catalog='V/50')[0]\n df = results.to_pandas()\n\n candidate_sources = filter(None, [n for n in df['HD']])\n sources = []\n dmax, vmax = 0, 0\n for s in candidate_sources:\n source_info = df.loc[df['HD'] == s]\n name = source_info['Name']\n mag = round(float(source_info['Vmag']), 2)\n\n temp_ra = source_info['RAJ2000'].tolist()[0]\n temp_dec = source_info['DEJ2000'].tolist()[0]\n source_ra_hms = tuple(map(float, temp_ra.split()))\n source_dec_dms = tuple(map(float, temp_dec.split()))\n source_ra = Angle(source_ra_hms, unit='hourangle').degree\n source_dec = Angle(source_dec_dms, unit=u.deg).degree\n\n dist_from_center = np.sqrt((source_ra - ra_dec[0])**2 +\n (source_dec - ra_dec[1])**2)\n\n score = float(c1 * mag + c2 * dist_from_center)\n source_dict = {'HD': source_info['HD'].values[0],\n 'Name': source_info['Name'].values[0],\n 'RA': source_ra,\n 'DEC': source_dec,\n 'Distance': dist_from_center,\n 'Vmag': source_info['Vmag'],\n 'Score': score}\n\n sources.append(source_dict)\n\n dmax = dist_from_center if dist_from_center > dmax else dmax\n vmax = mag if mag > vmax else mag\n\n for s in range(len(sources)):\n d = sources[s]['Distance']/dmax\n mag = sources[s]['Vmag'].values[0]/vmax\n score = c1 * mag + c2 * d\n sources[s]['Score'] = score\n 
sources[s]['Scaled-Distance'] = d\n sources[s]['Scaled-Mag'] = mag\n\n sources_df = pd.DataFrame(sources)\n\n\n # Note that this loop is supremely janky, but df.loc'ing wasn't working.\n # best_source = sources_df.loc[sources_df['Score'] == sources_df['Score'].min]\n best_source_idx = 0\n # best_score = np.array([])\n best_score = 10000\n for i in range(len(sources)):\n score = sources[i]['Score']\n if score < best_score:\n best_source_idx = i\n best_score = score\n\n name = sources_df['Name'].values[0]\n out = {'Coords': ra_dec,\n 'HD-Name': 'HD' + str(int(sources[best_source_idx]['HD'])),\n 'Name': sources[best_source_idx]['Name'],\n 'Score': sources[best_source_idx]['Score'],\n 'Scaled-Distance': sources[best_source_idx]['Scaled-Distance'],\n 'Scaled-Mag': sources[best_source_idx]['Scaled-Mag']\n }\n return out", "def sources(self, video_display_name=None):\r\n sources_selector = self.get_element_selector(video_display_name, CSS_CLASS_NAMES['video_sources'])\r\n return self.q(css=sources_selector).map(lambda el: el.get_attribute('src').split('?')[0]).results", "def search_source(self,strz):\n\t\tfor src in sources_rip: #sources_rip = list of allow source words\n\t\t\tif src in strz:\n\t\t\t\tself.src_rip=src.replace(\".\",\"\")\n\t\t\t\treturn strz.replace(src,\"\")\n\t\treturn strz" ]
[ "0.6412398", "0.6283802", "0.62611306", "0.6204275", "0.6198703", "0.6188179", "0.608557", "0.5972765", "0.5959811", "0.59588695", "0.594499", "0.58315796", "0.5799471", "0.57752895", "0.57598245", "0.57466704", "0.5684819", "0.5638011", "0.561176", "0.5592158", "0.55663866", "0.5533117", "0.5516964", "0.5510883", "0.5498491", "0.54852426", "0.54852426", "0.54776955", "0.547437", "0.5467378", "0.5467378", "0.5456925", "0.5420616", "0.538151", "0.5378324", "0.53769416", "0.5372454", "0.5372454", "0.5353811", "0.5352213", "0.5352213", "0.5351372", "0.5323265", "0.5322453", "0.52980024", "0.52980024", "0.52980024", "0.52980024", "0.52980024", "0.52980024", "0.52950215", "0.52904654", "0.5274861", "0.525907", "0.5236574", "0.52134734", "0.5206551", "0.5194425", "0.5183023", "0.51787204", "0.5175624", "0.5145024", "0.51430494", "0.51430494", "0.5142337", "0.5128271", "0.51239663", "0.51224923", "0.5117836", "0.509814", "0.5093555", "0.5080328", "0.50766695", "0.507203", "0.5055519", "0.50419366", "0.5033188", "0.5029121", "0.50222737", "0.50114185", "0.49990132", "0.49967802", "0.49919325", "0.4981739", "0.4977871", "0.49561957", "0.49547338", "0.4953253", "0.49518818", "0.4951832", "0.49476388", "0.49353585", "0.4931007", "0.49257988", "0.492547", "0.49193096", "0.49108326", "0.488662", "0.4883907", "0.48811528" ]
0.64637303
0
Set up a template for creating new beamspot finders. Options can be configured via the config dict
def createBeamspotFinder(config=jobConfig, containerName = "VxPrimaryCandidate",suffix=""): import AthenaCommon.CfgMgr as CfgMgr from AthenaCommon.AppMgr import ToolSvc from AthenaCommon.AlgSequence import AlgSequence topSequence = AlgSequence() # Extra options that may not be in default jobConfig if not 'MinVertexProb' in config: config['MinVertexProb'] = 0.01 if not 'MaxVtxChi2' in config: config['MaxVtxChi2'] = 100 if not 'FixParK' in config: config['FixParK'] = False if not 'MaxSigmaTr' in config: config['MaxSigmaTr'] = 100. if not 'MaxVtxErrTr' in config: config['MaxVtxErrTr'] = 100. if not 'OutlierChi2Tr' in config: config['OutlierChi2Tr'] = 50. InDetBeamSpotVertex = CfgMgr.InDet__InDetBeamSpotVertex(name= 'InDetBeamSpotVertex_'+containerName+suffix, VertexContainer = containerName, VertexTypes = config['VertexTypes'], MinTracksPerVtx = config['MinTracksPerVtx'], MinVtxNum = config['MinVtxNum'], MaxOutlierLoops = 30, OutlierMaxRejection = 30, OutlierWidthFail= 5.1e-3, # in mm OutlierRhoFail = 0.8, DoHists = doVertexHists, OutputLevel = min(INFO,config['outputlevel']), VertexTreeName = "Vertices_"+containerName+suffix, MinVertexProb = config['MinVertexProb'], MaxVtxChi2 = config['MaxVtxChi2'], MaxSigmaTr = config['MaxSigmaTr'] , MaxVtxErrTr = config['MaxVtxErrTr'] , OutlierChi2Tr = config['OutlierChi2Tr'] ) ToolSvc += InDetBeamSpotVertex # Will be automatically printed as part of InDetBeamSpotFinder printout # print ToolSvc.InDetBeamSpotVertex # from InDetBeamSpotFinder.InDetBeamSpotFinderConf import InDet__InDetBeamSpotDbWriterTool InDetBeamSpotDbWriterTool = CfgMgr.InDet__InDetBeamSpotDbWriterTool(name = 'InDetBeamSpotDbWriterTool_'+containerName+suffix, OutputLevel = min(INFO,config['outputlevel']), TreeName = "COOLBeamspot_"+containerName+suffix, Tag = containerName+suffix ) ToolSvc += InDetBeamSpotDbWriterTool print ToolSvc.InDetBeamSpotDbWriterTool #from InDetBeamSpotFinder.InDetBeamSpotFinderConf import InDet__InDetBeamSpotFinder as InDetBeamSpotFinder topSequence += CfgMgr.InDet__InDetBeamSpotFinder(name = 'InDetBeamSpotFinder_'+containerName+suffix, BeamSpotTool = InDetBeamSpotVertex, BeamSpotWriterTool = InDetBeamSpotDbWriterTool, MaxCount = config['MaxCount'], LumiRange = config['LumiRange'], LumiBlockRanges = config['LumiBlockRanges'], RunRange = config['RunRange'], EventRange = config['EventRange'], #ForceRunNumber = 52280, DoHists = doBeamspotHist, WriteDb = False, UseDefaultValues = True, #WriteFailed = True, Default_SigmaX = 30.0, Default_SigmaY = 30.0, Default_SigmaZ = 500.0, Default_SigmaXY = 0.0, OutputLevel = min(INFO,config['outputlevel']), BeamSpotRootName = "Beamspots_"+containerName+suffix ) try: topSequence.InDetBeamSpotFinder.UseLBFromViewed = config['UseLBFromViewed'] topSequence.InDetBeamSpotFinder.UseLBFromAccepted = config['UseLBFromAccepted'] except: print 'ERROR: You are using an older version of InDetBeamSpotFinder - please update to InDetBeamSpotFinder-01-00-29 or later' print topSequence.InDetBeamSpotFinder
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def __setup_template(self):\n template = Template()\n template.add_description(\"Service VPC - used for services\")\n\n template.add_metadata({\n \"Build\": \"development\",\n \"DependsOn\": [],\n \"Environment\": \"ApiDev\",\n \"Revision\": \"develop\",\n \"StackName\": \"ApiDev-Dev-VPC\",\n \"StackType\": \"InfrastructureResource\",\n \"TemplateBucket\": \"cfn-apidev\",\n \"TemplateName\": \"VPC\",\n \"TemplatePath\": \"ApiDev/Dev/VPC\"\n })\n\n vpc = template.add_resource(\n ec2.VPC(\n \"VPC\",\n CidrBlock=\"10.0.0.0/16\",\n EnableDnsHostnames=\"true\",\n EnableDnsSupport=\"true\",\n InstanceTenancy=\"default\",\n Tags=self.__get_tags(\"ServiceVPC\"),\n )\n )\n\n instance_sg = template.add_resource(\n ec2.SecurityGroup(\n \"BastionSG\",\n GroupDescription=\"Used for source/dest rules\",\n Tags=self.__get_tags(\"VPC-Bastion-SG\"),\n VpcId=Ref(\n vpc\n )\n ),\n )\n\n cw_alarm_topic = template.add_resource(\n Topic(\n \"CloudWatchAlarmTopic\",\n TopicName=\"ApiDev-Dev-CloudWatchAlarms\",\n )\n )\n\n dhcp_options = template.add_resource(\n ec2.DHCPOptions(\n \"DhcpOptions\",\n DomainName=Join(\n \"\",\n [\n Ref(\"AWS::Region\"),\n \".compute.internal\"\n ]\n ),\n DomainNameServers=[\"AmazonProvidedDNS\"],\n Tags=self.__get_tags(\"DhcpOptions\"),\n )\n )\n\n gateway = template.add_resource(\n ec2.InternetGateway(\n \"InternetGateway\",\n Tags=self.__get_tags(\"InternetGateway\")\n )\n )\n\n nat_emergency_topic = template.add_resource(\n Topic(\n \"NatEmergencyTopic\",\n TopicName=\"ApiDev-Dev-NatEmergencyTopic\",\n )\n )\n\n vpc_dhcp_options_assoc = template.add_resource(\n ec2.VPCDHCPOptionsAssociation(\n \"VpcDhcpOptionsAssociation\",\n DhcpOptionsId=Ref(\n dhcp_options\n ),\n VpcId=Ref(\n vpc\n )\n )\n )\n\n vpc_gw_attachment = template.add_resource(\n ec2.VPCGatewayAttachment(\n \"VpcGatewayAttachment\",\n InternetGatewayId=Ref(\n gateway\n ),\n VpcId=Ref(\n vpc\n )\n )\n )\n\n vpc_network_acl = template.add_resource(\n ec2.NetworkAcl(\n \"VpcNetworkAcl\",\n Tags=self.__get_tags(\"NetworkAcl\"),\n VpcId=Ref(\n vpc\n )\n )\n )\n\n vpc_network_acl_rules = template.add_resource([\n ec2.NetworkAclEntry(\n \"VpcNetworkAclInboundRulePublic443\",\n CidrBlock=\"0.0.0.0/0\",\n Egress=\"false\",\n NetworkAclId=Ref(\n vpc_network_acl\n ),\n PortRange=ec2.PortRange(\n From=\"443\",\n To=\"443\",\n ),\n Protocol=\"6\",\n RuleAction=\"allow\",\n RuleNumber=20001\n ),\n ec2.NetworkAclEntry(\n \"VpcNetworkAclInboundRulePublic80\",\n CidrBlock=\"0.0.0.0/0\",\n Egress=\"false\",\n NetworkAclId=Ref(\n vpc_network_acl\n ),\n PortRange=ec2.PortRange(\n From=\"80\",\n To=\"80\",\n ),\n Protocol=\"6\",\n RuleAction=\"allow\",\n RuleNumber=20000\n ),\n ec2.NetworkAclEntry(\n \"VpcNetworkAclOutboundRule\",\n CidrBlock=\"0.0.0.0/0\",\n Egress=\"true\",\n NetworkAclId=Ref(\n vpc_network_acl\n ),\n Protocol=\"-1\",\n RuleAction=\"allow\",\n RuleNumber=30000\n ),\n ec2.NetworkAclEntry(\n \"VpcNetworkAclSsh\",\n CidrBlock=\"127.0.0.1/32\",\n Egress=\"false\",\n NetworkAclId=Ref(\n vpc_network_acl\n ),\n PortRange=ec2.PortRange(\n From=\"22\",\n To=\"22\",\n ),\n Protocol=\"6\",\n RuleAction=\"allow\",\n RuleNumber=10000\n )\n ])\n\n template.add_output([\n Output(\n \"BastionSG\",\n Value=Ref(instance_sg)\n ),\n Output(\n \"CloudWatchAlarmTopic\",\n Value=Ref(cw_alarm_topic)\n ),\n Output(\n \"InternetGateway\",\n Value=Ref(gateway)\n ),\n Output(\n \"NatEmergencyTopicARN\",\n Value=Ref(nat_emergency_topic)\n ),\n Output(\n \"VPCID\",\n Value=Ref(vpc)\n ),\n Output(\n \"VPCName\",\n Value=Ref(\"AWS::StackName\")\n 
),\n Output(\n \"VpcNetworkAcl\",\n Value=Ref(vpc_network_acl)\n )\n\n ])\n\n return template", "def config(self, **_) -> \"NexusTemplate\":\n return self", "def setUp(self):\n print \"Setting Up: %s\" % self.id()\n # render the template\n g.render_template(self.template_file,\n self.template_vars,\n self.output_file,\n self.search_path)\n\n # read the resulting config file built from template\n self.output_config = g.load_config(self.output_file)\n g.show_config(self.output_config)", "def create_config(self) -> None:\n pass", "def create_config(self) -> None:\n pass", "def __init__(self, template_name, **kwargs):\n self.template_name = template_name\n self.model = dict(**kwargs)", "def create(self, config):\n\n assert config[\"name\"] == self.name, \"Given config is not for this template\"\n\n data = self._json_encode(config)\n headers = self._default_headers()\n\n return self._request(\"\",\n ok_status=None,\n data=data,\n headers=headers)", "def generate_config(context):\n resources = []\n\n # Create an initial 'STARTED' pubsub notification.\n if 'pubsubTopic' in context.properties:\n resources.extend(\n create_pubsub_notification(\n context,\n depends_on=[],\n status_string='STARTED',\n ))\n\n # Required properties.\n billing_account_id = context.properties['billingAccountId']\n parent_organization = context.properties['parentOrganization']\n project_id = context.properties['projectId']\n\n # Optional properties, with defaults.\n high_security_network = context.properties.get('highSecurityNetwork', False)\n private_ip_google_access = context.properties.get('privateIpGoogleAccess', False)\n storage_bucket_lifecycle = context.properties.get('storageBucketLifecycle', 180)\n billing_account_friendly_name = context.properties.get('billingAccountFriendlyName', billing_account_id)\n # Use a project name if given, otherwise it's safe to fallback to use the\n # project ID as the name.\n project_name = context.properties.get('projectName', project_id)\n labels_obj = context.properties.get('labels', {})\n\n # Save this template's version number and all parameters inputs to the project metadata to keep track of what\n # operations were performed on a project.\n labels_obj.update({\n \"firecloud-project-template-version\" : str(FIRECLOUD_PROJECT_TEMPLATE_VERSION_ID)\n })\n\n for k, v in context.properties.items():\n label_k, label_v = satisfy_label_requirements('param--' + str(k), v)\n labels_obj.update({\n label_k: label_v\n })\n\n\n if high_security_network:\n labels_obj.update({\n \"vpc-network-name\" : FIRECLOUD_VPC_NETWORK_NAME,\n \"vpc-subnetwork-name\" : FIRECLOUD_VPC_SUBNETWORK_NAME\n })\n\n if 'parentFolder' in context.properties:\n parent_obj = {\n 'id': context.properties['parentFolder'],\n 'type': 'folder',\n }\n else:\n parent_obj = {\n 'id': context.properties['parentOrganization'],\n 'type': 'organization',\n }\n\n # Create the main project resource.\n resources.append({\n 'type': 'templates/project.py',\n 'name': 'fc-project',\n 'properties': {\n 'activateApis': FIRECLOUD_REQUIRED_APIS,\n 'billingAccountId': billing_account_id,\n 'billingAccountFriendlyName': billing_account_friendly_name,\n 'iamPolicies': create_iam_policies(context),\n 'labels': labels_obj,\n 'name': project_name,\n # The project parent. For FireCloud, this should refer to the\n # firecloud.org (or equivalent) GCP organization ID.\n 'parent': parent_obj,\n 'projectId': project_id,\n # If true, this would remove the default compute egine service\n # account. 
FireCloud doesn't use this SA, but we're leaving this set\n # to False to avoid changing any legacy behavior, at least initially.\n 'removeDefaultSA': False,\n # Removes the default VPC network for projects requiring stringent\n # network security configurations.\n 'removeDefaultVPC': high_security_network,\n 'createUsageExportBucket': False,\n # Always set up the storage logs and cromwell auth buckets for Firecloud\n 'storageLogsBucket': True,\n 'storageBucketLifecycle': storage_bucket_lifecycle,\n 'cromwellAuthBucket': True\n }\n })\n\n if high_security_network:\n resources.extend(create_high_security_network(context))\n resources.extend(create_firewall(context))\n if private_ip_google_access:\n resources.extend(create_private_google_access_dns_zone(context))\n else:\n resources.extend(create_default_network(context))\n\n if 'pubsubTopic' in context.properties:\n resources.extend(\n create_pubsub_notification(\n context,\n # This is somewhat hacky, but we can't simply collect the name of each\n # collected resource since template call nodes aren't \"real\" resources\n # that can be part of a dependsOn stanza. So instead, we collect the\n # names of all resources that are output by the network (which itself\n # depends on the project). It doesn't seem to be possible to concatenate\n # dependsOn arrays within the reference syntax, otherwise we could make\n # this depend explicitly on all resources from the template nodes.\n depends_on='$(ref.fc-network.resourceNames)',\n status_string='COMPLETED'))\n\n return {'resources': resources}", "def create_config(self, context, mgmtport):\n pass", "def generate_config(context):\n\n resources = []\n properties = context.properties\n project_id = properties.get('project', context.env['project'])\n name = properties.get('name', context.env['name'])\n\n resource = {\n 'name': context.env['name'],\n # https://cloud.google.com/filestore/docs/reference/rest/v1beta1/projects.locations.instances/create\n 'type': 'gcp-types/file-v1beta1:projects.locations.instances',\n 'properties': {\n 'parent': 'projects/{}/locations/{}'.format(project_id, properties['location']),\n 'instanceId': name,\n }\n }\n\n optional_props = [\n 'description',\n 'tier',\n 'labels',\n 'fileShares',\n 'networks',\n ]\n\n for prop in optional_props:\n if prop in properties:\n resource['properties'][prop] = properties[prop]\n\n resources.append(resource)\n\n return {\n 'resources':\n resources,\n 'outputs':\n [\n {\n 'name': 'name',\n 'value': name\n },\n {\n 'name': 'fileShares',\n 'value': '$(ref.{}.fileShares)'.format(context.env['name'])\n },\n {\n 'name': 'networks',\n 'value': '$(ref.{}.networks)'.format(context.env['name'])\n }\n ]\n }", "def configure(self, options, config):\n Plugin.configure(self, options, config)\n self.config = config\n if self.enabled:\n self.jinja = Environment(\n loader=FileSystemLoader(os.path.join(os.path.dirname(__file__), 'templates')),\n trim_blocks=True,\n lstrip_blocks=True\n )\n self.stats = {'errors': 0, 'failures': 0, 'passes': 0, 'skipped': 0}\n self.report_data = defaultdict(Group)\n self.report_file = codecs.open(options.html_file, 'w', self.encoding, 'replace')", "def setup_data(es_with_collector):\n country_uk = constants.Country.united_kingdom.value.id\n country_us = constants.Country.united_states.value.id\n uk_region = constants.UKRegion.south_east.value.id\n CompanyFactory(\n name='abc defg ltd',\n trading_names=['helm', 'nop'],\n address_1='1 Fake Lane',\n address_town='Downtown',\n address_country_id=country_uk,\n 
uk_region_id=uk_region,\n )\n CompanyFactory(\n name='abc defg us ltd',\n trading_names=['helm', 'nop', 'qrs'],\n address_1='1 Fake Lane',\n address_town='Downtown',\n address_country_id=country_us,\n registered_address_country_id=country_us,\n )\n es_with_collector.flush_and_refresh()", "def create_template(self):\n options = {\n 'dir': os.path.join(os.path.dirname(__file__)),\n 'template': self.template,\n 'project': self.project,\n 'answers_file': self.answers_file,\n }\n return self.env.run(\n '%(dir)s/bin/mrbob -O %(project)s --config '\n '%(dir)s/%(answers_file)s %(dir)s/bobtemplates/%(template)s'\n % options)", "def _base_troposphere_template(self):\n template = troposphere.Template()\n template.add_parameter(\n troposphere.Parameter(\n \"Stage\",\n Default=\"dev\",\n Description=\"Name of the Stage\",\n Type=\"String\",\n )\n )\n\n template.add_parameter(\n troposphere.Parameter(\n \"Region\",\n Description=\"AWS Region\",\n Type=\"String\",\n )\n )\n return template", "def _config(self):\n tmpl = self._template_interface\n for p in tmpl._params:\n setattr(self, p._name, p.get_value())", "def test_create_template_subsciption(self):\n pass", "def gen_parameters(self):\n\n print \"\\t* Adding parameters to compute template\"\n # get all the server client\n servers = self.novaclient.servers.list()\n\n # add all key_pair_names\n self.gen_key_name_parameters(servers)\n\n # add all images\n self.gen_image_parameters(servers)\n\n # add all flavors\n self.gen_flavor_parameters(servers)\n\n # add all networks\n self.gen_network_parameters()", "def create_configuration(EngineType=None, EngineVersion=None, Name=None, Tags=None):\n pass", "def generate_config(context):\n\n\n properties = context.properties\n project_id = properties.get('project', context.env['project'])\n\n network = context.properties.get('networkURL', generate_network_uri(\n project_id,\n context.properties.get('network','')\n ))\n target_vpn_gateway = context.env['name'] + '-tvpng'\n esp_rule = context.env['name'] + '-esp-rule'\n udp_500_rule = context.env['name'] + '-udp-500-rule'\n udp_4500_rule = context.env['name'] + '-udp-4500-rule'\n vpn_tunnel = context.env['name'] + '-vpn'\n router_vpn_binding = context.env['name'] + '-router-vpn-binding'\n resources = []\n if 'ipAddress' in context.properties:\n ip_address = context.properties['ipAddress']\n static_ip = ''\n else:\n static_ip = context.env['name'] + '-ip'\n resources.append({\n # The reserved address resource.\n 'name': static_ip,\n # https://cloud.google.com/compute/docs/reference/rest/v1/addresses\n 'type': 'gcp-types/compute-v1:addresses',\n 'properties': {\n 'name': properties.get('name', static_ip),\n 'project': project_id,\n 'region': context.properties['region']\n }\n })\n ip_address = '$(ref.' 
+ static_ip + '.address)'\n\n resources.extend([\n {\n # The target VPN gateway resource.\n 'name': target_vpn_gateway,\n # https://cloud.google.com/compute/docs/reference/rest/v1/targetVpnGateways\n 'type': 'gcp-types/compute-v1:targetVpnGateways',\n 'properties':\n {\n 'name': properties.get('name', target_vpn_gateway),\n 'project': project_id,\n 'network': network,\n 'region': context.properties['region'],\n }\n },\n {\n # The forwarding rule resource for the ESP traffic.\n 'name': esp_rule,\n # https://cloud.google.com/compute/docs/reference/rest/v1/forwardingRules\n 'type': 'gcp-types/compute-v1:forwardingRules',\n 'properties':\n {\n 'name': '{}-esp'.format(properties.get('name')) if 'name' in properties else esp_rule,\n 'project': project_id,\n 'IPAddress': ip_address,\n 'IPProtocol': 'ESP',\n 'region': context.properties['region'],\n 'target': '$(ref.' + target_vpn_gateway + '.selfLink)',\n }\n },\n {\n # The forwarding rule resource for the UDP traffic on port 4500.\n 'name': udp_4500_rule,\n # https://cloud.google.com/compute/docs/reference/rest/v1/forwardingRules\n 'type': 'gcp-types/compute-v1:forwardingRules',\n 'properties':\n {\n 'name': '{}-udp-4500'.format(properties.get('name')) if 'name' in properties else udp_4500_rule,\n 'project': project_id,\n 'IPAddress': ip_address,\n 'IPProtocol': 'UDP',\n 'portRange': 4500,\n 'region': context.properties['region'],\n 'target': '$(ref.' + target_vpn_gateway + '.selfLink)',\n }\n },\n {\n # The forwarding rule resource for the UDP traffic on port 500\n 'name': udp_500_rule,\n # https://cloud.google.com/compute/docs/reference/rest/v1/forwardingRules\n 'type': 'gcp-types/compute-v1:forwardingRules',\n 'properties':\n {\n 'name': '{}-udp-500'.format(properties.get('name')) if 'name' in properties else udp_500_rule,\n 'project': project_id,\n 'IPAddress': ip_address,\n 'IPProtocol': 'UDP',\n 'portRange': 500,\n 'region': context.properties['region'],\n 'target': '$(ref.' + target_vpn_gateway + '.selfLink)',\n }\n },\n\n ])\n router_url_tag = 'routerURL'\n router_name_tag = 'router'\n\n if router_name_tag in context.properties:\n router_url = context.properties.get(router_url_tag, generate_router_uri(\n context.env['project'],\n context.properties['region'],\n context.properties[router_name_tag]))\n # Create dynamic routing VPN\n resources.extend([\n {\n # The VPN tunnel resource.\n 'name': vpn_tunnel,\n # https://cloud.google.com/compute/docs/reference/rest/v1/vpnTunnels\n 'type': 'gcp-types/compute-v1:vpnTunnels',\n 'properties':\n {\n 'name': properties.get('name', vpn_tunnel),\n 'project': project_id,\n 'description':\n 'A vpn tunnel',\n 'ikeVersion':\n 2,\n 'peerIp':\n context.properties['peerAddress'],\n 'region':\n context.properties['region'],\n 'router': router_url,\n 'sharedSecret':\n context.properties['sharedSecret'],\n 'targetVpnGateway':\n '$(ref.' + target_vpn_gateway + '.selfLink)'\n },\n 'metadata': {\n 'dependsOn': [esp_rule,\n udp_500_rule,\n udp_4500_rule]\n }\n }])\n else:\n # Create static routing VPN\n resources.append(\n {\n # The VPN tunnel resource.\n 'name': vpn_tunnel,\n 'type': 'gcp-types/compute-v1:vpnTunnels',\n 'properties': {\n 'name': vpn_tunnel,\n 'description':\n 'A vpn tunnel',\n 'ikeVersion':\n 2,\n 'peerIp':\n context.properties['peerAddress'],\n 'region':\n context.properties['region'],\n 'sharedSecret':\n context.properties['sharedSecret'],\n 'targetVpnGateway':\n '$(ref.' 
+ target_vpn_gateway + '.selfLink)',\n 'localTrafficSelector':\n context.properties['localTrafficSelector'],\n 'remoteTrafficSelector':\n context.properties['remoteTrafficSelector'],\n\n },\n 'metadata': {\n 'dependsOn': [esp_rule, udp_500_rule, udp_4500_rule]\n }\n },\n )\n\n return {\n 'resources':\n resources,\n 'outputs':\n [\n {\n 'name': 'targetVpnGateway',\n 'value': target_vpn_gateway\n },\n {\n 'name': 'staticIp',\n 'value': static_ip\n },\n {\n 'name': 'espRule',\n 'value': esp_rule\n },\n {\n 'name': 'udp500Rule',\n 'value': udp_500_rule\n },\n {\n 'name': 'udp4500Rule',\n 'value': udp_4500_rule\n },\n {\n 'name': 'vpnTunnel',\n 'value': vpn_tunnel\n },\n {\n 'name': 'vpnTunnelUri',\n 'value': '$(ref.'+vpn_tunnel+'.selfLink)'\n }\n ]\n }", "def setupOptions():\n define(\"conf\", default=\"\",help=\"path to configuration file\")\n define(\"DB_CACHE\", default=\"False\", help=\"Flag\")\n define(\"CELL_NAME\", default=\"beolink.org\", help=\"Default Cell\")\n\n afs.orm.DbMapper.setupOptions() \n return", "def configure(self, options, conf):", "def __init__(self, name):\n config = Config()\n super(TestTemplate, self).__init__(name)\n self.doc_dict = None\n func = getattr(self, name)\n self.get_test_doc(name, func.__doc__)\n # ASSIGN VALUES TO CONFIG\n config['name'] = self.doc_dict['name']\n logger.info(\"NAME: {}\".format(config['name']))\n config['title'] = self.doc_dict['title']\n logger.info(\"TITLE: {}\".format(config['title']))\n config['test'] = self.doc_dict['test']\n logger.info(\"TEST: {}\".format(config['test']))\n config['bug'] = self.doc_dict['bug']\n logger.info(\"BUG: {}\".format(config['bug']))", "def setup_cloud(self, config):\n # Just write out the template to the pwd for right now\n template_file = './openstack.yaml'\n with open(template_file, 'w', encoding='utf-8') as fp:\n fp.write(config)\n self._cloud.create_stack(self._stack_name, template_file=template_file, wait=True)", "def configureEFS(self):\n methodName = \"configureEFS\"\n \n TR.info(methodName,\"STARTED configuration of EFS\")\n # Create the EFS provisioner service account\n\n \"\"\"\n oc create -f efs-configmap.yaml -n default\n oc create serviceaccount efs-provisioner\n oc create -f efs-rbac-template.yaml\n oc create -f efs-storageclass.yaml\n oc create -f efs-provisioner.yaml\n oc create -f efs-pvc.yaml\n \"\"\"\n \n # self.updateTemplateFile(workerocs,'${az1}', self.zones[0])\n self.updateTemplateFile(\"/ibm/templates/efs/efs-configmap.yaml\",'${file-system-id}',self.EFSID)\n self.updateTemplateFile(\"/ibm/templates/efs/efs-configmap.yaml\",'${aws-region}',self.region)\n self.updateTemplateFile(\"/ibm/templates/efs/efs-configmap.yaml\",'${efsdnsname}',self.EFSDNSName)\n\n self.updateTemplateFile(\"/ibm/templates/efs/efs-provisioner.yaml\",'${file-system-id}',self.EFSID)\n self.updateTemplateFile(\"/ibm/templates/efs/efs-provisioner.yaml\",'${aws-region}',self.region)\n\n TR.info(methodName,\"Invoking: oc create -f efs-configmap.yaml -n default\")\n cm_cmd = \"oc create -f /ibm/templates/efs/efs-configmap.yaml -n default\"\n retcode = call(cm_cmd, shell=True)\n if (retcode != 0):\n TR.info(methodName,\"Invoking: oc create -f efs-configmap.yaml -n default %s\" %retcode)\n raise Exception(\"Error calling oc. Return code: %s\" % retcode)\n #endIf\n\n TR.info(methodName,\"Invoking: oc create serviceaccount efs-provisioner\")\n sa_cmd = \"oc create serviceaccount efs-provisioner\"\n retcode = call(sa_cmd, shell=True)\n if (retcode != 0):\n raise Exception(\"Error calling oc. 
Return code: %s\" % retcode)\n #endIf\n\n TR.info(methodName,\"Invoking: oc create -f efs-rbac-template.yaml\")\n rbac_cmd = \"oc create -f /ibm/templates/efs/efs-rbac-template.yaml\"\n retcode = call(rbac_cmd, shell=True)\n if (retcode != 0):\n raise Exception(\"Error calling oc. Return code: %s\" % retcode)\n #endIf\n\n TR.info(methodName,\"Invoking: oc create -f efs-storageclass.yaml\")\n sc_cmd = \"oc create -f /ibm/templates/efs/efs-storageclass.yaml\"\n retcode = call(sc_cmd, shell=True)\n if (retcode != 0):\n raise Exception(\"Error calling oc. Return code: %s\" % retcode)\n #endIf\n \n TR.info(methodName,\"Invoking: oc create -f efs-provisioner.yaml\")\n prov_cmd = \"oc create -f /ibm/templates/efs/efs-provisioner.yaml\"\n retcode = call(prov_cmd, shell=True)\n if (retcode != 0):\n raise Exception(\"Error calling oc. Return code: %s\" % retcode)\n #endIf\n \n TR.info(methodName,\"Invoking: oc create -f efs-pvc.yaml\")\n pvc_cmd = \"oc create -f /ibm/templates/efs/efs-pvc.yaml\"\n retcode = call(pvc_cmd, shell=True)\n if (retcode != 0):\n raise Exception(\"Error calling oc. Return code: %s\" % retcode)\n #endIf \n \n TR.info(methodName,\"COMPLETED configuration of EFS.\")", "def create_template(self):\n return '{}/{}.html'.format(self.object_name, self.create_endpoint)", "def create(self, template, print_cmd=False):\n brand_mapping = {'solaris11' : 'SYSsolaris', 'solaris' : 'SYSsolaris', 'solaris10' : 'SYSsolaris10'}\n if brand_mapping.has_key(template):\n template = brand_mapping[template]\n\n return self._create_minimal(template, print_cmd)\n\n #self._write_sysidcfg()", "def create_arg_config(environment, region, template, parameters):\r\n raw_config = {\r\n 'Environment': environment,\r\n 'Region': region\r\n }\r\n if template:\r\n raw_config['Template'] = template\r\n if parameters:\r\n raw_config['Parameters'] = dict(parameters)\r\n return Config(raw_config)", "def __init__(self):\n super().__init__()\n TemplateEngineFactory.register_factory('Jinja2Engine', Jinja2Engine.Factory)\n\n step1 = PrepareAppConfTransfiguration()\n step2 = ConfReaderToContextTransfiguration()\n step3 = ValidateCollectionTags()\n step4 = AttributeChainedTransfiguration('mbean')\n step5 = AttributeChainedTransfiguration('connection')\n\n self.add(step1)\n self.add(step2)\n self.add(step3)\n self.add(step4)\n self.add(step5)", "def create_project(options, name=None, template='google-app-engine-python'):\n if options.help:\n print create_project.__doc__\n sys.exit(1)\n\n print template, name", "def help_create(self):\n print(\"create instances\")", "def initCreate(self , initialconfig):\n return", "def configure_stp_instance(self, instance, **kwargs):\n pass", "def GenerateConfig(context):\n\n resources = [{\n 'name': context.env['name'],\n 'type': 'compute.v1.instance',\n 'properties': {\n 'zone': context.properties['zone'],\n 'machineType': ''.join([COMPUTE_URL_BASE, 'projects/',\n context.env['project'], '/zones/',\n context.properties['zone'], '/machineTypes/',\n context.properties['machineType']]),\n 'disks': [{\n 'deviceName': 'boot',\n 'type': 'PERSISTENT',\n 'boot': True,\n 'autoDelete': True,\n 'initializeParams': {\n 'sourceImage': ''.join([COMPUTE_URL_BASE, 'projects/',\n 'ubuntu-os-cloud/global/',\n 'images/family/ubuntu-1604-lts'])\n }\n }],\n 'networkInterfaces': [{\n 'network': '$(ref.' 
+ context.properties['network']\n + '.selfLink)',\n 'accessConfigs': [{\n 'name': 'External NAT',\n 'type': 'ONE_TO_ONE_NAT'\n }]\n }],\n 'metadata': {\n 'items': [{\n 'key': 'startup-script',\n 'value': ''.join(['#!/bin/bash\\n',\n 'sudo apt-get install openjdk-9-jre-headless -y\\n',\n 'sudo python -m SimpleHTTPServer 80'])\n }]\n }\n }\n }]\n return {'resources': resources}", "def __init__(self):\n super().__init__()\n TemplateEngineFactory.register_factory('Jinja2Engine', Jinja2Engine.Factory)\n\n step1 = PrepareAppConfTransfiguration()\n step2 = ConfReaderToContextTransfiguration()\n step3 = AttributeChainedTransfiguration('mbean')\n\n self.add(step1)\n self.add(step2)\n self.add(step3)", "def set_parameters(self, seed):\n self.file_location = self.CONFIG[seed]['file_location']\n self.url = self.CONFIG[seed]['url']\n self.pattern = self.CONFIG[seed]['pattern']\n self.date_type = self.CONFIG[seed]['date_type']\n self.ext = self.CONFIG[seed]['extension']\n self.rename = self.CONFIG[seed].get('rename', None)\n\n # Make the locations if they do not exist.\n if not os.path.isdir(self.file_location):\n os.mkdir(self.file_location)", "def _custom_endpoint_init(self, node_config, *argv):\n config = {}\n selector = node_config.get('template', None)\n if not selector:\n selector = argv[0]\n _LOGGER.debug(\" selector: %s\", selector)\n config = {\n \"config_report\": [\n [0x0001, 0x0020, 60, 3600, 5], \n [0x0001, 0x0021, 60, 3600, 5]\n ],\n \"in_cluster\": [0x0000, 0x0001, 0x0500, ],\n \"out_cluster\": [0x0500],\n \"type\": \"binary_sensor\",\n }\n self.add_input_cluster(0x0500)\n self.add_output_cluster(0x0500)", "def __fill_all_templates__(self,configs):\n template_dir = configs['system'].get('Common_directories','template')\n sample_template = os.path.join(template_dir,configs['pipeline'].get('Template_files','sample'))\n system_template = os.path.join(template_dir,configs['pipeline'].get('Template_files','system'))\n qsub_template = os.path.join(template_dir,configs['pipeline'].get('Template_files','bcbio'))\n self.__fill_template__(sample_template,self.sample_file)\n self.__fill_template__(system_template,self.systems_file)\n self.__fill_template__(qsub_template,self.qsub_file)", "def create(self, factory_ctx):\n return propartial(EBSWrapper, region=factory_ctx.get('entity').get('region', None))", "def create(ctx, **kwargs):\n # creates and activates pf9-express config file\n\n pf9_exp_conf_dir = ctx.obj['pf9_exp_conf_dir']\n \n # Backup existing config if one exist\n if os.path.exists(pf9_exp_conf_dir + 'express.conf'):\n with open(pf9_exp_conf_dir + 'express.conf', 'r') as current:\n lines = current.readlines()\n current.close()\n for line in lines:\n if 'config_name|' in line:\n line = line.strip()\n name = line.replace('config_name|','')\n\n filename = name + '.conf'\n shutil.copyfile(pf9_exp_conf_dir + 'express.conf', pf9_exp_conf_dir + filename)\n\n if not os.path.exists(pf9_exp_conf_dir):\n try:\n access_rights = 0o700\n os.makedirs(pf9_exp_conf_dir, access_rights)\n except Exception:\n click.echo(\"Creation of the directory %s failed\" % pf9_exp_conf_dir)\n else:\n click.echo(\"Successfully created the directory %s \" % pf9_exp_conf_dir)\n\n with open(pf9_exp_conf_dir + 'express.conf', 'w') as file:\n for k,v in ctx.params.items():\n file.write(k + '|' + str(v) + '\\n')\n click.echo('Successfully wrote Platform9 management plane configuration')", "def create(self):\n # TODO: Properly validate data\n self._proj()\n if self.cfg.align_heading:\n self._align()\n self._griddata()\n if 
self.cfg.gap_filter[\"algorithm\"] != \"none\":\n self._gap_filter()", "def generic_setup(\n self,\n npf_orgs_list,\n biz_orgs_list,\n volunteers_list,\n create_admins=True,\n create_projects=True\n ):\n # creating NPF org with projects if required\n org_i = 0\n for npf_org in npf_orgs_list:\n\n org_i += 1\n org = _create_org(npf_org, \"npf\")\n\n # creating projects\n if create_projects:\n _create_project(org, 'test_project_{}'.format(str(org_i)))\n\n # creating an NPF admin\n if create_admins:\n _create_test_user(\n 'npf_admin_{}'.format(str(org_i)),\n org=org,\n is_org_admin=True\n )\n\n # creating BIZ org\n biz_org_i = 0\n for biz_org in biz_orgs_list:\n\n biz_org_i += 1\n org = _create_org(biz_org, \"biz\")\n\n # creating an BIZ admin\n if create_admins:\n _create_test_user(\n 'biz_admin_{}'.format(str(biz_org_i)),\n org=org,\n is_org_admin=True\n )\n\n # creating existing volunteers\n for volunteer in volunteers_list:\n _create_test_user(volunteer)\n\n # creating master offer\n if len(biz_orgs_list) > 0:\n _create_offer(\n self.get_all_biz_orgs()[0],\n currents_share=_SHARE * 100,\n is_master=True\n )", "def update_config_external_template(config):\r\n\r\n # best parameters from the paper\r\n config['train_batch_size'] = 16384\r\n config['lr'] = 3e-4\r\n config['sgd_minibatch_size'] = 4096\r\n config['num_sgd_iter'] = 4\r\n config['rollout_fragment_length'] = 100\r\n\r\n # run ID to communicate to the http trainer\r\n config['run_uid'] = '_setme'\r\n\r\n # stable baselines accepts full episodes\r\n config[\"batch_mode\"] = \"complete_episodes\"\r\n\r\n # stable baselines server address\r\n config[\"http_remote_port\"] = \"http://127.0.0.1:50001\"\r\n\r\n # no gpus, stable baselines might use them\r\n config['num_gpus'] = 0\r\n\r\n # set trainer class\r\n config['_trainer'] = \"External\"\r\n config['_policy'] = \"PPO\"\r\n\r\n # tuned\r\n config['num_envs_per_worker'] = 10\r\n config['num_workers'] = 3\r\n return config", "def create_endpoint_config(EndpointConfigName=None, ProductionVariants=None, Tags=None, KmsKeyId=None):\n pass", "def _configureVisualFactory(self):\n raise NotImplementedError", "def configure(self, options):\n #\n # TODO(lartola) These operations are performed on the client side using\n # gcloud to make sure that the user running it has enough admin permissions\n # to perform them. 
If these were to be executed by Orchestrate in the API\n # backend, they would be performed under the Orchestrate service account.\n # Perhaps it is ok to do this in the backend, but at this point it seems\n # preferable to know that the user registering the project is indeed a\n # trusted admin of the project.\n #\n self.configure_roles(options)\n orchestrate_account = 'orchestrate@{project}.iam.gserviceaccount.com'.format(\n project=options.api_project)\n project_account = 'orchestrate@{project}.iam.gserviceaccount.com'.format(\n project=options.project)\n commands = \"\"\"\ngcloud services enable compute.googleapis.com --project={project} --quiet\ngcloud services enable pubsub.googleapis.com --project={project} --quiet\ngcloud services enable cloudresourcemanager.googleapis.com --project={project} --quiet\ngcloud services enable dns.googleapis.com --project={project} --quiet\ngcloud projects add-iam-policy-binding {project} --member=\"serviceAccount:{orchestrate_account}\" --role=\"projects/{project}/roles/orchestrate.devOps\" --quiet\ngcloud projects add-iam-policy-binding {project} --member=\"serviceAccount:{orchestrate_account}\" --role=\"roles/iam.serviceAccountUser\" --quiet\ngcloud iam service-accounts create orchestrate --display-name=\"Orchestrate project-level orchestration service account.\" --project={project} --quiet\ngcloud projects add-iam-policy-binding {project} --member=\"serviceAccount:{project_account}\" --role=\"roles/iam.serviceAccountUser\" --quiet\ngcloud projects add-iam-policy-binding {project} --member=\"serviceAccount:{project_account}\" --role=\"roles/logging.logWriter\" --quiet\ngcloud projects add-iam-policy-binding {project} --member=\"serviceAccount:{project_account}\" --role=\"roles/monitoring.metricWriter\" --quiet\n\"\"\".format(\n project=options.project,\n orchestrate_account=orchestrate_account,\n project_account=project_account,\n ).strip().split('\\n')\n run_commands(commands)", "def create_routine_template(self):\n # Create RoutineTemplate based on _DEFAULT_ROUTINE_TEMPLATE\n self.routine_template = copy.deepcopy(self._DEFAULT_ROUTINE_TEMPLATE)\n\n for step in self.routine_template:\n # Retrieve the step settings from the configuration parameter\n # dictionary. 
The settings will be merged according to the correct\n # hierarchy (more specific settings will overwrite less specific\n # settings)\n step_settings = self.extract_step_settings(step[0], step[1],\n step[2])\n step[2]['settings'] = step_settings\n\n # standard global settings\n delegate_plotting = self.get_param_value('delegate_plotting')\n\n self.routine_template.global_settings.update({\n \"dev\": self.dev,\n \"update\": True, # all subroutines should update relevant params\n \"delegate_plotting\": delegate_plotting,\n })\n\n # add user specified global settings\n update_nested_dictionary(\n self.routine_template.global_settings,\n self.kw.get(\"global_settings\", {}),\n )", "def init():\n defaults = _project_defaults()\n\n if Project.prompt:\n defaults['name'] = prompt(\"Enter the project's name:\", defaults['name'])\n defaults['package'] = prompt(\"Enter the project's package:\", defaults['package'])\n defaults['author'] = prompt(\"Enter the project's author:\", defaults['author'])\n defaults['author_email'] = prompt(\"Enter the project's author's email:\", defaults['author_email'])\n defaults['description'] = prompt(\"Enter the project's description:\", defaults['description'])\n\n # print(\"defaults:\\n{defaults}\".format(defaults=pformat(defaults)))\n\n if Project.use_templates:\n\n template = Template()\n\n for template_dir in [os.path.abspath(os.path.join(herringlib, 'herringlib', 'templates'))\n for herringlib in HerringFile.herringlib_paths]:\n\n info(\"template directory: %s\" % template_dir)\n # noinspection PyArgumentEqualDefault\n template.generate(template_dir, defaults, overwrite=False)", "def get_config():\n name = 'dynamic_pricing'\n num_products = 5\n scale = 1\n noise_var = 10\n p_max = 1\n\n agents = collections.OrderedDict(\n [('bsPricing',\n functools.partial(BootstrapDynamicPricing,\n num_products, scale, noise_var, p_max))]\n )\n\n environments = collections.OrderedDict(\n [('env',\n functools.partial(DynamicPricing,\n num_products, scale, noise_var, p_max))]\n )\n experiments = collections.OrderedDict(\n [(name, ExperimentNoAction)]\n )\n n_steps = 80\n n_seeds = 2000\n config = Config(name, agents, environments, experiments, n_steps, n_seeds)\n return config", "def config():\n if app.args.ui_mode == \"jinja\":\n ui_config = {\n \"p1\": {\n \"options\": {\n \"lineNumbers\": True,\n \"theme\":\"material\",\n \"lineWrapping\" : True,\n \"mode\": \"yaml\",\n \"indentUnit\": 2,\n \"tabSize\": 2\n },\n \"title\": \"DATA\",\n \"inventory\": bool(app.args.inventory_source),\n \"b1\": {\n \"icon\": None,\n \"show\": False,\n \"text\": None,\n \"url\": None\n }\n },\n \"p2\": {\n \"options\": {\n \"lineNumbers\": True,\n \"theme\": \"material\",\n \"lineWrapping\" : True,\n \"mode\": \"jinja2\"\n },\n \"title\": \"RENDER\",\n \"b1\": {\n \"icon\": \"create\",\n \"show\": True,\n \"text\": \"Render\",\n \"url\": \"/render\"\n }\n },\n \"p3\": {\n \"options\": {\n \"lineNumbers\": True,\n \"theme\": \"material\",\n \"lineWrapping\" : True,\n \"mode\": 'text'\n },\n \"title\": \"RESULT\",\n \"b1\": {\n \"icon\": \"link\",\n \"show\": bool(app.args.url),\n \"text\": \"link\"\n }\n }\n }\n elif app.args.ui_mode == \"schema\":\n ui_config = {\n \"p1\": {\n \"options\": {\n \"lineNumbers\": True,\n \"theme\":\"material\",\n \"lineWrapping\" : True,\n \"mode\": \"yaml\",\n \"indentUnit\": 2,\n \"tabSize\": 2\n },\n \"title\": \"DATA\",\n \"inventory\": bool(app.args.inventory_source),\n \"b1\": {\n \"icon\": \"create\",\n \"show\": True,\n \"text\": \"schema\",\n \"url\": 
\"/schema\"\n }\n },\n \"p2\": {\n \"options\": {\n \"lineNumbers\": True,\n \"theme\": \"material\",\n \"lineWrapping\" : True,\n \"mode\": \"yaml\"\n },\n \"title\": \"SCHEMA\",\n \"b1\": {\n \"icon\": \"check\",\n \"show\": True,\n \"text\": \"Validate\",\n \"url\": \"/validate\"\n }\n },\n \"p3\": {\n \"options\": {\n \"lineNumbers\": True,\n \"theme\": \"material\",\n \"lineWrapping\" : True,\n \"mode\": \"yaml\"\n },\n \"title\": \"VALIDATION SUCCESS/ERRORS\",\n \"b1\": {\n \"icon\": \"link\",\n \"show\": bool(app.args.url),\n \"text\": \"link\"\n }\n }\n }\n return jsonify(ui_config)", "def generate_config():\n\n return {\n \"email_subject\": DEFAULT_EMAIL_SUBJECT,\n \"from_email\": DEFAULT_FROM_EMAIL,\n \"to_email\": DEFAULT_TO_EMAIL,\n \"url\": DEFAULT_URL,\n \"start_value\": DEFAULT_START_VALUE,\n \"look_ahead\": DEFAULT_LOOK_AHEAD,\n \"slide_window\": DEFAULT_SLIDE_WINDOW,\n }", "def setup(self) -> None:\n if self.opt.textfile:\n with codecs.open(self.opt.textfile, 'r',\n config.textfile_encoding) as f:\n self.opt.text = f.read()\n else:\n # Translating the \\\\n into binary \\n if given from command line\n self.opt.text = self.opt.text.replace('\\\\n', '\\n')\n\n if self.opt.talk_page:\n self.generator = pagegenerators.PageWithTalkPageGenerator(\n self.generator, return_talk_only=True)", "def __init__(self):\n self._create_options()\n self._create_sections()", "def create_routine_template(self):\n super().create_routine_template()\n transition_name = self.get_param_value(\"transition_name\",\n qubit=self.qubit)\n\n # Adaptive qubit spectroscopy\n if transition_name == 'ge':\n aqs_settings = {\n \"qubits\": self.qubits\n }\n self.add_step(AdaptiveQubitSpectroscopy,\n 'adaptive_qubit_spectroscopy',\n aqs_settings)\n\n # Pi-Pulse calibration\n pipulse_settings = {\n \"qubits\": self.qubits,\n \"settings\": {\n \"PiPulseCalibration\": {\n \"General\": {\n \"transition_name\": transition_name,\n }\n }\n }\n }\n self.add_step(PiPulseCalibration, 'pi_pulse_calibration',\n pipulse_settings)\n\n # Decision step\n decision_settings = {}\n self.add_step(self.Decision, 'decision', decision_settings)", "def setUp(self):\n BuilderTestsMixin.setUp(self)\n self.builder = DocBuilder()\n self.howtoDir = FilePath(self.mktemp())\n self.howtoDir.createDirectory()\n self.templateFile = self.howtoDir.child(\"template.tpl\")\n self.templateFile.setContent(self.template)", "def config( **kwargs ):", "def module_config_template():\n template = {\n 'gce_instance_performance': {\n 'module': 'decisionengine_modules.GCE.sources.GCEInstancePerformance',\n 'name': 'GCEInstancePerformance',\n 'parameters': {\n 'csv_file': '/path/to/csv_file',\n }\n }\n }\n print('Entry in channel configuration')\n pprint.pprint(template)", "def __init__(self, setupName):\n\t\timport revitron\n\t\tself.options = revitron.DB.DWGExportOptions().GetPredefinedOptions(\n\t\t revitron.DOC,\n\t\t setupName\n\t\t)", "def create_template(self, path: Path, dot_cookietemple: Optional[dict]) -> None:\n self.web_struct.language = cookietemple_questionary_or_dot_cookietemple(function='select',\n question='Choose between the following languages',\n choices=['python'],\n default='python',\n dot_cookietemple=dot_cookietemple,\n to_get_property='language')\n\n # prompt the user to fetch general template configurations\n super().prompt_general_template_configuration(dot_cookietemple)\n\n # switch case statement to prompt the user to fetch template specific configurations\n switcher: Dict[str, Any] = {\n 'python': self.web_python_options,\n }\n 
switcher.get(self.web_struct.language)(dot_cookietemple) # type: ignore\n # call handle function for specified language\n self.__getattribute__(f'handle_web_project_type_{self.web_struct.language}')(dot_cookietemple)\n # call handle function for specified webtype according to the chosen language\n self.__getattribute__(f'handle_{self.web_struct.webtype.lower()}_{self.web_struct.language}')(dot_cookietemple)\n # call option function for specified framework (if any)\n framework = self.web_struct.web_framework.lower()\n if framework:\n self.__getattribute__(f'{self.web_struct.webtype.lower()}_{framework}_options')(dot_cookietemple)\n\n self.web_struct.is_github_repo, self.web_struct.is_repo_private, self.web_struct.is_github_orga, self.web_struct.github_orga = \\\n prompt_github_repo(dot_cookietemple)\n # if repo owner is a github orga, update username\n if self.web_struct.is_github_orga:\n self.web_struct.github_username = self.web_struct.github_orga\n # create the project (TODO COOKIETEMPLE: As for now (only Flask) this works. Might need to change this in future.\n super().create_template_with_subdomain_framework(self.TEMPLATES_WEB_PATH, self.web_struct.webtype, self.web_struct.web_framework.lower())\n # clean project for advanced or basic setup\n self.basic_or_advanced_files_with_frontend(self.web_struct.setup_type, self.web_struct.frontend.lower())\n\n # switch case statement to fetch the template version\n switcher_version = {\n 'python': self.WEB_WEBSITE_PYTHON_TEMPLATE_VERSION\n }\n\n self.web_struct.template_version, self.web_struct.template_handle\\\n = switcher_version.get(self.web_struct.language), f'web-{self.web_struct.webtype}-{self.web_struct.language.lower()}' # type: ignore\n\n # perform general operations like creating a GitHub repository and general linting\n super().process_common_operations(path=Path(path).resolve(), domain='web',\n subdomain=self.web_struct.webtype,\n language=self.web_struct.language,\n dot_cookietemple=dot_cookietemple)", "def provider_setup(cls, args, config):\n if len(args) < 1:\n print \"USAGE: molns provider setup name\"\n print \"\\tCreates a new provider with the given name.\"\n return\n # check if provider exists\n try:\n provider_obj = config.get_object(args[0], kind='Provider')\n except DatastoreException:\n # ask provider type\n print \"Select a provider type:\"\n for n, p in enumerate(VALID_PROVIDER_TYPES):\n print \"\\t[{0}] {1}\".format(n, p)\n while True:\n try:\n provider_ndx = int(raw_input_default(\"Enter the number of type:\", default='0'))\n provider_type = VALID_PROVIDER_TYPES[provider_ndx]\n break\n except (ValueError, IndexError):\n pass\n logging.debug(\"Provider type '{0}'\".format(provider_type))\n # Create provider\n try:\n provider_obj = config.create_object(name=args[0], ptype=provider_type, kind='Provider')\n except DatastoreException as e:\n logging.exception(e)\n print e\n return\n print \"Enter configuration for provider {0}:\".format(args[0])\n setup_object(provider_obj)\n config.save_object(provider_obj, kind='Provider')\n\n cls.provider_initialize(args[0], config)", "def create(cls, vlist):\n IndexBacklog = Pool().get('elasticsearch.index_backlog')\n Product = Pool().get('product.product')\n\n templates = super(Template, cls).create(vlist)\n products = []\n for template in templates:\n products.extend([Product(p) for p in template.products])\n IndexBacklog.create_from_records(products)\n return templates", "def init():\n\n @click.command()\n @click.option('--approot', type=click.Path(exists=True),\n 
envvar='TREADMILL_APPROOT', required=True)\n @click.option('--instance', help='Publisher instance.')\n def run(approot, instance):\n \"\"\"Starts discovery publisher process.\"\"\"\n tm_env = appenv.AppEnvironment(approot)\n publisher = endpoints.EndpointPublisher(tm_env.endpoints_dir,\n context.GLOBAL.zk.conn,\n instance=instance)\n publisher.run()\n\n return run", "def configure(self, options, conf):\n pass", "def pre_service_template_create(self, resource_dict):\n pass", "def __init__(self):\n super().__init__()\n TemplateEngineFactory.register_factory('Jinja2Engine', Jinja2Engine.Factory)\n\n step1 = CommonMBeansChainedTransfiguration()\n step2 = SpliByApplicationTransfiguration()\n step3 = ConsolidateToFinalOutput()\n\n self.add(step1)\n self.add(step2)\n self.add(step3)", "def module_config_template():\n\n d = {\"AWSPricePerformancePublisher\": {\n \"module\": \"modules.AWS.publishers.AWS_price_performance\",\n \"name\": \"AWSPricePerformancePublisher\",\n }, }\n print(\"Entry in channel cofiguration\")\n pprint.pprint(d)\n print(\"where\")\n print(\"\\t name - name of the class to be instantiated by task manager\")\n print(\"\\t publish_to_graphite - publish to graphite if True\")\n print(\"\\t graphite_host - graphite host name\")", "def __init__(__self__, *,\n generation: Optional[pulumi.Input[Union[str, 'ConfigurationServiceGeneration']]] = None,\n settings: Optional[pulumi.Input['ConfigurationServiceSettingsArgs']] = None):\n if generation is None:\n generation = 'Gen1'\n if generation is not None:\n pulumi.set(__self__, \"generation\", generation)\n if settings is not None:\n pulumi.set(__self__, \"settings\", settings)", "def configure(args):\n print('Configures HPC fleet with given name \"{}\"'.format(args))", "def get_config_template() -> dict:\n return {\n VENE_PAYMENTS_BAMBORA_API_URL: (str, \"https://payform.bambora.com/pbwapi\"),\n VENE_PAYMENTS_BAMBORA_API_KEY: str,\n VENE_PAYMENTS_BAMBORA_API_SECRET: str,\n VENE_PAYMENTS_BAMBORA_PAYMENT_METHODS: list,\n }", "def configure(self, yaml_file):\n with open(yaml_file, \"r\") as f:\n panorama_conf = yaml.load(f)\n\n # Configuring factories to:\n # - get only title, date and category from article metadata\n # - rename the first 4 tags with the names defined below\n\n self.data_factory = DataFactory(\n metadata_columns=panorama_conf[\"metadata_columns\"],\n tag_columns=panorama_conf[\"tag_columns\"],\n )\n self.chart_factory = ChartFactory()\n\n # Configuring the charts if a chart configuration information is available in the conf file\n if \"chart_conf\" in panorama_conf:\n self.chart_factory.chart_conf = panorama_conf[\"chart_conf\"]\n\n # Creating the configurations\n for yaml_conf in panorama_conf[\"confs\"]:\n chart_id = yaml_conf[\"chart_id\"]\n try:\n producer = self._create_producer(yaml_conf[\"producer\"])\n renderer = self._create_renderer(yaml_conf[\"renderer\"], chart_id)\n self.append_conf(\n chart_id=chart_id, producer=producer, renderer=renderer\n )\n except ValueError as err:\n logger.exception(\n \"Error while initializing [%s] conf. 
-> chart not available.\",\n chart_id,\n )", "def configure(process, options):\n\n # create a TFileService for output\n process.TFileService = cms.Service(\n \"TFileService\",\n fileName = cms.string(options.outputFile),\n closeFileFast = cms.untracked.bool(True),\n )\n\n # enable the JSON filter (if given)\n if options.jsonFilterFile:\n process.enable_json_lumi_filter(options.jsonFilterFile)\n\n # -- configure pipelines\n\n _rng_engines = {}\n for jet_collection in options.jetCollections:\n # create modules with nominal configurations for each jet collection\n init_modules(process, options, jet_algo_name=jet_collection)\n\n # -- set up pipelines (different for data and MC)\n\n if options.isData:\n # data -> only add pipelines with JEC shifts (i.e. no JER smearing)\n #for jec_shift in list(JEC_PIPELINES) + (list(JEC_UNCERTAINTY_SOURCE_SETS) if options.doJECUncertaintySources else []):\n for jec_shift in list(JEC_PIPELINES):\n _pipeline_suffix = \"Nominal\" if jec_shift.endswith(\"Nominal\") else jec_shift\n setup_pipeline(\n process, options,\n pipeline_name=\"{}{}\".format(jet_collection, _pipeline_suffix),\n jet_algo_name=jet_collection,\n jec_shift=jec_shift,\n )\n _rng_engines.update({\n \"ntuple{}{}\".format(jet_collection, _pipeline_suffix) : cms.PSet(\n initialSeed=cms.untracked.uint32(497931),\n engineName=cms.untracked.string('TRandom3')\n )\n })\n else:\n # mc -> add pipelines with both JEC shifts and JER smearing\n #for jec_shift in list(JEC_PIPELINES) + (list(JEC_UNCERTAINTY_SOURCE_SETS) if options.doJECUncertaintySources else []):\n for jec_shift in list(JEC_PIPELINES):\n for jer_variation in JER_PIPELINES:\n # do not add pipelines with more than one active variation (JER or JEC)\n if (jer_variation != 'JERNominal' and jec_shift != 'JECNominal'):\n continue\n\n # take pipeline name from active variation (either JER or JEC)\n _pipeline_suffix = \"Nominal\"\n for _suf in (jec_shift, jer_variation):\n if not _suf.endswith('Nominal'):\n _pipeline_suffix = _suf\n break\n\n setup_pipeline(\n process, options,\n pipeline_name=\"{}{}\".format(jet_collection, _pipeline_suffix),\n jet_algo_name=jet_collection,\n jec_shift=jec_shift,\n jer_variation=jer_variation,\n )\n # store config for random number engine (added to service later)\n _rng_engines.update({\n # key is name of module that needs the RNG engine\n \"smearedCorrectedJets{}{}{}\".format(jet_collection, jec_shift, jer_variation) : cms.PSet(\n initialSeed=cms.untracked.uint32(83),\n engineName=cms.untracked.string('TRandom3')\n ),\n \"ntuple{}{}\".format(jet_collection, _pipeline_suffix) : cms.PSet(\n initialSeed=cms.untracked.uint32(497931),\n engineName=cms.untracked.string('TRandom3')\n )\n })\n\n # add pipeline without JEC/JER shifts (i.e. 
raw uncorrected jets)\n if False:\n setup_pipeline(\n process, options,\n pipeline_name=\"{}{}\".format(jet_collection, 'Raw'),\n jet_algo_name=jet_collection,\n jec_shift=None\n )\n _rng_engines.update({\n \"ntuple{}{}\".format(jet_collection, 'Raw') : cms.PSet(\n initialSeed=cms.untracked.uint32(497931),\n engineName=cms.untracked.string('TRandom3')\n )\n })\n\n # random number generator service (for JER smearing)\n if _rng_engines:\n process.add_module(\n \"RandomNumberGeneratorService\",\n cms.Service(\n \"RandomNumberGeneratorService\",\n **_rng_engines\n )\n )\n\n # just in case we need it\n return process", "def __init__(self, config: Config) -> None:\n self.config = config\n\n faker_config = self.config.faker\n self.faker = Faker(locale=faker_config.locale)\n\n self.fakes = {}", "def get_config_template(self) -> cconfig.Config:", "def run(input, size, engine, resize_method, imagesize, api_key):\n\tengine_list = ['duckgo', 'bing', 'bing_api', 'flickr_api']\n\tclick.clear()\n\n\tif input and engine in engine_list:\n\t\tfactory = SearchEngineFactory(input,size,input,resize_method,\"dataset\",imagesize, engine, api_key)\n\t\t# Remove corrupt files\n\t\tremove_corrupt(\"dataset\")\n\n\telse:\n\t\trich.print(\"Please provide a valid name\")", "def site_create_from_dict(self, parameters: dict):\n site_name = parameters[KnownParameters.SITE_NAME.value]\n site_path = self.create_physical_path_for_virtual_path(site_name)\n bindings = parameters[KnownParameters.SITE_BINDING.value]\n # use default app pool by default\n pool = parameters.get(KnownParameters.POOL_NAME.value, None)\n\n self.site_create(site_name, site_path, bindings, pool)", "def configure(self):", "def configure(self):", "def configure(self):", "def configure(self):", "def __init__(self, jinja2_env, info):\n super(Finding, self).__init__(jinja2_env)\n self._info = info", "def default_configs(cls):\n config = super().default_configs()\n config.update(\n {\n \"entry_type\": \"ft.onto.base_ontology.Document\",\n \"model_name\": \"ktrapeznikov/biobert_v1.1_pubmed_squad_v2\",\n \"question\": \"Where do I live\",\n \"max_answer_len\": 15,\n \"cuda_devices\": -1,\n \"handle_impossible_answer\": False,\n }\n )\n return config", "def __init__(self, config):\n logging.info(\"Creating footprint\")\n # self.infra = yaml.load(config)\n self.infra = config\n self.footprint_name = self.infra.get(\"footprint\", \"ehw\")\n self.images = self.infra.get(\"images\")\n self.old_images = self.infra.get(\"old_images\", [])\n self.container_name = \"%s-metadata\" % self.footprint_name\n \n self.admin_password = self.infra.get('admin-password')\n self.savefile = self.infra.get(\"footprint\", \"outfile\") + \"-save.yaml\"\n if os.path.exists(self.savefile):\n self.saved_images = yaml.load(open(self.savefile))\n self.footprint_status=self.infra.get(\"footprint_status\", None)\n logging.debug(\"Loaded saved images: %s\" % self.saved_images)\n # sys.exit(0) ", "def main(self):\n\t\thtml_file = codecs.open(\"./templates/results.html\",'w','utf-8')\n\t\thtml_file.write(\"\"\"<!DOCTYPE html>\\n<html lang=\"en\"><head><meta http-equiv=\"Content-Type\" content=\"text/html; charset=UTF-8\">\\n<meta charset=\"utf-8\">\\n<meta http-equiv=\"X-UA-Compatible\" content=\"IE=edge\">\\n<meta name=\"viewport\" content=\"width=device-width, initial-scale=1\">\\n<meta name=\"description\" content=\"Results page for Yelp Road Trip app\">\\n<meta name=\"author\" content=\"Alexander Ko\">\\n\\n<title>Yelp Road Trip results</title>\\n\\n<!-- Bootstrap core CSS -->\\n<link 
href=\"./../static/css/bootstrap.min.css\" rel=\"stylesheet\">\\n<!-- Custom styles for this template -->\\n<link href=\"./../static/css/jumbotron-narrow.css\" rel=\"stylesheet\">\\n<link href=\"./../static/css/navbar-fixed-top.css\" rel=\"stylesheet\">\\n\\n</head>\\n\\n\\n<body class=\" hasGoogleVoiceExt\">\\n\\n<div class=\"container\">\\n\\n<nav class=\"navbar navbar-default navbar-fixed-top\" role=\"navigation\">\\n <div class=\"container-fluid\">\\n <!-- Brand and toggle get grouped for better mobile display -->\\n <div class=\"navbar-header\">\\n <button type=\"button\" class=\"navbar-toggle\" data-toggle=\"collapse\" data-target=\"#bs-example-navbar-collapse-1\">\\n <span class=\"sr-only\">Toggle navigation</span>\\n <span class=\"icon-bar\"></span>\\n <span class=\"icon-bar\"></span>\\n <span class=\"icon-bar\"></span>\\n </button>\\n <a class=\"navbar-brand\" href=\"{{ url_for(\\'show_input_form\\') }}\">Yelp Road Trip</a>\\n\\n <!--<div class=\"page-header\">\\n <h1>Yelp</h1>\\n <div class=\"tagline\">Road trip</div>\\n </div>-->\\n </div>\\n\\n <!-- Collect the nav links, forms, and other content for toggling -->\\n <div class=\"collapse navbar-collapse\" id=\"bs-example-navbar-collapse-1\">\\n <ul class=\"nav navbar-nav\">\\n <li><a href=\"{{ url_for(\\'map\\') }}\">Map</a></li>\\n <li class=\"active\"><a href=\"#\">Results</a></li>\\n </ul>\\n\\n </div><!-- /.navbar-collapse -->\\n </div><!-- /.container-fluid -->\\n</nav>\\n\\n<div class=\"row marketing\">\"\"\")\n\n\t\t# I do this twice bc there are 2 columns for the HTML file\n\t\tself.write_resturants(self.sorted_resto_keys[:4], html_file)\n\t\thtml_file.write('\\n</div>\\n')\n\t\tself.write_resturants(self.sorted_resto_keys[4:], html_file)\n\n\t\thtml_file.write(\"\"\"\\n</div> <!-- /container -->\\n\\n\\n<!-- Bootstrap core JavaScript\\n================================================== -->\\n<!-- Placed at the end of the document so the pages load faster -->\\n\\n\\n<iframe id=\"rdbIndicator\" width=\"100%\" height=\"270\" border=\"0\" src=\"./results_files/indicator.html\" style=\"display: none; border: 0; position: fixed; left: 0; top: 0; z-index: 2147483647\"></iframe></body></html>\"\"\")\n\n\t\thtml_file.close()", "def configure(self):\n\n pass", "def _CreateCfgFile():\n default_cfg = \"\"\"\nproject: \"fake_project\"\nzone: \"fake_zone\"\nstorage_bucket_name: \"fake_bucket\"\nclient_id: \"fake_client_id\"\nclient_secret: \"fake_client_secret\"\n\"\"\"\n return default_cfg", "def generate_config_template():\n lines = ['# Lines starting with # will be skipped.']\n lines.append('# Only one argument on each line.')\n lines.append('#-s This option is always assumed to be true.')\n lines.append('#-p')\n lines.append('#-m')\n lines.append('#-o')\n lines.append('#-c')\n lines.append('-l')\n lines.append('#-a')\n lines.append('#-d')\n\n with open('export_config.txt', 'wb') as f_new:\n f_new.write('\\r\\n'.join(lines))\n print 'Template generated. 
Edit this file as you please and call this script '\\\n 'with the -f option enabled.'", "def trigger_configuration(self):\n topic_name, ok = QInputDialog.getItem(self._widget, \"Select topic name\", \"Topic name\", rostopic.find_by_type('sensor_msgs/Image'))\n if ok:\n self._create_subscriber(topic_name)\n\n available_rosservices = []\n for s in rosservice.get_service_list():\n try:\n if rosservice.get_service_type(s) in _SUPPORTED_SERVICES:\n available_rosservices.append(s)\n except:\n pass\n\n srv_name, ok = QInputDialog.getItem(self._widget, \"Select service name\", \"Service name\", available_rosservices)\n if ok:\n self._create_service_client(srv_name)", "def template(c, release=\"url-shortener\"):\n c.run(f\"helm template {release} {HELM_CHART_DIR} > ./generated-deployment.yml\")", "def __init__(self, cfg_file, default_timeout=10):\n self.driver = None\n self._template_table = None\n self.config = self.read_config(cfg_file)\n self.instance = self.config.get('login', 'instance')\n self.default_timeout = default_timeout", "def __init__(__self__,\n resource_name: str,\n args: ProvisioningTemplateArgs,\n opts: Optional[pulumi.ResourceOptions] = None):\n ...", "def __init__(__self__, *,\n discovery_spec: pulumi.Input['ZoneDiscoverySpecArgs'],\n lake: pulumi.Input[str],\n location: pulumi.Input[str],\n resource_spec: pulumi.Input['ZoneResourceSpecArgs'],\n type: pulumi.Input[str],\n description: Optional[pulumi.Input[str]] = None,\n display_name: Optional[pulumi.Input[str]] = None,\n labels: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,\n name: Optional[pulumi.Input[str]] = None,\n project: Optional[pulumi.Input[str]] = None):\n pulumi.set(__self__, \"discovery_spec\", discovery_spec)\n pulumi.set(__self__, \"lake\", lake)\n pulumi.set(__self__, \"location\", location)\n pulumi.set(__self__, \"resource_spec\", resource_spec)\n pulumi.set(__self__, \"type\", type)\n if description is not None:\n pulumi.set(__self__, \"description\", description)\n if display_name is not None:\n pulumi.set(__self__, \"display_name\", display_name)\n if labels is not None:\n pulumi.set(__self__, \"labels\", labels)\n if name is not None:\n pulumi.set(__self__, \"name\", name)\n if project is not None:\n pulumi.set(__self__, \"project\", project)", "def wfpdoc_factory(**kwargs):\n # it seems we cannot use django-dynamic-fixtures with django-polymorphic\n # therefore we create the fixture the old fashion way\n wfpdoc_number = random.randint(0, 1000)\n title = kwargs.pop('title', None)\n if not title:\n title = 'Static map N. %s' % wfpdoc_number\n abstract = 'Abstract for static map N. %s' % wfpdoc_number\n # we need to upload a file\n imgfile = StringIO.StringIO(\n 'GIF87a\\x01\\x00\\x01\\x00\\x80\\x01\\x00\\x00\\x00\\x00ccc,\\x00'\n '\\x00\\x00\\x00\\x01\\x00\\x01\\x00\\x00\\x02\\x02D\\x01\\x00;')\n doc_file = SimpleUploadedFile(\n '%s.gif' % wfpdoc_number,\n imgfile.read(),\n 'image/gif')\n owner = rol_capooti()\n wfpdoc = WFPDocument(title=title, abstract=abstract, owner=owner, doc_file=doc_file)\n # associate a layer. 
TODO also associate maps in place of layers\n id_list = list(xrange(Layer.objects.all().count()))\n random.shuffle(id_list)\n layer = Layer.objects.all()[id_list[0]]\n layer_ct = ContentType.objects.get(app_label=\"layers\", model=\"layer\")\n wfpdoc.content_type = layer_ct\n wfpdoc.object_id = layer.id\n wfpdoc.save()\n\n # append some (0 to 3) categories\n id_list = list(xrange(Category.objects.all().count()))\n random.shuffle(id_list)\n for i in range(0, 3):\n category = Category.objects.all()[id_list[i]]\n wfpdoc.categories.add(category)\n\n # set permissions\n perm_spec = {\n \"users\": {\n \"admin\": [\n \"change_resourcebase\",\n \"change_resourcebase_permissions\",\n \"view_resourcebase\"]},\n \"groups\": {}}\n wfpdoc.set_permissions(perm_spec)\n\n return wfpdoc", "def __init__(self, **configs): \n self.pkg = \"sdpp_explore\"\n\n self.__dict__.update(configs)", "def _create(self, **kwargs):\n\n config = misc_utils.resolve_config(\n kwargs.pop('config', None),\n kwargs.pop('config_file', None)\n )\n\n return self._make_request(method='POST', config=config)", "def __init__(__self__, *,\n config: pulumi.Input['ConfigArgs'],\n instance_id: pulumi.Input[str],\n location: Optional[pulumi.Input[str]] = None,\n name: Optional[pulumi.Input[str]] = None,\n project: Optional[pulumi.Input[str]] = None):\n pulumi.set(__self__, \"config\", config)\n pulumi.set(__self__, \"instance_id\", instance_id)\n if location is not None:\n pulumi.set(__self__, \"location\", location)\n if name is not None:\n pulumi.set(__self__, \"name\", name)\n if project is not None:\n pulumi.set(__self__, \"project\", project)", "def describe(self, template='projection_default.txt', engine='default'):\n raise NotImplementedError", "def __init__(self, jinja_template, report_generator):\n self._jinja_template = jinja_template\n self._report_generator = report_generator\n self._outp = {} # all section-specific information for the template\n # will be stored here\n self._config = self._report_generator.config", "def configure(self):\n pass", "def configure(self):\n pass", "def setUp(self):\n super(TestDOSCreateService, self).setUp()\n self.domain_list = [{\"domain\": \"mywebsite%s.com\" % uuid.uuid1()}]\n self.origin_list = [{\"origin\": \"mywebsite1.com\",\n \"port\": 443,\n \"ssl\": False}]\n self.caching_list = [{\"name\": \"default\", \"ttl\": 3600},\n {\"name\": \"home\",\n \"ttl\": 1200,\n \"rules\": [{\"name\": \"index\",\n \"request_url\": \"/index.htm\"}]}]\n self.restrictions_list = [\n {\n u\"name\": u\"website only\",\n u\"rules\": [\n {\n u\"name\": \"mywebsite.com\",\n u\"referrer\": \"mywebsite.com\"\n }\n ]\n }\n ]\n self.service_name = str(uuid.uuid1())\n self.flavor_id = self.test_config.default_flavor\n self.MAX_ATTEMPTS = 30\n\n if self.test_config.generate_flavors:\n # create the flavor\n self.flavor_id = str(uuid.uuid1())\n self.client.create_flavor(flavor_id=self.flavor_id,\n provider_list=[{\n \"provider\": \"fastly\",\n \"links\": [{\"href\": \"www.fastly.com\",\n \"rel\": \"provider_url\"}]}])", "def setup_portlets(context):\n marker_file = '{0}.txt'.format(PROJECTNAME)\n if context.readDataFile(marker_file) is None:\n return\n\n portal = api.portal.get()\n fix_image_links_in_static_portlet(portal)\n set_flowplayer_portlet(portal)" ]
[ "0.54952186", "0.5411151", "0.5359625", "0.53443027", "0.53443027", "0.52677274", "0.52394605", "0.52167094", "0.5214136", "0.5174137", "0.51633024", "0.5150547", "0.51282334", "0.5121639", "0.5080549", "0.5079729", "0.5071006", "0.5065138", "0.506349", "0.5042716", "0.5035949", "0.5010344", "0.5003982", "0.49866182", "0.49772686", "0.49770367", "0.4975985", "0.49645746", "0.49624753", "0.4945803", "0.49327374", "0.49314848", "0.49070066", "0.4903917", "0.48887518", "0.48838767", "0.48707226", "0.48624775", "0.4854353", "0.4848824", "0.4848769", "0.48287082", "0.48184472", "0.4813539", "0.48034844", "0.48006952", "0.47861895", "0.47840953", "0.4780879", "0.47738352", "0.4772628", "0.4772054", "0.47706664", "0.4767051", "0.47662324", "0.4763666", "0.47571328", "0.47560817", "0.47542533", "0.4751492", "0.47435954", "0.47369447", "0.4734001", "0.4730109", "0.47296855", "0.47265354", "0.4724081", "0.47176415", "0.47158936", "0.47138453", "0.47121775", "0.47107664", "0.47064638", "0.46956035", "0.46909848", "0.46909848", "0.46909848", "0.46909848", "0.4689182", "0.46880433", "0.4685636", "0.4683748", "0.46831894", "0.4673163", "0.46698713", "0.4663326", "0.46606755", "0.46557927", "0.46556857", "0.4655641", "0.46555522", "0.46505824", "0.46499813", "0.46419477", "0.4641778", "0.46288475", "0.4628563", "0.4628563", "0.46278962", "0.46233842" ]
0.5189562
9
>>> create_grid(4)
[['0', '0', '0', '0'], ['0', '0', '0', '0'], ['0', '0', '0', '0'], ['0', '0', '0', '0']]
def create_grid(size):
    grid = []
    for i in range(size):
        row = ['0']*size
        grid.append(row)
    return grid
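A quick usage check of the function as reconstructed above, consistent with the doctest in the query:

grid = create_grid(2)
print(grid)       # [['0', '0'], ['0', '0']]
grid[0][0] = 'X'  # each row is a fresh list, so mutating one row leaves the others intact
print(grid)       # [['X', '0'], ['0', '0']]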
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def create_grid(grid):\r\n for i in range(4):\r\n grid.append([0]*4)\r\n return grid", "def create_grid(grid):\r\n for i in range (4):\r\n grid.append ([])\r\n for j in range (4):\r\n grid[i].append (0)", "def create_grid(grid):\r\n inner = [0]*4\r\n for i in range(4):\r\n grid.append(inner[:])", "def create_grid(grid):\r\n for i in range(4):\r\n grid.append([0,0,0,0])", "def grid_maker(width, height):\n grid = [['.' for i in range(width)] for j in range(height)]\n return grid", "def create_grid(height, width):\n grid = []\n \n for r in range(height):\n row = [0] * width # a row containing width 0s\n grid += [row]\n\n return grid", "def gen_grid(grid_width, grid_height):\n\n grid = []\n for x in range(0, grid_width):\n grid.append([])\n for y in range(0, grid_height):\n grid[x].append(False)\n return grid", "def generate_grid(height, width):\n return [[random.randint(0, 9) for _ in range(width)] for _ in range(height)]", "def make_grid(X,Y): \r\n grid = []\r\n for j in range(Y):\r\n row = []\r\n for i in range(X):\r\n row.append( block((i,j)) )\r\n grid.append(row)\r\n return grid", "def create_grid(self):\n return [[0] * self.width for _ in range(self.height)]", "def make_grid(self):\n\n\t\tinit_grid = (self.grid_width//2, self.grid_height//2)\n\t\tgrid_list = []\n\n\t\tfor i in range(self.canv_width//self.grid_width):\n\t\t\tfor j in range(self.canv_height//self.grid_height):\n\t\t\t\tif j == 0 or j%2 ==0:\n\t\t\t\t\tgrid_list.append((init_grid[0]+i*self.grid_width, init_grid[1]+j*self.grid_height))\n\t\t\t\t\t\n\t\t\t\telse:\n\t\t\t\t\tgrid_list.append((grid_list[-1][0]+(self.grid_width//2), init_grid[1]+j*self.grid_height))\n\n\t\treturn grid_list", "def generate_grid():\n field = []\n three_lst = []\n for three_let in range(0, 3):\n three_lst = []\n for i in range(0, 3):\n three_lst.append(chr(random.randint(97, 122)))\n field.append(three_lst)\n return field", "def create_rand_grid(grid_size):\n\n return [[randint(0, 1) for x in range(0, grid_size)] for y in range(0, grid_size)]", "def __init__(self) -> None:\n self.row = 6\n self.col = 7\n self.grid = []\n\n for y in range(self.row):\n temp_row = []\n for x in range(self.col):\n temp_row.append(\" \")\n self.grid.append(temp_row)", "def create_empty_grid(width, height):\n return [[None] * width for _ in range(height)]", "def makeBoard(m,n):\n if m < 2 or n < 2:\n raise ValueError('Grid must be at least 2x2')\n grid = []\n for y in range(n):\n row = []\n for x in range(m):\n value = 1 if random.randint(0,4) % 4 == 0 else 0\n if x==0 and y==0:\n value = 0\n if x==(m-1) and y==(n-1):\n value = 0\n row.append(value)\n grid.append(row)\n return grid", "def _create_grid_with_cells(self, width, height):\n grid = []\n for row in range(height):\n grid.append([])\n for column in range(width):\n if column % 2 == 1 and row % 2 == 1:\n grid[row].append(TILE_EMPTY)\n elif (\n column == 0 or row == 0 or column == width - 1 or row == height - 1\n ):\n grid[row].append(TILE_CRATE)\n else:\n grid[row].append(TILE_CRATE)\n grid[-2][-3] = TILE_EMPTY\n grid[1][0] = TILE_EMPTY\n return grid", "def make_grid(grid_size=(10, 10)):\n return np.zeros(grid_size, dtype=np.int16)", "def get_start_grid(cols=4, rows=4):\n\tgrid = [[\"\"]*cols for i in range(rows)]\n\tfor i in range(2):\n\t\tempties = get_empty_cells(grid)\n\t\ty,x = random.choice(empties)\n\t\tgrid[y][x] = 2 if random.random() < prob_2 else 4\n\treturn grid", "def get_start_grid(cols=4, rows=4):\n grid = [[0]*cols for i in range(rows)]\n for i in range(2):\n empties = get_empty_cells(grid)\n 
y,x = random.choice(empties)\n grid[y][x] = 2 if random.random() < 0.9 else 4\n return grid", "def create_grid(size_x, size_y, default=None):\n return [[default for _x in range(size_y)] for _y in range(size_x)]", "def regex_grid(n):\n cx = 2 ** (n - 1)\n cy = 2 ** (n - 1)\n grid = [[grid_numbering(n, i , j, cx, cy) for i in range(2 ** n)] for j in range(2 ** n)]\n \n return grid", "def create_board(rows, columns):\n res = [[0 for i in range(columns)] for j in range(rows)]\n return res", "def initialize(self, height, width,):\n grid = list()\n for x in xrange(height):\n grid.append(list())\n for y in xrange(width):\n grid[x].append(Node(x, y))\n return grid", "def make_board():\n return [[0 for i in range(8)] for i in range(8)]", "def build_grid(tiles, tile_size, grid_rows=None, grid_cols=None):\n if grid_rows is None or grid_cols is None:\n grid_rows = int(math.sqrt(len(tiles)))\n grid_cols = int(math.ceil(len(tiles) / grid_rows))\n\n grid = np.zeros(\n (grid_rows * tile_size[1], grid_cols * tile_size[0], 3), np.uint8)\n for tile_id, tile in enumerate(tiles):\n assert(tile.shape[0] == tile_size[1] and tile.shape[1] == tile_size[0])\n yy = int(tile_id / grid_cols)\n xx = tile_id % grid_cols\n grid[(yy * tile_size[1]):((yy + 1) * tile_size[1]),\n (xx * tile_size[0]):((xx + 1) * tile_size[0]), :] = tile\n return grid", "def create_initial_grid():\n\n\tgrid = {(x, y) : ' + ' for x in range(8) for y in range(8)}\n\n\t# Define initial positions \n\tgrid[(3,3)] = colors.RED + \"[I]\" + colors.STOP\n\tgrid[(4,3)] = colors.GREEN + \"[A]\" + colors.STOP\n\tgrid[(3,4)] = colors.GREEN + \"[A]\" + colors.STOP\n\tgrid[(4,4)] = colors.RED + \"[I]\" + colors.STOP\n\n\treturn grid", "def make_grid(dataset):\n top_left_lat = dataset[\"a\"][0]\n top_left_lng = dataset[\"a\"][1]\n top_right_lng = dataset[\"c\"][1]\n bot_left_lat = dataset[\"b\"][0]\n\n lng_row = []\n lat_col = []\n i = top_left_lng\n while i < top_right_lng:\n lng_row.append(round(i, 5))\n i += step\n j = bot_left_lat\n while j < top_left_lat:\n lat_col.append(round(j, 5))\n j += step\n out_grid = []\n for i in lat_col:\n row = []\n for j in lng_row:\n row.append(\"{0}:{1}:0\".format(i, j))\n out_grid.append(row)\n return out_grid", "def generate_grid(nrows, ncols, ndots):\n\n # Validation\n if nrows * ncols < ndots:\n raise Exception(\"ndots must be <= than grid size\")\n\n rows = np.arange(1, nrows + 1)\n cols = np.arange(1, ncols + 1)\n\n # Create empty matrix\n grid = np.empty((len(rows), len(cols), 2), dtype=np.intp)\n grid[..., 0] = rows[:, None]\n grid[..., 1] = cols \n\n return grid.reshape(nrows * ncols, -1)[:ndots]", "def create_sudoku(self)->list:\n grid = [[None for x in range(9)] for row in range(9)]\n for row in range(0,9):\n for column in range(0,9):\n if row <= 2 and column <=2:\n grid[row][column] = cell.Cell(0)\n elif row <= 2 and 3 <= column <= 5:\n grid[row][column] = cell.Cell(1)\n elif row <= 2 and 6 <= column <= 8:\n grid[row][column] = cell.Cell(2)\n elif 3 <= row <= 5 and column <= 2:\n grid[row][column] = cell.Cell(3)\n elif 3 <= row <= 5 and 3 <= column <= 5:\n grid[row][column] = cell.Cell(4)\n elif 3 <= row <= 5 and 6 <= column <= 8:\n grid[row][column] = cell.Cell(5)\n elif 6 <= row <= 8 and column <= 2:\n grid[row][column] = cell.Cell(6)\n elif 6 <= row <= 8 and 3 <= column <= 5:\n grid[row][column] = cell.Cell(7)\n elif 6 <= row <= 8 and 6 <= column <= 8:\n grid[row][column] = cell.Cell(8)\n return grid", "def make_grid(width, height):\n # Make a grid e.g. 
make_grid(2, 3) would be\n # [] [] []\n # [] [] [] \n # the row is the height and the col is the width\n # now fill each one of them with captial letters\n return {(row, col): choice(ascii_uppercase) \n for row in range(height)\n for col in range(width)\n }", "def make_grid(self, nx, ny):\n nx_vec = np.arange(nx)\n ny_vec = np.arange(ny)\n yv, xv = np.meshgrid(ny_vec, nx_vec)\n grid = np.stack((yv, xv), axis=2)\n grid = grid.reshape(1, 1, ny, nx, 2)\n return grid", "def init_grid(self):\n grid = []\n for i in range(self.settings['grid_size']):\n grid.append([])\n for j in range(self.settings['grid_size']):\n if [j, i] in self.settings['walls']:\n grid[i].append(g.WALL)\n else:\n grid[i].append(g.EMPTY)\n return grid", "def create_subgrid(self)->list:\n return [subgrid.Subgrid(i) for i in range(0, 9)]", "def test_Grid_creates_list_of_lists_of_None(mock_grid):\n # note: img_dim, spacing = (193, 129), 64\n # hence we get 4 points in y and 3 points in x\n exp = [[None, None, None], [None, None, None],\n [None, None, None], [None, None, None]]\n assert mock_grid._array == exp", "def visualize(grid, board_size=16):\n visual_grid = []\n for i in range(board_size):\n row = []\n for j in range(board_size):\n row.append(grid[(j, i)])\n visual_grid.append(row)\n print(visual_grid)", "def create_board(N):\n board = [[0 for x in range(N)] for y in range(N)] \n return board", "def createBoard(width, height):\n A = []\n for row in range(height):\n A += [createOneRow(width)]\n return A", "def grid(request):\n\n if 'number' in request.GET and request.GET['number']:\n n = int(request.GET['number'])\n else:\n n = 3;\n\n grid = []\n for row in xrange(n):\n r = []\n for cell in xrange(n):\n r.append(random_octal_str(9))\n grid.append(r)\n\n context = {'grid': grid,\n 'title': 'Grid',}\n return render(request, 'grid.html', context)", "def create_grid(images, n_rows=4, n_cols=4):\n k = min(n_rows * n_cols, len(images))\n indices = [i for i in range(k)]\n return _create_grid(images, indices, n_rows, n_cols)", "def random_grid(height, width):\n grid = create_grid(height, width)\n for r in range(1, height - 1):\n for c in range(1, width - 1):\n grid[r][c] = random.choice([0, 1])\n return grid", "def build(xaxis, yaxis, zaxis):\n matrix = []\n for floor in range(zaxis):\n roomnum = 1\n matrix.append([])\n for row in range(yaxis):\n matrix[floor].append([])\n for column in range(xaxis):\n matrix[floor][row].append(str(roomnum))\n roomnum += 1\n return matrix", "def generate_grid(alphabet):\n random_line = generate_line(alphabet)\n array = random_line[:]\n grid = []\n for i in range(len(random_line)):\n new_array = array[:]\n grid.append(new_array)\n last_char = array.pop()\n array.insert(0, last_char)\n\n mix_grid(grid)\n\n return grid", "def make_game_grid(self):\n return numpy.array([[random.choice(string.ascii_uppercase) for breath in range(self.grid_size)] for depth in\n range(self.grid_size)])", "def grid_image(output):\n grid = []\n for data in output:\n grid += [make_grid(data, nrow=5, normalize=True)]\n return grid", "def fill_grid(self):\n\n for row_margin, row in enumerate(range(self.rows)):\n self.grid.append([])\n\n for col_margin, col in enumerate(range(self.cols)):\n x = col*self.cell_size + col_margin\n y = row*self.cell_size + row_margin\n\n rect = pygame.Rect(x, y, self.cell_size, self.cell_size)\n\n cell = Cell(row, col, rect)\n\n if row == 7 and col == 3:\n cell.root = True\n self.root = cell\n elif row == 7 and col == 16:\n cell.goal = True\n self.goal = cell\n\n self.grid[row].append(cell)", "def 
copy_grid (grid):\r\n new_grid = []\r\n for i in range (4):\r\n new_grid.append ([])\r\n for j in range (4):\r\n new_grid[i].append (grid[i][j])\r\n return new_grid", "def make_grid(self):\n for k in range(0, NUM + 1):\n self.create_line(k * UNIT, 0, k * UNIT, SIZE, width=THICKNESS)\n self.create_line(0, k * UNIT, SIZE, k * UNIT, width=THICKNESS)", "def create_grid(self):\n\n # If called when a grid already exists create a new grid\n if self.grid:\n self.grid = []\n\n grid_pen = QPen(QColor(215, 215, 215), 1)\n w = 10000\n h = 10000\n self.addLine(-10000, 0, 10000, 0, QPen(QColor(0, 0, 0), 2))\n self.addLine(0, -10000, 0, 10000, QPen(QColor(0, 0, 0), 2))\n\n w = int(w / self.grid_spacing) * self.grid_spacing\n h = int(h / self.grid_spacing) * self.grid_spacing\n for i in range(-w, w, self.grid_spacing):\n if i == 0:\n pass\n else:\n line = self.addLine(-w, i, w, i, grid_pen)\n line.setZValue(-1)\n self.grid.append(line)\n for i in range(-h, h, self.grid_spacing):\n if i == 0:\n pass\n else:\n line = self.addLine(i, -h, i, h, grid_pen)\n line.setZValue(-1)\n self.grid.append(line)\n\n self.grid_built = True", "def __makeRandomLetterGrid(self):\n for x in range(self.numRows):\n row = []\n for y in range(self.numCols):\n row.append(self.__getRandChar())\n self.grid.append(row)\n return self.grid", "def createGrid(nx, ny, include_center = False):\n direction = 0\n positions = []\n if (nx > 1) or (ny > 1):\n half_x = int(nx/2)\n half_y = int(ny/2)\n for i in range(-half_y, half_y+1):\n for j in range(-half_x, half_x+1):\n if ((i==0) and (j==0)) and not include_center:\n continue\n else:\n if ((direction%2)==0):\n positions.append([j,i])\n else:\n positions.append([-j,i])\n direction += 1\n return positions", "def create_video_grid(vid_width, vid_height, block_dim):\n\n grid = []\n for i in range(0, vid_height, block_dim):\n grid_row = []\n for j in range(0, vid_width, block_dim):\n bottom_left_vertex = (j, i)\n bottom_right_vertex = (j + block_dim, i)\n top_right_vertex = (j + block_dim, i + block_dim)\n top_left_vertex = (j, i + block_dim)\n\n vertex_list = []\n vertex_list.append(bottom_left_vertex)\n vertex_list.append(bottom_right_vertex)\n vertex_list.append(top_right_vertex)\n vertex_list.append(top_left_vertex)\n\n path = Path(vertex_list)\n grid_row.append(path)\n grid.append(grid_row)\n return grid", "def inner_grid(height, width, digit):\n grid = create_grid(height, width)\n for r in range(1, height - 1):\n for c in range(1, width - 1):\n grid[r][c] = digit\n return grid", "def createTiles():\n Renderer.Clear()\n map = []\n w, h = len(testmap[0]), len(testmap)\n x, y = 0, 0\n for row in testmap:\n for char in row:\n map.append(makeTile(char, x, y))\n x += 1\n y += 1\n x = 0\n\n return map, w, h", "def createGridcells(mapdata, listOfP):\n new_gridcells = GridCells()\n new_gridcells.header = mapdata.header\n new_gridcells.cell_width = mapdata.info.resolution\n new_gridcells.cell_height = mapdata.info.resolution\n new_gridcells.cells = []\n for p in listOfP:\n new_gridcells.cells.append(PathPlanner.grid_to_world(mapdata, p[0], p[1]))\n return new_gridcells", "def _createGrid(self, dimensions, density):\n import math\n\n xmin, xmax = dimensions[0], dimensions[1]\n imin, imax = dimensions[2], dimensions[3]\n\n hsteps = math.ceil((xmax - xmin)/density)\n vsteps = math.ceil((imax - imin)/density)\n\n hgrids = int(math.ceil(hsteps/self.gridsize))\n vgrids = int(math.ceil(vsteps/self.gridsize))\n\n grid_inc = density * self.gridsize\n \n #Add one inside the range() because you want to include 
the last one\n horizontal = [[xmin + (x * grid_inc), xmin + ((x+1) * grid_inc)] for x in range(hgrids)]\n vertical = [[imin + (im * grid_inc), imin + ((im+1) * grid_inc)] for im in range(vgrids)]\n\n #This makes the negative to positive less confusing, positive is at index = 0\n vertical.reverse()\n\n grid_map = []\n\n for im in vertical:\n temp = []\n for x in horizontal:\n my_x = list(x)\n my_x.extend(im)\n temp.append(my_x)\n grid_map.append(temp)\n\n return grid_map", "def create_board(width, height):\n a = []\n for row in range(height):\n a += [createOneRow(width)] # gebruik de bovenstaande functie zodat ... één rij is!!\n return a", "def initialize_grid(self):\n self.grid = np.zeros([self.N, self.N, self.N])\n return self.grid", "def create_board(n, m):\n\tif n == 0 or m == 0:\n\t\traise IndexError(\"dimensions cannot both be zero\")\n\tif n < 0 or m < 0:\n\t\traise IndexError(\"dimensions cannot be negative\")\n\tboard = []\n\trows = [0] * m\n\tfor i in range(n):\n\t\tboard.append(rows)\n\treturn board", "def createBoard(height, width):\r\n A = []\r\n for row in range(height):\r\n A += [createOneRow(width)] \r\n return A\r\n #return [createOneRow(width) for x in range(height)]\r", "def __maze_generator(self):\n grid = []\n for row in range(self.__row_count):\n new_row = []\n for col in range(self.__col_count):\n new_row.append(RoomFactory.create_room([row, col]))\n if col > 0:\n new_row[col].left = new_row[col - 1]\n new_row[col - 1].right = new_row[col]\n if row > 0:\n new_row[col].up = grid[row - 1][col]\n grid[row - 1][col].down = new_row[col]\n grid.append(new_row)\n return grid", "def make_grid(width, height):\n return {(row, col): choice(ascii_uppercase) \n for row in range(height)\n for col in range(width)\n }", "def __init__(self, grid_height, grid_width):\n self._grid_height = grid_height\n self._grid_width = grid_width\n self._cells = [[EMPTY for dummy_col in range(self._grid_width)]\n for dummy_row in range(self._grid_height)]", "def getNewGrid(self, _grid_size):\n grid_ = []\n for _ in range(_grid_size[0]):\n grid_ += [[ None for _ in range(_grid_size[1]) ]]\n return grid_", "def make_grid(width, height): \n return {(row, col): choice(ascii_uppercase)\n for row in range (height) # remove ' ' and add choice()\n for col in range(width)}", "def make_grid(tensors, nrow=2, padding=2, isNormalized=True):\n grid = tv.utils.make_grid(tensor=tensors.detach().cpu(),\n nrow=nrow,\n padding=padding,\n normalize=(not isNormalized))\n if isNormalized:\n ndgrid = grid.mul(255).add_(0.5).clamp_(0, 255).permute(\n 1, 2, 0).numpy().astype(np.uint16)\n else:\n ndgrid = grid.clamp_(0, 255).permute(1, 2, 0).numpy().astype(np.uint16)\n return ndgrid", "def make_complete_graph(num_vertices):\n V = num_vertices\n K = V * (V - 1) // 2\n grid = np.zeros([3, K], np.int32)\n k = 0\n for v2 in range(V):\n for v1 in range(v2):\n grid[:, k] = [k, v1, v2]\n k += 1\n return grid", "def createBoard(width, height):\r\n board = []\r\n for i in range(height):\r\n board = board+[createOneRow(width)]\r\n return board\r\n # or\r", "def initialize_grid(self):\r\n for i in range(self.height):\r\n for j in range(self.width):\r\n self.grid[i][j] = 0\r\n \r\n # fill up unvisited cells\r\n for r in range(self.height):\r\n for c in range(self.width):\r\n if r % 2 == 0 and c % 2 == 0:\r\n self.unvisited.append((r,c))\r\n\r\n self.visited = []\r\n self.path = dict()\r\n self.generated = False", "def print_grid (grid):\r\n print('+--------------------+')\r\n for o in range(len(grid)):\r\n print('|',end='')\r\n for e in 
range(len(grid[o])):\r\n j=grid[o][e]\r\n if j==0:\r\n g=' '\r\n else:\r\n g=j\r\n print(g,end=' '*(5-len(str(grid[o][e]))))\r\n print('|')\r\n print('+--------------------+')", "def board_init():\n board = [[[i for i in range(1,n+1)] for j in range(n)] for k in range(n)]\n return board", "def stripToGrid(pixelCount, columnCount):\n rowCount = int(pixelCount/columnCount)\n grid = [[0 for x in range(rowCount)] for y in range(columnCount)]\n\n pixel = 0\n for y in range(rowCount):\n for x in range(columnCount): \n column = x if y%2 == 0 else columnCount-1-x\n grid[column][y] = pixel \n pixel += 1 \n\n return grid", "def print_grid (grid):\r\n f = '{:<5}'\r\n print(\"+--------------------+\")\r\n print('|', f.format(grid[0][0]), f.format(grid[0][1]), f.format(grid[0][2]), f.format(grid[0][3]), '|',sep='')\r\n print('|', f.format(grid[1][0]), f.format(grid[1][1]), f.format(grid[1][2]), f.format(grid[1][3]), '|',sep='')\r\n print('|', f.format(grid[2][0]), f.format(grid[2][1]), f.format(grid[2][2]), f.format(grid[2][3]), '|',sep='')\r\n print('|', f.format(grid[3][0]), f.format(grid[3][1]), f.format(grid[3][2]), f.format(grid[3][3]), '|',sep='')\r\n print(\"+--------------------+\")", "def print_grid(gr):\n for i in range(0,9):\n if((i % 3) == 0):\n print('- - - - - - - - - - - - - - - -')\n for j in range(0,9):\n if((j % 3) == 0):\n print('|', end='')\n \n val = str(gr[i][j])\n if(val == '0'):\n val = ' '\n \n print(' ' + val + ' ', end = '')\n print('|')\n print('- - - - - - - - - - - - - - - -')", "def grid_edges(num_node):\n m = math.sqrt(num_node)\n top = []\n bottom = []\n left = []\n right = []\n for node_id in range(1, num_node + 1):\n if node_id % m == 1:\n left.append(node_id)\n elif node_id % m == 0:\n right.append(node_id)\n elif node_id <= m:\n top.append(node_id)\n elif node_id >= num_node - m + 1:\n bottom.append(node_id)\n else:\n pass\n return (top, bottom, left, right)", "def create_map(grid_size):\n STATUS['game_grid'] = [] # Could be a tuple?\n STATUS['grid_size'] = grid_size\n x_coord = 1\n y_coord = 1\n grid_size_counter = grid_size * grid_size\n while grid_size_counter:\n STATUS['game_grid'].append([x_coord, y_coord])\n x_coord += 1\n if x_coord == grid_size + 1:\n y_coord += 1\n x_coord = 1\n grid_size_counter -= 1", "def make_grid(N):\n\n x = np.linspace(-2. , 2 , N)\n y = np.linspace(-2. 
, 2 , N)\n # two evenly spaced grids from -2 to 2\n\n return x, y", "def create_board(self, dimension):\n\n board = []\n\n for i in range(dimension):\n row = []\n for j in range(dimension):\n row.append(' ')\n board.append(row)\n\n return board", "def make_tree(edges):\n assert all(isinstance(edge, tuple) for edge in edges)\n edges = [tuple(sorted(edge)) for edge in edges]\n edges.sort()\n E = len(edges)\n grid = np.zeros([3, E], np.int32)\n for e, (v1, v2) in enumerate(edges):\n grid[:, e] = [e, v1, v2]\n return grid", "def generate_grid_dict(height, width):\n board = {}\n for i in range(height):\n for j in range(width):\n position = (i, j)\n board[position] = 0\n return board", "def generate_grid(contents: str) -> str:\r\n grid = \"\"\r\n grid_width = get_grid_width(contents)\r\n\r\n # Generate each row of the grid\r\n for row in range(grid_width):\r\n # Add the seperator row\r\n grid += \"{}\\n\".format(generate_grid_separator_row(grid_width))\r\n\r\n # Generate the row with characters in it\r\n for column in range(grid_width):\r\n position = (row * grid_width) + column\r\n grid += VERTICAL_WALL + EMPTY + contents[position] + EMPTY\r\n\r\n grid += \"{}\\n\".format(VERTICAL_WALL)\r\n\r\n grid += generate_grid_separator_row(grid_width)\r\n\r\n return grid", "def _possible_grids(self, num_windows):\n if num_windows < 2:\n end = 2\n else:\n end = num_windows // 2 + 1\n for rows in range(1, end):\n cols = int(math.ceil(num_windows / rows))\n yield (rows, cols, ROWCOL)\n if rows != cols:\n # also want the reverse test\n yield (cols, rows, COLROW)", "def makeMatrix():\n listOfChars = []\n for ascii in range(32, 128):\n listOfChars.append(chr(ascii))\n random.shuffle(listOfChars)\n matrix = Grid(8, 12)\n i = 0\n for row in range(matrix.getHeight()):\n for column in range(matrix.getWidth()):\n matrix[row][column] = listOfChars[i]\n i += 1\n return matrix", "def __init__(self, puzzle_height, puzzle_width, initial_grid=None):\r\n self._height = puzzle_height\r\n self._width = puzzle_width\r\n self._grid = [[col + puzzle_width * row\r\n for col in range(self._width)]\r\n for row in range(self._height)]\r\n\r\n if initial_grid != None:\r\n for row in range(puzzle_height):\r\n for col in range(puzzle_width):\r\n self._grid[row][col] = initial_grid[row][col]", "def __init__(self, puzzle_height, puzzle_width, initial_grid=None):\r\n self._height = puzzle_height\r\n self._width = puzzle_width\r\n self._grid = [[col + puzzle_width * row\r\n for col in range(self._width)]\r\n for row in range(self._height)]\r\n\r\n if initial_grid != None:\r\n for row in range(puzzle_height):\r\n for col in range(puzzle_width):\r\n self._grid[row][col] = initial_grid[row][col]", "def __init__(self, puzzle_height, puzzle_width, initial_grid=None):\r\n self._height = puzzle_height\r\n self._width = puzzle_width\r\n self._grid = [[col + puzzle_width * row\r\n for col in range(self._width)]\r\n for row in range(self._height)]\r\n\r\n if initial_grid != None:\r\n for row in range(puzzle_height):\r\n for col in range(puzzle_width):\r\n self._grid[row][col] = initial_grid[row][col]", "def init_maze(width: int, height: int) -> list[int]:\n return [0] * width * height", "def create_board(size) -> list:\n return list(itertools.product([i for i in range(size)], repeat=2))", "def create_grid(self):\n for y_iter in range(self.NUM_GRIDS):\n for x_iter in range(self.NUM_GRIDS):\n x, y = x_iter * self.SQUARE_SIZE, y_iter * self.SQUARE_SIZE\n x_stop, y_stop = x + self.SQUARE_SIZE, y + self.SQUARE_SIZE\n cords = x, y, x_stop, y_stop\n 
self.canvas.create_rectangle(cords, outline=self.color,\n fill=self.default_color)", "def __init__(self, puzzle_height, puzzle_width, initial_grid=None):\n self._height = puzzle_height\n self._width = puzzle_width\n self._grid = [[col + puzzle_width * row for col in range(self._width)]\n for row in range(self._height)]\n\n if initial_grid != None:\n for row in range(puzzle_height):\n for col in range(puzzle_width):\n self._grid[row][col] = initial_grid[row][col]", "def make_grid(self):\n grid = []\n for index, _ in enumerate(self.font[self.chars[0]]):\n grid.append([])\n\n for char in self.chars:\n for index in range(config()[\"panel\"][\"height\"]):\n grid[index].extend(self.font[char][index])\n\n return grid", "def new_board(n: int) -> Board:\n\n return [[0 for _ in range(n)] for _ in range(n)]", "def __init__(self, puzzle_height, puzzle_width, initial_grid=None):\n self._height = puzzle_height\n self._width = puzzle_width\n self._grid = [[col + puzzle_width * row\n for col in range(self._width)]\n for row in range(self._height)]\n\n if initial_grid != None:\n for row in range(puzzle_height):\n for col in range(puzzle_width):\n self._grid[row][col] = initial_grid[row][col]", "def __init__(self, puzzle_height, puzzle_width, initial_grid=None):\n self._height = puzzle_height\n self._width = puzzle_width\n self._grid = [[col + puzzle_width * row\n for col in range(self._width)]\n for row in range(self._height)]\n\n if initial_grid != None:\n for row in range(puzzle_height):\n for col in range(puzzle_width):\n self._grid[row][col] = initial_grid[row][col]", "def __init__(self, puzzle_height, puzzle_width, initial_grid = None):\n self._height = puzzle_height\n self._width = puzzle_width\n self._grid = [[col + puzzle_width * row\n for col in range(self._width)]\n for row in range(self._height)]\n\n if initial_grid != None:\n for row in range(puzzle_height):\n for col in range(puzzle_width):\n self._grid[row][col] = initial_grid[row][col]", "def displayGrid(grid):\n wid = grid.shape[0]\n show_num = 9 if wid > 9 else wid\n\n # chessboard\n line = '\\n' + '- + ' * (wid - 1) + '- {}\\n'\n line = line.join([' | '.join(grid[i]) for i in range(wid)])\n\n # mark the number of its lines\n bottom = ('\\n' + ' {} ' * show_num)\n bottom = bottom.format(*[i+1 for i in range(show_num)])\n\n if show_num == 9:\n part = (' {} '*(wid - show_num))\n part = part.format(*[i+1 for i in range(show_num, wid)])\n bottom += part\n\n print(line.format(*[i+1 for i in range(wid)]) + bottom)", "def construct_grid(features):\n grid_shape = K.shape(features)[1:3] # height, width\n grid_y = K.tile(K.reshape(K.arange(0, stop=grid_shape[0]), [-1, 1, 1, 1]),\n [1, grid_shape[1], 1, 1])\n grid_x = K.tile(K.reshape(K.arange(0, stop=grid_shape[1]), [1, -1, 1, 1]),\n [grid_shape[0], 1, 1, 1])\n grid = K.concatenate([grid_x, grid_y])\n grid = K.cast(grid, K.dtype(features))\n\n return grid, grid_shape", "def _buildGridPoints(self):\n self.spacings = []\n for level in xrange(self.depth):\n levelSpacings = []\n refLevel = level + 1\n level = 2**level\n axisData = []\n for axis in self.size:\n spacing = axis / (level+1)\n levelSpacings.append(spacing)\n axisData.append([gridValue*spacing for gridValue in xrange(1, level+1)])\n pointList = [((i, j, k), np.array([axisData[0][i], axisData[1][j], axisData[2][k]]))\n for i in xrange(level)\n for j in xrange(level)\n for k in xrange(level)]\n self.grid[refLevel] = {point[0]: point[1] for point in pointList}\n self.spacings.append(levelSpacings)", "def regular_board(shapes):\n board = [[0]*9 
for _ in range(9)]\n for shape in shapes:\n for r, c in shape:\n board[r][c] = shape[(r, c)]\n return board", "def test_Grid_creates_array_space():\n\n # create dummy meshgrid\n img_dim, spacing = (193, 193), 64\n x_vec = np.arange(0, img_dim[1], spacing)\n y_vec = np.arange(0, img_dim[0], spacing)\n xx, yy = np.meshgrid(x_vec, y_vec)\n\n # create Grid\n g = mg.Grid(img_dim, spacing)\n\n assert g.ny == len(y_vec)\n assert g.nx == len(x_vec)" ]
[ "0.89446443", "0.8844845", "0.88298595", "0.8797042", "0.84641975", "0.82472247", "0.8149008", "0.78898084", "0.7839033", "0.7772447", "0.7658475", "0.76574105", "0.76080716", "0.7555471", "0.75466675", "0.74476326", "0.7441377", "0.74026555", "0.7387294", "0.73625684", "0.7360648", "0.7344431", "0.7317405", "0.7299126", "0.7288084", "0.72580534", "0.7199973", "0.71868557", "0.7162282", "0.71539736", "0.7143437", "0.7116673", "0.711232", "0.703947", "0.70385545", "0.70260394", "0.70081234", "0.7003911", "0.7002553", "0.6989667", "0.69667554", "0.69532263", "0.6943398", "0.6939708", "0.6923272", "0.69192535", "0.6906025", "0.6898658", "0.68947196", "0.68931425", "0.68796307", "0.6852161", "0.68363607", "0.6831088", "0.6830073", "0.68297255", "0.6826286", "0.680662", "0.6801383", "0.67948437", "0.6788307", "0.67786694", "0.67685455", "0.6764987", "0.6763304", "0.6751727", "0.67344874", "0.67225105", "0.66965336", "0.66605455", "0.66573995", "0.66550463", "0.6653785", "0.6651536", "0.6643133", "0.6636256", "0.6628172", "0.66232383", "0.66142493", "0.6611142", "0.65989536", "0.65951025", "0.6591155", "0.65752846", "0.65752846", "0.65752846", "0.6571069", "0.6567367", "0.6538354", "0.6528227", "0.6513745", "0.65068036", "0.6503832", "0.6503832", "0.6501267", "0.65007734", "0.6499875", "0.6483399", "0.6480819", "0.64727306" ]
0.848049
4
Initializes the model trait.
def _model_default(self):
    return NamingTreeModel()
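This one-liner follows the Enthought Traits dynamic-default convention, where a method named _<trait>_default supplies the trait's initial value on first access. A minimal self-contained sketch of that pattern (FakeModel and NamingTree are illustrative stand-ins, not names from this row):

from traits.api import HasTraits, Instance

class FakeModel(HasTraits):
    pass

class NamingTree(HasTraits):
    # The default for 'model' is built lazily by _model_default below.
    model = Instance(FakeModel)

    def _model_default(self):
        return FakeModel()

tree = NamingTree()
assert isinstance(tree.model, FakeModel)  # default is created on first access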
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def initialize_model(self):\n pass", "def init_model(self):\n pass", "def initialize(self, model):\n pass", "def __init__(self, **kwargs):\n super(Model, self).__init__(**kwargs)", "def initialize(self) -> None:\n self.model = load(self.path)", "def __init__(self, model):\n self._model = model", "def initialize_model(self):\n model = self.model_class()\n return model", "def __init__(self, **kwargs):\n super(Model, self).__init__(**kwargs)\n self._params = self.find_params()", "def __init__(self):\n self.model = None", "def __init__(self):\n self.model = None", "def __init__(self, model):\n self.model = model", "def __init__(self, model):\n self.model = model", "def __init__(self, model):\n self.model = model", "def __init__(self, model):\n self.model = model", "def initialize(self):\n LOG.info(\"Initializing Model.\")\n self.model = self.convert(df=self.training_df)\n if self.bootstraps is not None:\n LOG.info(\"Bootstrapping Data.\")\n self.bootstrap_data()", "def __init__(self, model):\n\t\tself.model = model", "def __init__(self, model: object):\n self.model = model", "def __init__(self, model: Model1D):\n super().__init__(model=model)", "def __init__(self, model: Model1D):\n super().__init__(model=model)", "def __init__(self, model: Model1D):\n super().__init__(model=model)", "def __init__(self, model: Model1D):\n super().__init__(model=model)", "def __init__(self, model: Model1D):\n super().__init__(model=model)", "def __init__(self, model: Model1D):\n self._model = model", "def initialize(self):\n for key in self.parameter_dict:\n self.models[key] = self._create_model(key)", "def __init__(self, **kwargs):\n super(Model, self).__init__(**kwargs)\n\n for (key, value) in kwargs.iteritems():\n # use setattr so that validation is triggered\n setattr(self, key, value)", "def initialize(self):\n raise NotImplementedError", "def initialize(self):\n raise NotImplementedError", "def initialize(self):\n raise NotImplementedError", "def __init__(self, model: Optional[Model] = None) -> None:\n self.model = model", "def init_model(self):\n cxnlib.CXNNetInitModel(self.handle)", "def __init__(self, model: str, **kwargs):\n super().__init__(model=model)", "def __init__(self, model: str, **kwargs):\n\n super().__init__(model=model, **kwargs)\n logger.info('load model done')", "def init(self):\n raise NotImplementedError", "def init(self):\n raise NotImplementedError", "def __init__(self, model: Type[ModelType]):\n self.model = model", "def __init__(self, model: Type[ModelType]):\n self.model = model", "def init_model(engine):\n # Must use the same engine as the session fixture used by this module.\n initialize_model(model.mapper_registry, engine)", "def __init__(self):\n # Initializing the Model with the class\n super(Model, self).__init__()\n # torch.nn.Linear applies a Linear transformation. The first parameter is the size of each input sample. The second is the size of the output sample\n self.linear = torch.nn.Linear(1, 1)", "def initialize(self):\n pass", "def initialize(self):\n pass", "def initialize(self):\n pass", "def initialize(self):\n pass", "def initialize(self):\n pass", "def initialize_main_model(self, model, **kwargs):\n return NotImplementedError(\n \"Initializer has not implemented an initialize_main_model method. 
Derived classes \"\n \"are required to overload this.\"\n )", "def __init__(self):\n self.scaler = None\n self.model = None\n self.encoder = {}\n\n self._load_model()\n return", "def init_model(engine):\n # Must use the same engine as the session fixture used by this module.\n initialize_model(mapper_registry, engine)", "def initialize(self):\r\n pass", "def initialize(self):\r\n pass", "def __init__(self):\n self.model = self._get_model()\n\n # NOTE: The order of this list hardcoded here, and needs to be changed when re-training the model!\n # When exporting the model in tflite format, the model_spec is lost, so we cannot do it like that:\n # classes = ['???'] * model.model_spec.config.num_classes\n # label_map = model.model_spec.config.label_map\n # for label_id, label_name in label_map.as_dict().items():\n # classes[label_id-1] = label_name\n self.classes = ['Baked Goods', 'Salad', 'Cheese', 'Seafood', 'Tomato']", "def _add_init(self, p_model):\r\n\r\n raise NotImplementedError", "def __init__(self, model, **kwargs):\n self.model = model\n\n super(RecordView, self).__init__(**kwargs)", "def _init(self):\n raise NotImplementedError", "def __post_init_check(self):\n try:\n t = self.time\n m = self.metadata\n except AttributeError as e:\n clsname = self.__class__.__name__\n raise TypeError(f\"Model not initialized. Please call 'SupernovaModel.__init__' within the '{clsname}.__init__'\") from e", "def initialize(self):\n\t\tpass", "def init(self):\n pass", "def init(self):\n pass", "def init(self):\n pass", "def init(self):\n pass", "def init(self):\n pass", "def init(self):\n pass", "def init(self):\n pass", "def init(self):\n pass", "def initialize(self) -> None:\n pass", "def initialize(self):\n return", "def basic_model_init(model_args, task_infos, tokenizer):\n config = AutoConfig.from_pretrained(\n model_args.model_name_or_path,\n num_labels=task_infos.num_labels,\n cache_dir=model_args.model_cache_dir,\n id2label=task_infos.id2label,\n label2id=task_infos.label2id,\n )\n model_cls = getattr(mod, model_args.architectures,\n AutoModelForSequenceClassification)\n model = model_cls.from_pretrained(\n model_args.model_name_or_path,\n config=config,\n cache_dir=model_args.model_cache_dir,\n )\n if model.config.vocab_size < len(tokenizer):\n print(\"resize...\")\n model.resize_token_embeddings(len(tokenizer))\n return model", "def init(self):\n self.is_init = True\n raise NotImplementedError", "def initialize_trainer(self):\n self.initialize_matrices()\n self.initialize_model()\n self.initialize_optimizers()\n return self", "def __init__(self):\n self.conf = None\n self.section = None\n self._engine = None\n self._session = None\n self.base_model = declarative_base()", "def init(self) -> None:\n ...", "def test_model_initialization():\n MyModel(\"model\", SkillContext())", "def _pre_mcs_init(cls):\n # technically you could also put a @classmethod with the same name on\n # the Model class, if you prefer that approach", "def _init_model(self):\n try:\n model_config = self._conf.get(PredictConstance.BASE_CONFIG,\n PredictConstance.MODEL_CONFIG)\n conf = configparser.ConfigParser()\n conf.read(model_config)\n self._model = model_factory.ModelFactory.create_model(config=conf)\n return True\n except Exception as err:\n self.managerlogger.logger.error(\"init model error: %s\" % err)\n self.errorlogger.logger.error(\"init model error: \\n %s\" % traceback.format_exc())\n return False", "def __init__(self, model: MT):\n self.model: Final[MT] = model", "def __init__(self):\n cwd = 
os.path.join(os.path.dirname(__file__), config.vosk_model_dir)\n self.model = Model(cwd)\n logger.info(f'Loaded speech recognition model from {cwd}')", "def init_attrs(self):\n raise NotImplementedError", "def initialize(self, **kwargs):", "def __init__(self):\n\n super().__init__()\n\n self._model = None # type: StateSpaceModel\n self._kernel = None # type: Distribution", "def _init(self):\n pass", "def __init__(self):\n if Classifier.__instance is not None:\n LOGGER.logger.exception(\"This class is a singleton!\")\n else:\n self.model = False\n self.load_model()", "def initialize(self, **kwargs):\n raise NotImplementedError()", "def _init_model(self, forrest):\n rels = self.get_rels(forrest)\n self._model = RDPModel(rels)", "def __init__(self, model):\n\n self.model = model\n self.paths = PathOperations()\n self.tags = Tags()", "def _initialize(self):\n pass", "def _initialize(self):\n pass", "def _initialize(self):\n pass", "def initialize(self):\n pass # pragma: no cover", "def initialise(self):\n self.set_up()", "def setUpClass(self):\n\n base_model = BaseModel()", "def do_init(self):\n\n pass", "def initialize(self):\n\n # --------- BEGIN YOUR CODE ----------\n\n # This is exactly the same as Human.initialize, just copy the code over\n\n # --------- END YOUR CODE ----------\n pass", "def init_model(self, model_name, config=None):\n ModelDirectory.init_model(model_name, pipeline=self, config=config)\n return self\n #self._action_list.append({'name': INIT_MODEL_ID, 'model_name': model_name, 'config': config})\n #return self.append_action()", "def initialise(self):", "def __init__(self):\n self.model = self.load_model()\n self.form_html = self.create_form_html()", "def __init__(self):\n logger.debug('Initializing %s model.' % self.__class__.__name__)\n self.dependent_attributes = ['_alpha',\n '_log_like',\n '_gradient','_K',\n '_log_det']\n self._previous_parameters = None # previous parameters from last call\n self.grad_method = None # could be {'finite_difference','adjoint'}\n self.noise_var_constraint = '+ve' # Gaussian noise variance constraint\n return", "def initialize(cls):", "def initialisation(self):\n self.create_variables()\n self.create_placeholders()\n self.build_model()\n self.reset_lr(None, True)\n self.build_loss()\n self.initialised = True", "def initialize_model(self, initial_data):\n # EDIT THIS METHOD TO RETURN A MINIMAX MODEL ###\n return None", "def __init__(self, ml):\n # Save a reference to the model.\n self.ml = ml", "def initialize(self): \r\n pass", "def init(self):\n inputs = self.inputs()\n outputs = self.outputs(inputs)\n self.model = tf.keras.Model(inputs=inputs, outputs=outputs)\n self.model.compile(optimizer=self.optimizer() or self.config.get('optimizer'),\n loss=self.loss() or None,\n metrics=self.metrics() or None,\n loss_weights=self.loss_weights() or None,\n weighted_metrics=self.weighted_metrics() or None,\n target_tensors=self.target_tensors() or None)\n if self.config.get('debug'):\n self.model.summary()", "def MakeModel(self):\n pass" ]
[ "0.84036934", "0.83105385", "0.8114287", "0.7524097", "0.74544305", "0.7413742", "0.7318616", "0.72427833", "0.719481", "0.719481", "0.71610886", "0.71610886", "0.71610886", "0.71610886", "0.7095571", "0.7049685", "0.69679695", "0.69345516", "0.69345516", "0.69345516", "0.69345516", "0.69345516", "0.6910366", "0.689956", "0.6881151", "0.68611", "0.68611", "0.68611", "0.685663", "0.68221647", "0.6815073", "0.67934513", "0.6773146", "0.6773146", "0.6769574", "0.6769574", "0.66678524", "0.66608924", "0.665984", "0.665984", "0.665984", "0.665984", "0.665984", "0.665085", "0.664591", "0.66306394", "0.66251504", "0.66251504", "0.6623963", "0.660533", "0.6604071", "0.6584458", "0.6571673", "0.65639025", "0.6545533", "0.6545533", "0.6545533", "0.6545533", "0.6545533", "0.6545533", "0.6545533", "0.6545533", "0.6530485", "0.6529278", "0.64984065", "0.64845645", "0.64823455", "0.64767444", "0.6476683", "0.64602613", "0.6430096", "0.6425281", "0.641799", "0.64145094", "0.64129734", "0.6410484", "0.6409498", "0.6399144", "0.6389782", "0.63890904", "0.63860327", "0.6376431", "0.6371915", "0.6371915", "0.6371915", "0.6360505", "0.6359458", "0.63564914", "0.6356296", "0.63501215", "0.633855", "0.63333744", "0.6328394", "0.63155097", "0.6313768", "0.6312659", "0.63077235", "0.6304324", "0.62959874", "0.62849534", "0.62846655" ]
0.0
-1
Make sure that the specified node is visible.
def ensure_visible(self, node):
    try:
        components = node.namespace_name.split('/')

        # Make sure that the tree is expanded down to the context that
        # contains the node.
        binding = self.root
        for atom in components[:-1]:
            binding = binding.obj.lookup_binding(atom)
            self.expand(binding)

        # The context is expanded so we know that the node will be in the
        # node to Id map.
        wxid = self._node_to_id_map.get(self.model.get_key(node), None)
        self.control.EnsureVisible(wxid)

    # We need 'namespace_name' to make this work. If we don't have it
    # then we simply cannot do this!
    except OperationNotSupportedError:
        binding = None

    return binding
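For comparison, a minimal sketch of the same reveal operation written directly against wxPython's TreeCtrl API (tree_ctrl and item_id are assumed inputs; this illustrates the underlying widget calls, not this class's actual code path):

import wx

def reveal(tree_ctrl, item_id):
    # Expand every ancestor first - mirroring what ensure_visible does
    # through its model bindings - then scroll the item into view.
    parent = tree_ctrl.GetItemParent(item_id)
    while parent.IsOk():
        tree_ctrl.Expand(parent)
        parent = tree_ctrl.GetItemParent(parent)
    tree_ctrl.EnsureVisible(item_id)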
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def ensure_visible(self):\n self.set_visible(True)", "def test_visibility(self):\r\n self.assertFalse(self.net.environment\\\r\n .are_visible(self.net.pos[self.node1],\r\n self.net.pos[self.node2]))\r\n self.assertTrue(self.net.environment\\\r\n .are_visible(self.net.pos[self.node2],\r\n self.net.pos[self.node3]))", "def setVisible( self, state ):\n self._visible = state\n \n super(XNode, self).setVisible(self.isVisible())\n \n self.dispatch.visibilityChanged.emit(state)\n self.setDirty()", "def inspectedNodeIsVisible(self):\n return self._inspected_node_is_visible", "def isVisible(self):\n\t\treturn True", "def invisible_visit(self, node: Node) -> None:\n pass", "def is_visible(self):", "def isVisible(self, p_int): # real signature unknown; restored from __doc__\n return False", "def isVisible(self, p_int): # real signature unknown; restored from __doc__\n return False", "def wait_for_visible(self, timeout=None):\n wait_until(lambda: self.is_displayed(),\n \"Element '%s' not visible after <TIMEOUT>.\" % self._locator,\n timeout)", "def visible(self, show):", "def is_visible(self, path):\n return True", "def is_visible(self, timeout=None):\n try:\n self.visibility_of_element_located(timeout)\n except TimeoutException:\n return False\n return True", "def assert_visible(self, locator, msg=None):\r\n e = driver.find_elements_by_locator(locator)\r\n if len(e) == 0:\r\n raise AssertionError(\"Element at %s was not found\" % locator)\r\n assert e.is_displayed()", "def wait_for_invisible(self, timeout=None):\n wait_until(lambda: not self.is_displayed(),\n \"Element '%s' still visible after <TIMEOUT>.\" % self._locator,\n timeout)", "def EnsureVisible(self, item):\r\n\r\n # first expand all parent branches\r\n parent = item.GetParent()\r\n\r\n if self.HasAGWFlag(TR_HIDE_ROOT):\r\n while parent and parent != self._anchor:\r\n self.Expand(parent)\r\n parent = parent.GetParent()\r\n else:\r\n while parent:\r\n self.Expand(parent)\r\n parent = parent.GetParent()\r\n \r\n self.ScrollTo(item)", "def test_visibility(self, data, visible):\n layer = Points(data)\n assert layer.visible is True\n\n layer = Points(data, visible=visible)\n assert layer.visible is visible\n\n layer.visible = not visible\n assert layer.visible is not visible", "def verify_visible(self, locator, msg=None):\r\n try:\r\n self.assert_visible(locator, msg)\r\n except AssertionError, e:\r\n if msg:\r\n m = \"%s:\\n%s\" % (msg, str(e))\r\n else:\r\n m = str(e)\r\n self.verification_erorrs.append(m)", "def is_element_visible(self):\n if self.web_element.is_displayed():\n return True\n else:\n return False", "def is_visible(self):\n return self.real > 0", "def test_visible_whitelisted(self):\n\n self.feature_test.set_percentage(0)\n self.feature_test.add_to_whitelist(3)\n self.assertTrue(self.feature_test.is_visible(3))", "def visible(self, visible):\n\n self._visible = visible", "def visible(self) -> bool:\n try:\n return bool(self.driver.wait_until_all_visible(*self.ROOT_LOCATOR))\n except WebDriverException:\n return False", "def _is_visible(self, point):\n return point[0] > 0 and point[0] < 1 and point[1] > 0 and point[1] < 1", "def isVisible( self ):\n layer = self.layer()\n if ( layer and not layer.isVisible() ):\n return False\n# \n# if ( self.isIsolateHidden() ):\n# return False\n# \n return self._visible", "def setVisible(*args):", "def setVisible(*args):", "def setVisible(*args):", "def setVisible(*args):", "def setVisible(*args):", "def setVisible(*args):", "def setVisible(*args):", "def setVisible(*args):", "def 
setVisible(*args):", "def setVisible(*args):", "def setVisible(*args):", "def set_visible(self, visible):\n self.widget.setVisible(visible)", "def set_visible(self, visible):\n self.widget.setVisible(visible)", "def is_visible(self):\n if self._namespace and self._namespace.is_anonymous():\n return True\n return self._rawdoc.get_inherited_visibility() != DocType.none", "def XPIsWidgetVisible(inWidget):\n pass", "def is_visible(self):\n return self.rect.x < self.screen_rect.width", "def is_visible ( self ):\n return not self.is_hidden and (\n self.priority is None or self.priority >= 0\n )", "def test_step_visibility(self, _step: PropertyMock):\n _step.return_value = MagicMock(is_visible=True)\n es = exposed.ExposedStep()\n self.assertTrue(es.visible)\n es.visible = False\n self.assertFalse(es.visible)", "def set_visible(self, visible):\n # Make sure the 'visible' attribute is synced up as a result\n # of the method call. This may fire a notification, in which\n # case the change handler will call this method again. This\n # guard prevents that unneeded recursion.\n if guard.guarded(self, 'set_visible'):\n return\n else:\n with guard(self, 'set_visible'):\n self.visible = visible\n \n # Only set the visibility to True (which will show the window) \n # if the component is fully initialized.\n if not visible or self.initialized:\n self.abstract_obj.set_visible(visible)", "def wait_for_visible(self, locator, timeout=2):\n try:\n WebDriverWait(self.driver, timeout).until(\n ec.visibility_of_element_located(locator)\n )\n except (NoSuchElementException, TimeoutException) as err:\n logging.error(f\"Exception Type: {type(err)}\")\n logging.info(f\"Element does not exist: {(locator, )} \")\n return False\n return True", "def hide_nodes(node_names, network=None, base_url=DEFAULT_BASE_URL):\n res = set_node_property_bypass(node_names, False, 'NODE_VISIBLE', network=network, base_url=base_url)\n return res", "def __checkCenterVisibility(self, itemNode, itemXc, itemYc):\r\n for sibling in itemNode.findall('following-sibling::*[@is-in-tab-area=\"true\"]'):\r\n name = sibling.getAttribute('image')\r\n siblingX, siblingY, siblingW, siblingH = [int(c) for c in sibling.getAttribute('coords').split(\",\")]\r\n if itemXc>=siblingX and itemXc <=(siblingX + siblingW) and itemYc>=siblingY and itemYc <= (siblingY + siblingH):\r\n return (self.HIDDEN,(itemXc,itemYc), itemNode)\r\n\r\n return (self.VISIBLE,(itemXc,itemYc), itemNode)", "def wait_for_invisible(self, locator, timeout=2):\n try:\n WebDriverWait(self.driver, timeout).until(\n ec.invisibility_of_element_located(locator)\n )\n except (NoSuchElementException, TimeoutException):\n return False\n return True", "def set_visible(self, visible):\n self.ec._win.set_mouse_visible(visible)\n self.ec._win.set_mouse_platform_visible(visible) # Pyglet workaround\n self._visible = visible", "def ensure_hidden(self):\n self.set_visible(False)", "def ToggleVisible(self, event):\n pass", "def test_anon_public(self):\n self.do_visible(True, None, True)", "def setVisibleCondition(*args):", "def setVisibleCondition(*args):", "def setVisibleCondition(*args):", "def setVisibleCondition(*args):", "def setVisibleCondition(*args):", "def setVisibleCondition(*args):", "def setVisibleCondition(*args):", "def setVisibleCondition(*args):", "def setVisibleCondition(*args):", "def setVisibleCondition(*args):", "def setVisibleCondition(*args):", "def set_mouse_visible(self, visible):\n raise NotImplementedError", "def tag_visible(element):\n\n if element.parent.name in ['style', 
'script', 'head', 'title', 'meta', '[document]']:\n return False\n if isinstance(element, Comment):\n return False\n return True", "def set_visible(self):\n\t\tself.hide()\n\t\tself.__sys_tray_icon.setVisible(True)", "def test_visible_widgets(plugin_dialog):\n\n assert plugin_dialog.direct_entry_edit.isVisible()\n assert plugin_dialog.direct_entry_btn.isVisible()", "def is_visible(self):\n return self.proto.display_type == DISPLAY_TYPE.Visible.value", "def visibility_toggle(self, _):\n raise VisibilityToggleEvent", "def enable_intra_node_visibility(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"enable_intra_node_visibility\")", "def show_node(self):\n if self.controller.node_id:\n self.print_object(\n 'node',\n ('uid', 'status', 'roles'),\n self.controller.get_node()\n )\n else:\n print(\"Please select node at first.\")", "def _force_visibility(self, visibility_field):\r\n authorized_project = acl.get_limited_to_project(pecan.request.headers)\r\n is_admin = authorized_project is None\r\n if not is_admin:\r\n self._restrict_to_project(authorized_project, visibility_field)\r\n self._check_cross_project_references(authorized_project,\r\n visibility_field)", "def test_constructor_visible_widgets(plugin_dialog_constructor):\n assert not plugin_dialog_constructor.direct_entry_edit.isVisible()\n assert not plugin_dialog_constructor.direct_entry_btn.isVisible()", "def is_visible(self):\n try:\n return self.element.is_displayed()\n except (NoSuchElementException,\n ElementNotVisibleException,\n StaleElementReferenceException):\n return False", "def test_empty_public(self):\n self.do_visible(True, None, True, is_admin=True)", "def is_visible(self, locator, timeout=15):\n try:\n ui.WebDriverWait(self.driver, timeout).until(EC.visibility_of_element_located((By.CSS_SELECTOR, locator)))\n return True\n except TimeoutException:\n return False", "def waitForElementVisible(self, element_tuple, *, timeout=5):\n try:\n WebDriverWait(self.CORE, timeout).until(EC.visibility_of_element_located(self.format_element(element_tuple))) # Don't unpack, use function to parse out first 2 items\n self.log_info(f\"Browser.waitForElementVisible: {element_tuple} is visible within {timeout} seconds\")\n return True\n except SeleniumExceptions.TimeoutException:\n self.log_warning(f\"Browser.waitForElementVisible: {element_tuple} did not become visible after {timeout} seconds\")\n return False", "def test_wiki_visibility(self):\r\n\r\n wiki_tab = tabs.WikiTab()\r\n self.assertTrue(wiki_tab.is_hideable)\r\n wiki_tab.is_hidden = True\r\n self.assertTrue(wiki_tab['is_hidden'])\r\n self.check_tab_json_methods(wiki_tab)\r\n self.check_tab_equality(wiki_tab, wiki_tab.to_json())\r\n wiki_tab['is_hidden'] = False\r\n self.assertFalse(wiki_tab.is_hidden)", "def set_visible(self, target: bool) -> None:\n hidden = not target\n for ent in self.child_ents():\n ent.vis_shown = target\n ent.hidden = hidden\n for solid in ent.solids:\n solid.vis_shown = target\n solid.hidden = hidden\n\n for solid in self.child_solids():\n solid.vis_shown = solid.hidden = target\n solid.hidden = hidden", "def test_visible_blacklisted(self):\n\n self.feature_test.set_percentage(100)\n self.feature_test.add_to_blacklist(3)\n self.assertFalse(self.feature_test.is_visible(3))", "def is_visible_to(self, user):\n return True", "def set_visible(self, state: bool):\n self.box.set_visible(state)\n if not state:\n self.add_box.set_visible(False)", "def IsVisible(self, item):\r\n\r\n # An item is only visible if it's not a descendant of a collapsed 
item\r\n parent = item.GetParent()\r\n\r\n while parent:\r\n \r\n if not parent.IsExpanded():\r\n return False\r\n \r\n parent = parent.GetParent()\r\n \r\n startX, startY = self.GetViewStart()\r\n clientSize = self.GetClientSize()\r\n\r\n rect = self.GetBoundingRect(item)\r\n \r\n if not rect:\r\n return False\r\n if rect.GetWidth() == 0 or rect.GetHeight() == 0:\r\n return False\r\n if rect.GetBottom() < 0 or rect.GetTop() > clientSize.y:\r\n return False\r\n if rect.GetRight() < 0 or rect.GetLeft() > clientSize.x:\r\n return False\r\n\r\n return True", "def visible(self):\n return self._turtle.isvisible()", "def visible(self):\n return self._turtle.isvisible()", "def is_node_onscreen(self, node, screen_edges):\n real_node = self.original_graph.get_node_by_serial(node.serial)\n node_x = real_node.x\n node_y = real_node.y\n node_r = node.get_radius() * 0.05\n return (node_x + node_r) > screen_edges[\"bottom_left\"].get_x() and \\\n (node_x - node_r) < screen_edges[\"top_right\"].get_x() and \\\n (node_y + node_r) > screen_edges[\"bottom_left\"].get_y() and \\\n (node_y - node_r) < screen_edges[\"top_right\"].get_y()", "def leftmakevisible(self, pos):\n pass", "def test_anon_private(self):\n self.do_visible(True, None, False)", "def rightmakevisible(self, pos):\n pass", "def cycle_visible_anatomy_vis(self):\n raise CycleAnatomyEvent", "def test_visible_white_and_blacklisted(self):\n\n self.feature_test.set_percentage(0)\n self.feature_test.add_to_whitelist(3)\n self.feature_test.add_to_blacklist(3)\n self.assertTrue(self.feature_test.is_visible(3))", "def waitForElementNotVisible(self, element_tuple, *, timeout=5):\n try:\n WebDriverWait(self.CORE, timeout).until(EC.invisibility_of_element_located(self.format_element(element_tuple))) # Don't unpack, use function to parse out first 2 items\n self.log_info(f\"Browser.waitForElementNotVisible: {element_tuple} is invisible within {timeout} seconds\")\n return True\n except SeleniumExceptions.TimeoutException:\n self.log_warning(f\"Browser.waitForElementNotVisible: {element_tuple} did not become invisible after {timeout} seconds\")\n return False", "def is_visible(self, x, y) :\n\t\tres_x = (x > self.x_min) and (x < self.x_max)\n\t\t# print 'res_x : {0}, x : {1}, x_min : {2}, x_max:{3}'.format(res_x, x, self.x_min, self.x_max)\n\t\tres_y = (y > self.y_min) #and (y < self.y_max)\n\t\treturn res_x and res_y", "def has_visible_entity(self):\n ret = False\n for e in self:\n if e.is_visible() == True:\n ret = True\n break\n return ret", "def visible( self, frustum=None, matrix=None, occlusion=0, mode=None ):\n try:\n return self.boundingVolume(mode).visible( \n frustum, matrix, occlusion=occlusion, mode=mode \n )\n except Exception, err:\n tb = traceback.format_exc( )\n log.warn(\n \"\"\"Failure during Shape.visible check for %r:\\n%s\"\"\",\n self,\n tb\n )", "def test_empty_private(self):\n self.do_visible(True, None, False, is_admin=True)", "def test_empty_public_owned(self):\n self.do_visible(True, 'pattieblack', True, is_admin=True)", "def is_visible(self, is_visible):\n\n self.container['is_visible'] = is_visible", "def update_visible(self, immediate=False):\n raise NotImplementedError", "def UpdateVisibility(self):\r\n # Clear the map\r\n self.ClearVisibilityMap()\r\n \r\n # Only update it if we have a player\r\n if not self.game.player:\r\n return\r\n \r\n max_vis_day = self.data.get('max_visibility', self.game.data['map']['max_visibility'])\r\n max_vis_night = self.data.get('max_visibility_night', 
self.game.data['map']['max_visibility_night'])\r\n \r\n #TODO(g): Add day/night cycle\r\n max_vis = max_vis_day\r\n \r\n # Cast rays from the player. Step out from the player and find the\r\n # angle to the player to determine if visible.\r\n center = self.game.player.pos.ToList()\r\n \r\n # Check every tile\r\n for y in range(center[1] - max_vis, center[1] + max_vis):\r\n for x in range(center[0] - max_vis, center[0] + max_vis):\r\n dist = rpg_base.GetDistance(center, [x, y])\r\n # Only really test tiles that are within viewing range\r\n if dist <= max_vis:\r\n #Log('%s -> %s = %s' % (center, [x, y], dist))\r\n if self.game.map.HasLineOfSightToPlayer(x, y):\r\n self.SetVisibility(x, y)" ]
[ "0.6804122", "0.6494671", "0.6356905", "0.6340063", "0.63099027", "0.6276163", "0.62759423", "0.61556643", "0.61556643", "0.61108905", "0.6087162", "0.6059026", "0.6036995", "0.6013018", "0.5971871", "0.59257257", "0.58930624", "0.58760315", "0.58694416", "0.58317655", "0.5784247", "0.5783572", "0.57727337", "0.57556677", "0.573891", "0.571824", "0.571824", "0.571824", "0.571824", "0.571824", "0.571824", "0.571824", "0.571824", "0.571824", "0.571824", "0.571824", "0.5709854", "0.5709854", "0.57045907", "0.5682272", "0.5675361", "0.56727546", "0.56716216", "0.5644074", "0.56362176", "0.56293553", "0.56265026", "0.56245357", "0.5621022", "0.5611928", "0.55840075", "0.5560535", "0.5549912", "0.5549912", "0.5549912", "0.5549912", "0.5549912", "0.5549912", "0.5549912", "0.5549912", "0.5549912", "0.5549912", "0.5549912", "0.55495185", "0.55438477", "0.55377394", "0.55350006", "0.5512988", "0.5497266", "0.54850847", "0.54779905", "0.5464462", "0.5457586", "0.54551756", "0.5447846", "0.54473424", "0.5444893", "0.5442731", "0.5440836", "0.5436635", "0.5431826", "0.54301924", "0.5419589", "0.5418743", "0.5418743", "0.54121846", "0.5403556", "0.540297", "0.54020643", "0.5401142", "0.5395893", "0.5393838", "0.53767616", "0.53763276", "0.5366211", "0.53649914", "0.5360815", "0.5356934", "0.5348476", "0.5344851" ]
0.7291304
0
Method tests the endpoint for getting bucketlist items
def test_get_bucketlist_items(self): email = "test@test.com" _pword = "test" user = User.query.filter_by(email=email).first() bucketlist = BucketList.query.filter_by(user_id=user.id, id=1).first() items_no = len(bucketlist.bucketlist_items) headers = self.authentication_headers(email=email, password=_pword) response = self.client.get( '/api/v1/bucketlist/1/items/', content_type="application/json", headers=headers, follow_redirects=True ) result = json.loads(response.data.decode('utf-8')) self.assertEqual(len(result), items_no)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_get_bucketlist_items(self):\n resp = self.client.post('/bucketlists',\n data=json.dumps(self.bucketlist),\n content_type=\"application/json\", headers={\n \"Authorization\": self.token\n })\n self.assertEqual(resp.status_code, 201)\n\n resp_item = self.client.post('/bucketlistitems/1/items',\n data=json.dumps(\n {\"name\": \"visit the busy surburbs.\"}),\n content_type=\"application/json\", headers={\n \"Authorization\": self.token\n })\n\n self.assertEqual(resp_item.status_code, 200)\n resp_item = self.client.get('/bucketlistitems/1/items', headers={\n \"Authorization\": self.token\n })\n self.assertEqual(resp_item.status_code, 200)", "def test_user_can_get_list_of_buckets(self):\n with self.client:\n response = self.client.get(\n '/bucketlists/',\n headers=dict(Authorization='Bearer ' + self.get_user_token())\n )\n data = json.loads(response.data.decode())\n self.assertEqual(response.status_code, 200)\n self.assertTrue(data['status'] == 'success')\n self.assertIsInstance(data['buckets'], list)\n self.assertEqual(len(data['buckets']), 0)\n self.assertEqual(data['count'], 0)\n self.assertIsInstance(data['count'], int)\n self.assertEqual(data['previous'], None)\n self.assertEqual(data['next'], None)", "def test_api_get_all_bucketlists(self):\n res = self.client().post('/bucketlist', data=self.bucketlist)\n self.assertEqual(res.status_code, 201)\n res = self.client().get('/bucketlist')\n self.assertEqual(res.status_code, 200)\n self.assertIn('Go to vacation', str(res.data))", "def test_get_request_on_bucketlist_resource(self):\n\n response = self.client.get(\"/bucketlists/\")\n self.assertEqual(response.status_code, 401)", "def test_read_bucket(self):\n post_data = self.post_a_bucket()\n self.assertEqual(post_data.status_code, 201)\n result_of_get_method = self.client().get('/bucketlists/',\n headers=dict(Authorization='Bearer '\n + self.token())\n )\n self.assertEqual(result_of_get_method.status_code, 200)\n self.assertIn('Climb the Himalayas', str(result_of_get_method.data))", "def test_list(self):\n responses.add(\n responses.Response(\n method='GET',\n url='https://connection.keboola.com/v2/storage/buckets',\n json=list_response\n )\n )\n buckets_list = self.buckets.list()\n assert isinstance(buckets_list, list)", "def test_creating_and_getting_a_bucketlist_for_authenticated_user(self):\n\n # test all bucketlists\n response = self.client.post(\n \"/bucketlists/\",\n data=dict(name='test_bucketlist'),\n headers={'Authorization': self.user_token}\n )\n bucketlist = json.loads(response.data)\n\n self.assertEqual(response.status_code, 200)\n self.assertEqual(bucketlist[\"name\"], 'test_bucketlist')\n\n # test single bucketlist\n self.bucketlist_id = bucketlist[\"bucketlist_id\"]\n single_bucketlist = self.client.get(\n \"/bucketlists/\" + str(self.bucketlist_id) + \"\",\n headers={'Authorization': self.user_token}\n )\n\n one_bucketlist = json.loads(single_bucketlist.data)\n\n self.assertEqual(single_bucketlist.status_code, 200)\n self.assertEqual(one_bucketlist[\"name\"], 'test_bucketlist')\n\n # test all items in bucketlist\n item = self.client.post(\n \"/bucketlists/\" + str(self.bucketlist_id) + \"/items/\",\n data=dict(name=\"test_item\"),\n headers={'Authorization': self.user_token}\n )\n\n one_item = json.loads(item.data)\n\n self.assertEqual(item.status_code, 200)\n self.assertEqual(one_item[\"name\"], 'test_item')\n\n # test single item in bucketlist\n self.item_id = one_item[\"item_id\"]\n single_item = self.client.get(\n \"/bucketlists/\" + str(self.bucketlist_id) + 
\"/items/\" + str(self.item_id) + \"\",\n headers={'Authorization': self.user_token}\n )\n\n created_item = json.loads(single_item.data)\n\n self.assertEqual(single_item.status_code, 200)\n self.assertEqual(created_item[\"name\"], 'test_item')\n\n # test for deletion of bucketlist\n second_bucketlist = self.client.post(\n \"/bucketlists/\",\n data=dict(name='second_bucketlist'),\n headers={'Authorization': self.user_token}\n )\n\n bucketlist_two = json.loads(second_bucketlist.data)\n\n self.assertEqual(second_bucketlist.status_code, 200)\n self.assertEqual(bucketlist_two[\"name\"], 'second_bucketlist')\n\n delete_response = self.client.delete(\n \"/bucketlists/\" + str(bucketlist_two[\"bucketlist_id\"]) + \"\",\n headers={'Authorization': self.user_token}\n )\n\n deletion = json.loads(delete_response.data)\n\n self.assertEqual(delete_response.status_code, 200)\n self.assertEqual(deletion[\"message\"], \"Deleted\")\n\n # test for deletion of an item in bucketlist\n delete_item = self.client.delete(\n \"/bucketlists/\" + str(bucketlist[\"bucketlist_id\"]) + \"/items/\" + str(one_item[\"item_id\"]) + \"\",\n headers={'Authorization': self.user_token}\n )\n\n item_deletion = json.loads(delete_item.data)\n\n self.assertEqual(delete_item.status_code, 200)\n self.assertEqual(item_deletion[\"message\"], \"Deleted\")\n\n # test for updating of bucketlist\n self.bucketlist_id = bucketlist[\"bucketlist_id\"]\n bucketlist_update = self.client.put(\n \"/bucketlists/\" + str(self.bucketlist_id) + \"\",\n data=dict(name='bucketlist_test'),\n headers={'Authorization': self.user_token}\n )\n\n updated_bucketlist = json.loads(bucketlist_update.data)\n\n self.assertEqual(bucketlist_update.status_code, 200)\n self.assertEqual(updated_bucketlist[\"name\"], 'bucketlist_test')\n\n # test update of item in bucketlist\n item = self.client.post(\n \"/bucketlists/\" + str(self.bucketlist_id) + \"/items/\",\n data=dict(name=\"test_item\"),\n headers={'Authorization': self.user_token}\n )\n\n one_item = json.loads(item.data)\n\n item_update = self.client.put(\n \"/bucketlists/\" + str(self.bucketlist_id) + \"/items/\"+ str(one_item[\"item_id\"]) + \"\",\n data=dict(name=\"item_test\"),\n headers={'Authorization': self.user_token}\n )\n\n updated_item = json.loads(item_update.data)\n\n self.assertEqual(item_update.status_code, 200)\n self.assertEqual(updated_item[\"name\"], 'item_test')", "def test_bucket_by_id_is_returned_on_get_request(self):\n with self.client:\n token = self.get_user_token()\n # Create a Bucket\n response = self.client.post(\n '/bucketlists',\n data=json.dumps(dict(name='Travel')),\n headers=dict(Authorization='Bearer ' + token),\n content_type='application/json'\n )\n # Test Bucket creation\n data = json.loads(response.data.decode())\n self.assertEqual(response.status_code, 201)\n self.assertTrue(data['status'], 'success')\n self.assertTrue(data['name'], 'Travel')\n response = self.client.get(\n '/bucketlists/1',\n headers=dict(Authorization='Bearer ' + token)\n )\n data = json.loads(response.data.decode())\n self.assertEqual(response.status_code, 200)\n self.assertTrue(data['status'] == 'success')\n self.assertTrue(data['bucket']['name'] == 'travel')\n self.assertIsInstance(data['bucket'], dict)\n self.assertTrue(response.content_type == 'application/json')", "def test_get_bucketlist_item_id(self):\n resp = self.client.post('/bucketlists',\n data=json.dumps(self.bucketlist),\n content_type=\"application/json\", headers={\n \"Authorization\": self.token\n })\n self.assertEqual(resp.status_code, 
201)\n resp_item = self.client.post('/bucketlistitems/1/items',\n data=json.dumps(\n {\"name\": \"visit the busy surburbs.\"}),\n content_type=\"application/json\", headers={\n \"Authorization\": self.token\n })\n self.assertEqual(resp.status_code, 201)\n get_item = self.client.get('/bucketlistitems/1/items/1', headers={\n \"Authorization\": self.token\n })\n self.assertEqual(resp.status_code, 201)", "def test_buckets_returned_when_searched(self):\n with self.client:\n token = self.get_user_token()\n self.create_buckets(token)\n response = self.client.get(\n '/bucketlists/?q=T',\n headers=dict(Authorization='Bearer ' + token)\n )\n data = json.loads(response.data.decode())\n self.assertTrue(data['status'] == 'success')\n self.assertIsInstance(data['buckets'], list, 'Items must be a list')\n self.assertEqual(len(data['buckets']), 3)\n self.assertEqual(data['buckets'][0]['id'], 1)\n self.assertEqual(data['count'], 6)\n self.assertEqual(data['next'], 'http://localhost/bucketlists/?page=2')\n self.assertEqual(data['previous'], None)\n self.assertEqual(response.status_code, 200)", "def test_buckets_returned_when_searched_2(self):\n with self.client:\n token = self.get_user_token()\n self.create_buckets(token)\n response = self.client.get(\n '/bucketlists/?q=T&page=2',\n headers=dict(Authorization='Bearer ' + token)\n )\n data = json.loads(response.data.decode())\n self.assertTrue(data['status'] == 'success')\n self.assertIsInstance(data['buckets'], list, 'Items must be a list')\n self.assertEqual(len(data['buckets']), 3)\n self.assertEqual(data['buckets'][0]['id'], 4)\n self.assertEqual(data['count'], 6)\n self.assertEqual(data['next'], None)\n self.assertEqual(data['previous'], 'http://localhost/bucketlists/?page=1')\n self.assertEqual(response.status_code, 200)", "def test_api_get_bucketlist_by_id(self):\n res_post = self.client().post('/bucketlist', data=self.bucketlist)\n self.assertEqual(res_post.status_code, 201)\n res_in_json = json.loads(res_post.data.decode('UTF-8').replace(\"'\", \"\\\"\"))\n res = self.client().get(f\"/bucketlist/{res_in_json['id']}\")\n self.assertEqual(res.status_code, 200)\n self.assertIn('Go to vacation', str(res.data))", "def test_create_bucket_list_return(self):\n bucket = BucketList(\"\", \"\")\n bucket = bucket.create_bucket_list(\"Name\", \"Completed\")\n self.assertIsInstance(bucket, BucketList)", "def test_no_bucket_returned_by_given_id(self):\n with self.client:\n token = self.get_user_token()\n\n response = self.client.get(\n '/bucketlists/1',\n headers=dict(Authorization='Bearer ' + token)\n )\n data = json.loads(response.data.decode())\n self.assertEqual(response.status_code, 200)\n self.assertTrue(data['status'] == 'success')\n self.assertIsInstance(data['bucket'], list)\n self.assertTrue(response.content_type == 'application/json')", "def test_create_bucketlist_item(self):\n resp = self.client.post('/bucketlists',\n data=json.dumps(self.bucketlist),\n content_type=\"application/json\", headers={\n \"Authorization\": self.token\n })\n self.assertEqual(resp.status_code, 201)\n response = self.client.get(\n \"/bucketlists/1\", headers={\n \"Authorization\": self.token})\n self.assertEqual(response.status_code, 200)\n resp_item = self.client.post('/bucketlistitems/1/items',\n data=json.dumps(\n {\"name\": \"visit the busy surburbs.\"}),\n content_type=\"application/json\", headers={\n \"Authorization\": self.token\n })\n result = json.loads(resp_item.data)\n self.assertEqual(result[\"message\"],\n \"Bucket list item added successfully.\")\n 
self.assertEqual(resp.status_code, 201)", "def test_get_bucket(self):\n pass", "def get_bucketlist():\n pass", "def test_get_buckets(self):\n pass", "def test_add_bucketlist_items(self):\r\n email = \"test@test.com\"\r\n _pword = \"test\"\r\n user = User.query.filter_by(email=email).first()\r\n bucketlist = BucketList.query.filter_by(user_id=user.id, name=\"test bucketlist\").first()\r\n item_no = BucketListItem.query.filter_by(bucketlist_id=bucketlist.id).count()\r\n response = self.add_bucketlist_item(email, _pword, bucketlist.id, \"bucketlist item name\")\r\n result = json.loads(response.data.decode('utf-8'))\r\n self.assertEqual(response.status, '201 CREATED')\r\n self.assertEqual(result['message'], 'Bucket list item added')\r\n new_item_no = BucketListItem.query.filter_by(bucketlist_id=bucketlist.id).count()\r\n self.assertLess(item_no, new_item_no)", "def test_list_bucket(self):\n\n if self.bos.does_bucket_exist(\"aaaaaaxzr1\"):\n self.bos.delete_bucket(\"aaaaaaxzr1\")\n if self.bos.does_bucket_exist(\"aaaaaaxzr2\"):\n self.bos.delete_bucket(\"aaaaaaxzr2\")\n\n time1 = utils.get_canonical_time()\n self.bos.create_bucket(\"aaaaaaxzr1\")\n\n time2 = utils.get_canonical_time()\n self.bos.create_bucket(\"aaaaaaxzr2\")\n\n response = self.bos.list_buckets()\n self.check_headers(response)\n\n self.assertEqual(response.owner.id, bos_test_config.OWNER_ID)\n self.assertEqual(response.owner.display_name, bos_test_config.DISPLAY_NAME)\n for bucket in response.buckets:\n if bucket.name == \"aaaaaaxzr1\":\n self.assertEqual(\n compat.convert_to_bytes(bucket.creation_date)[0:19], \n compat.convert_to_bytes(time1)[0:19])\n elif bucket.name == \"aaaaaaxzr2\":\n self.assertEqual(\n compat.convert_to_bytes(bucket.creation_date)[0:19], \n compat.convert_to_bytes(time2)[0:19])\n self.bos.delete_bucket(\"aaaaaaxzr1\")\n self.bos.delete_bucket(\"aaaaaaxzr2\")", "def test_bucketlist_create(self):\n res = self.client().post('/bucketlist', data=self.bucketlist)\n self.assertEqual(res.status_code, 201)\n self.assertIn('Go to vacation', str(res.data))", "def test_list_all_bucektlists_for_authenticated_user(self):\n\n response = self.client.get(\n \"/bucketlists/\",\n headers={'Authorization': self.user_token}\n )\n\n self.assertEqual(response.status_code, 200)\n self.assertEqual(response.data, '[]\\n')", "def test_put_bucketlist_item(self):\r\n data = {\"name\": \"bucketlist item name\", \"completed\": \"true\"}\r\n email = \"test@test.com\"\r\n _pword = \"test\"\r\n user = User.query.filter_by(email=email).first()\r\n bucketlist = BucketList.query.filter_by(user_id=user.id, name=\"test bucketlist\").first()\r\n item = BucketListItem.query.filter_by(bucketlist_id=bucketlist.id, id=1).first()\r\n self.assertNotEqual(item.name, \"bucketlist item name\")\r\n self.assertFalse(item.completed)\r\n\r\n response = self.put_bucketlist_item(email, _pword, bucketlist.id, 1, data)\r\n result = json.loads(response.data.decode('utf-8'))\r\n item2 = BucketListItem.query.filter_by(bucketlist_id=bucketlist.id, id=1).first()\r\n self.assertEqual(response.status, '201 CREATED')\r\n self.assertEqual(item2.name, \"bucketlist item name\")\r\n self.assertTrue(item2.completed)", "def test_delete_bucketlist_item(self):\n self.register_user()\n result = self.login_user()\n access_token = json.loads(result.data.decode())['access_token']\n\n # create a bucketlist by making a POST request\n res = self.client().post(\n '/api/v1/bucketlists/',\n headers=dict(Authorization=\"Bearer \" + access_token),\n data=self.bucketlist)\n 
self.assertEqual(res.status_code, 201)\n # get the json with the bucketlist\n results = json.loads(res.data.decode())\n\n # create a bucketlist item by making a POST request and add it to the created bucketlist\n res = self.client().post(\n '/api/v1/bucketlists/{}/items/'.format(results['id']),\n headers=dict(Authorization=\"Bearer \" + access_token),\n data={\n \"name\": \"Eat fried crabs\"\n })\n self.assertEqual(res.status_code, 201)\n # get the json containing the created bucketlist item\n res_item = json.loads(res.data.decode())\n\n # delete the bucketlist item we just created\n res = self.client().delete(\n '/api/v1/bucketlists/{}/items/{}'.format(results['id'], res_item['id']),\n headers=dict(Authorization=\"Bearer \" + access_token), )\n self.assertEqual(res.status_code, 200)\n\n # Test to see if it exists, should return a 404\n result = self.client().get(\n '/api/v1/bucketlists/{}/items/1'.format(results['id']),\n headers=dict(Authorization=\"Bearer \" + access_token))\n self.assertEqual(result.status_code, 404)", "def test_edit_bucketlist(self):\n post_data = self.post_a_bucket()\n self.assertEqual(post_data.status_code, 201)\n result_of_put_method = self.client().put(\n '/bucketlists/1',\n headers=dict(Authorization='Bearer '\n + self.token()\n ),\n data={\n \"name\": \"The seasons will be, summer winter and autumn\"\n })\n self.assertEqual(result_of_put_method.status_code, 201)\n result_of_get_method = self.client().get('/bucketlists/1',\n headers=dict(Authorization='Bearer '\n + self.token())\n )\n self.assertIn('The seasons will b', str(result_of_get_method.data))", "def test_api_delete_bucketlist(self):\n\n res_post = self.client().post('/bucketlist', data={'name': \"Don't forget to exercise\"})\n self.assertEqual(res_post.status_code, 201)\n res_post_in_json = json.loads(res_post.data.decode('UTF-8'))\n id = res_post_in_json['id']\n res_delete = self.client().delete(f\"/bucketlist/{id}\")\n self.assertEqual(res_delete.status_code, 200)\n\n # should return 404 after delete the data\n res = self.client().get(f'/bucketlist/{id}')\n self.assertEqual(res.status_code, 404)", "def test_create_bucketlist_view_returns_200_status_code(self):\n response = self.app.get('/create-bucketlist')\n self.assertEqual(response.status_code, 200, \n \"should return a status code of 200\")", "def test_get_item_list(self):\n resp = self.app.get('/items')\n self.assertEqual(resp.status_code, status.HTTP_200_OK)\n data = json.loads(resp.data)\n self.assertEqual(len(data), 3)", "def test_bucketlist_creation(self):\n post_data = self.post_a_bucket()\n self.assertEqual(post_data.status_code, 201)\n self.assertIn('Climb the Himalayas', str(post_data.data))", "def manipulate_bucketlist():\n pass", "def get(self, user):\n search = True if self.request.args.get('q') else False\n limit = int(self.request.args.get('limit')) if self.request.args.get('limit') else 20\n page = int(self.request.args.get('page')) if self.request.args.get('page') else 1\n bucketlists = user.bucketlists.paginate(page, limit, True).items\n bucketlists = user.bucketlists.filter(Bucketlist.name.contains(self.request.args.get('q'))) if self.request.args.get('q') else bucketlists\n\n bucketlists = [\n {'id': bucketlist.id,\n 'name': bucketlist.name,\n 'items': [\n {'id': item.id,\n 'name': item.description,\n 'date_created': str(item.date_created),\n 'date_modified': str(item.date_modified),\n 'done': str(item.is_done)\n } for item in bucketlist.items\n ],\n 'date_created': str(bucketlist.date_created),\n 'date_modified': 
str(bucketlist.date_modified),\n 'created_by': bucketlist.created_by\n } for bucketlist in bucketlists\n ]\n\n # if empty retutn no bucketlists added\n if not bucketlists:\n return \"You have no avialable bucketlists\", 200\n\n return bucketlists, 200", "def test_api_get_bucketlist_by_id_not_exist(self):\n res = self.client().get(f\"/bucketlist/99\")\n self.assertEqual(res.status_code, 404)", "def test_delete_bucketlist(self):\n post_data = self.post_a_bucket()\n self.assertEqual(post_data.status_code, 201)\n result_of_delete_method = self.client().delete('/bucketlists/1',\n headers=dict(Authorization='Bearer '\n + self.token())\n )\n self.assertEqual(result_of_delete_method.status_code, 200)\n response_after_removal = self.client().get('/bucketlists/1',\n headers=dict(Authorization='Bearer '\n + self.token())\n )\n self.assertEqual(response_after_removal.status_code, 400)", "def post_bucketlist():\n pass", "def test_delete_bucketlistitem_by_id(self):\n resp = self.client.post('/bucketlists',\n data=json.dumps(self.bucketlist),\n content_type=\"application/json\", headers={\n \"Authorization\": self.token\n })\n self.assertEqual(resp.status_code, 201)\n resp_item = self.client.post('/bucketlistitems/1/items',\n data=json.dumps(\n {\"name\": \"visit the busy surburbs.\"}),\n content_type=\"application/json\", headers={\n \"Authorization\": self.token\n })\n self.assertEqual(resp.status_code, 201)\n delete_item = self.client.delete('/bucketlistitems/1/items/1',\n headers={\n \"Authorization\": self.token\n })\n self.assertEqual(delete_item.status_code, 204)", "def test_api_edit_bucketlist(self):\n res_post = self.client().post('/bucketlist', data={'name': 'Wake up, Eat, Code, Sleep & Repeat'})\n self.assertEqual(res_post.status_code, 201)\n res_post_in_json = json.loads(res_post.data.decode('UTF-8').replace(\"'\", \"\\\"\"))\n id = res_post_in_json['id']\n res_put = self.client().put(\n f'bucketlist/{id}',\n data={\n 'name': \"Don't forget to exercise\"\n }\n )\n self.assertEqual(res_put.status_code, 200)\n res = self.client().get(f'/bucketlist/{id}')\n self.assertIn(\"exercise\", str(res.data))", "def test_get_list(self):\n pass", "def test_s3_csv_file_list(\n self,\n mock_init_storage_function,\n mock_get_list_of_bucket_files\n ): # pylint: disable=unused-argument\n\n with self.app.app_context():\n url = '/donation/s3/csv/files?bucket={}&path={}'\n\n bucket = self.app.config[ 'AWS_CSV_FILES_BUCKET' ]\n path = self.app.config[ 'AWS_CSV_FILES_PATH' ]\n\n response = self.test_client.get( url.format( bucket, path ), headers=self.headers )\n self.assertEqual( response.status_code, status.HTTP_200_OK )", "def test_single_bucketlist_item_delete_with_no_auth_header(self):\n self.register_user()\n result = self.login_user()\n access_token = json.loads(result.data.decode())['access_token']\n\n res = self.client().post(\n '/api/v1/bucketlists/',\n headers=dict(Authorization=\"Bearer \" + access_token),\n data={'name': 'Visit the Grand Canyon!'})\n self.assertEqual(res.status_code, 201)\n # get the bucketlist in json\n results = json.loads(res.data.decode())\n\n # create a bucketlist item by making a POST request and add it to the created bucketlist\n res = self.client().post(\n '/api/v1/bucketlists/{}/items/'.format(results['id']),\n headers=dict(Authorization=\"Bearer \" + access_token),\n data={\n \"name\": \"Eat fried crabs\"\n })\n self.assertEqual(res.status_code, 201)\n # get the json containing the created bucketlist item\n res_item = json.loads(res.data.decode())\n\n # delete the bucketlist 
item we just created\n res = self.client().delete(\n '/api/v1/bucketlists/{}/items/{}'.format(results['id'], res_item['id']),\n headers=dict(), )\n self.assertEqual(res.status_code, 401)\n self.assertIn('Header with key Authorization missing.', str(res.data))", "def list_bucket(self, bucket):\n self.response.write('Listbucket result:\\n')\n\n page_size = 1\n stats = gcs.listbucket(bucket + '/foo', max_keys=page_size)\n while True:\n count = 0\n for stat in stats:\n count += 1\n self.response.write(repr(stat))\n self.response.write('\\n')\n\n if count != page_size or count == 0:\n break\n stats = gcs.listbucket(bucket + '/foo', max_keys=page_size,\n marker=stat.filename)", "def list_bucket(self, bucket):\n\n self.response.write(\"Listbucket result:\\n\")\n\n # Production apps should set page_size to a practical value.\n page_size = 1\n stats = cloudstorage.listbucket(bucket + \"/foo\", max_keys=page_size)\n while True:\n count = 0\n for stat in stats:\n count += 1\n self.response.write(repr(stat))\n self.response.write(\"\\n\")\n\n if count != page_size or count == 0:\n break\n stats = cloudstorage.listbucket(\n bucket + \"/foo\", max_keys=page_size, marker=stat.filename\n )", "def test_single_bucketlist_item_delete_with_empty_token(self):\n self.register_user()\n result = self.login_user()\n access_token = json.loads(result.data.decode())['access_token']\n\n res = self.client().post(\n '/api/v1/bucketlists/',\n headers=dict(Authorization=\"Bearer \" + access_token),\n data=self.bucketlist)\n\n # assert that the bucketlist is created\n self.assertEqual(res.status_code, 201)\n # get the response data in json format\n results = json.loads(res.data.decode())\n\n # create a bucketlist item by making a POST request and add it to the created bucketlist\n res = self.client().post(\n '/api/v1/bucketlists/{}/items/'.format(results['id']),\n headers=dict(Authorization=\"Bearer \" + access_token),\n data={\n \"name\": \"Eat fried crabs\"\n })\n self.assertEqual(res.status_code, 201)\n # get the json containing the created bucketlist item\n res_item = json.loads(res.data.decode())\n\n # delete the bucketlist item we just created\n res = self.client().delete(\n '/api/v1/bucketlists/{}/items/{}'.format(results['id'], res_item['id']),\n headers=dict(Authorization=\"\"), )\n self.assertEqual(res.status_code, 401)\n self.assertIn('Token not provided in the header with key Authorization.', str(res.data))", "def test_delete_bucketlist_item(self):\r\n email = \"test@test.com\"\r\n _pword = \"test\"\r\n user = User.query.filter_by(email=email).first()\r\n bucketlist = BucketList.query.filter_by(user_id=user.id, name=\"test bucketlist\").first()\r\n item = BucketListItem.query.filter_by(bucketlist_id=bucketlist.id, id=1).first()\r\n self.assertTrue(item)\r\n\r\n response = self.delete_bucketlist_item(email, _pword, bucketlist.id, item.id)\r\n result = json.loads(response.data.decode('utf-8'))\r\n self.assertEqual(response.status, '200 OK')\r\n self.assertEqual(\r\n result['message'],\r\n 'Bucketlist Item with ID {} deleted'.format(item.id)\r\n )\r\n item = BucketListItem.query.filter_by(bucketlist_id=bucketlist.id, id=1).first()\r\n self.assertFalse(item)", "def test_listtem_using_get(self):\n pass", "def test_list(self):\n key = Key(self.bucket)\n name, version, filename = 'mypkg', '1.2', 'pkg.tar.gz'\n key.key = name + '/' + filename\n key.set_metadata('name', name)\n key.set_metadata('version', version)\n key.set_contents_from_string('foobar')\n package = list(self.storage.list(Package))[0]\n 
self.assertEquals(package.name, name)\n self.assertEquals(package.version, version)\n self.assertEquals(package.filename, filename)", "def test_ls(self):\n fake_key = namedtuple('Key', ['name', 'last_modified', 'size'])\n num_items = 10\n items = []\n for i in range(num_items):\n items.append({'name': 'item_%d' % i, 'last_modified': 'fake_date', 'size': 100})\n\n backend = self.test_init_valid()\n\n backend.bucket.get_all_keys.return_value = [fake_key(i['name'], i['last_modified'], i['size']) for i in items]\n\n results = backend.ls()\n self.assertEqual(len(results), num_items)\n self.assertEqual(results, items)\n\n for item in results:\n self.assertEqual(item['size'], 100)\n self.assertEqual(item['last_modified'], 'fake_date')", "def get_items():\n return requester.perform_request(Uri.items)", "def list_buckets():\n pass", "def get(self, user, id):\n # Search for bucketlist\n bucketlist = Bucketlist.query.filter_by(\n id=id, created_by=user.email).first()\n\n # return 400 if bucketlist non exixtant or not belongs to this user\n if bucketlist is None:\n return 'Bucketlist not found', 202\n\n # serialize items if ann\n bucketlists_items = [\n {'id': item.id,\n 'name': item.description,\n 'date_created': str(item.date_created),\n 'date_modified': str(item.date_modified),\n 'done': str(item.is_done)\n } for item in bucketlist.items\n ]\n\n # serialize bucketlist\n response_bucketlist = [\n {'id': bucketlist.id,\n 'name': bucketlist.name,\n 'items': bucketlists_items,\n 'date_created': str(bucketlist.date_created),\n 'date_modified': str(bucketlist.date_modified),\n 'created_by': bucketlist.created_by\n }\n ]\n\n return response_bucketlist, 200", "def listBucket(self, clientCall):\n\t\ttry:\n\t\t\tresponse \t= clientCall.list_objects(Bucket=self.bucket)\n\t\t\tresponse \t= self.parseJson(\"Contents[].Key\", response)\n\n\t\t\tfor objects in response:\n\t\t\t\twrite(var=f\"{g}#\", color=w, data=f\"{objects}\")\n\t\t\t\tsleep(0.01)\n\n\t\t\treturn(response)\n\n\t\texcept botocore.exceptions.ClientError as e:\n\t\t\tif \"AccessDenied\" and \"ListObjects\" in e.args[0]:\n\t\t\t\treturn(None)", "def test_bucket_is_deleted(self):\n with self.client:\n # Get an auth token\n token = self.get_user_token()\n response = self.client.post(\n '/bucketlists',\n data=json.dumps(dict(name='Travel')),\n headers=dict(Authorization='Bearer ' + token),\n content_type='application/json'\n )\n # Test Bucket creation\n data = json.loads(response.data.decode())\n self.assertEqual(response.status_code, 201)\n self.assertTrue(data['status'], 'success')\n self.assertTrue(data['name'], 'Travel')\n # Delete the created Bucket\n res = self.client.delete(\n '/bucketlists/1',\n headers=dict(Authorization='Bearer ' + token)\n )\n data = json.loads(res.data.decode())\n self.assertEqual(res.status_code, 200)\n self.assertTrue(data['status'] == 'success')\n self.assertTrue(data['message'] == 'Bucket Deleted successfully')\n self.assertTrue(res.content_type == 'application/json')", "def test_get_buckets(self):\n conn = boto3.resource('s3', region_name='us-east-1')\n # We need to create the bucket since this is all in Moto's 'virtual' AWS account\n conn.create_bucket(Bucket='foobucket')\n\n s3_connector = S3Connector()\n s3_connector.connect(\"default\")\n self.assertEqual(s3_connector.get_buckets(), [\"foobucket\"])", "def list_bucket(self, bucket):\n self.response.write('Creating more files for listbucket...\\n')\n self.create_file(bucket + '/foo1')\n self.create_file(bucket + '/foo2')\n self.response.write('\\nListbucket 
result:\\n')\n\n page_size = 1\n stats = gcs.listbucket(bucket, max_keys=page_size)\n while True:\n count = 0\n for stat in stats:\n count += 1\n self.response.write(repr(stat))\n self.response.write('\\n')\n\n if count != page_size or count == 0:\n break\n last_filename = stat.filename[len(bucket) + 1:]\n stats = gcs.listbucket(bucket, max_keys=page_size, marker=last_filename)", "def update_bucketlist():\n pass", "def test_bucket_is_updated(self):\n with self.client:\n # Get an auth token\n token = self.get_user_token()\n # Create a Bucket\n response = self.client.post(\n '/bucketlists',\n data=json.dumps(dict(name='Travel')),\n headers=dict(Authorization='Bearer ' + token),\n content_type='application/json'\n )\n # Test Bucket creation\n data = json.loads(response.data.decode())\n self.assertEqual(response.status_code, 201)\n self.assertTrue(data['status'], 'success')\n self.assertTrue(data['name'], 'Travel')\n # Update the bucket name\n res = self.client.put(\n '/bucketlists/1',\n headers=dict(Authorization='Bearer ' + token),\n data=json.dumps(dict(name='Adventure')),\n content_type='application/json'\n )\n data = json.loads(res.data.decode())\n self.assertEqual(res.status_code, 201)\n self.assertTrue(res.content_type == 'application/json')\n self.assertTrue(data['status'] == 'success')\n self.assertTrue(data['name'] == 'Adventure')\n self.assertEqual(data['id'], 1)", "def test_list_bucket_content(self):\n conn = boto3.resource('s3', region_name='us-east-1')\n # We need to create the bucket since this is all in Moto's 'virtual' AWS account\n conn.create_bucket(Bucket='foobucket')\n # Upload fake file to bucket\n s3 = boto3.client('s3')\n with open('test/test_resources/test_file', 'rb') as data:\n s3.upload_fileobj(data, 'foobucket', 'foofile')\n\n s3_connector = S3Connector()\n s3_connector.connect(\"default\")\n self.assertEqual(s3_connector.list_bucket_content(\n \"foobucket\"), [\"foofile\"])", "def test_get_all_items(test_client):\n\n response = test_client.get(BASE_URL)\n\n data = json.loads(response.get_data())\n\n assert response.status_code == 200\n assert len(data['items']) == 3", "def list_buckets():\n for bucket in BUCKET_MANAGER.all_buckets():\n print(bucket)", "def test_update_busketlistitem_by_id(self):\n resp = self.client.post('/bucketlists',\n data=json.dumps(self.bucketlist),\n content_type=\"application/json\", headers={\n \"Authorization\": self.token\n })\n self.assertEqual(resp.status_code, 201)\n resp_item = self.client.post('/bucketlistitems/1/items',\n data=json.dumps(\n {\"name\": \"visit the busy surburbs.\"}),\n content_type=\"application/json\", headers={\n \"Authorization\": self.token\n })\n self.assertEqual(resp.status_code, 201)\n update_item = self.client.put('/bucketlistitems/1/items/1',\n data=json.dumps(\n {\"name\": \"visit the busy surburbs and museums too.\"}),\n content_type=\"application/json\", headers={\n \"Authorization\": self.token\n })\n self.assertEqual(update_item.status_code, 201)", "def test_get_list(self):\n #Validate the response\n resp = self.client.get('/api/v1/purchase-order/', format='json')\n self.assertEqual(resp.status_code, 200)\n \n #Validate the returned data\n resp = resp.data\n self.assertIsInstance(resp, list)\n self.assertEqual(len(resp), 1)", "def test_create_bucket_list_name(self):\n bucket = BucketList(\"\", \"\")\n bucket = bucket.create_bucket_list(\"\")\n self.assertEqual(bucket, \"Please provide a name for your bucket list\", )", "def test_list(self):\n response = self.app.get(self.url('tags.list'))\n # Test 
response...", "def test_get_all_item(self, app, item):\n res = app.store_items.get_all_items(\n header=item.header,\n type_response=ItemsResponse,\n )\n assert res.status_code == 200", "def testList(self):\n response = requests.get(url=self.url)\n headers = response.headers\n\n self.assertEqual(response.status_code, 200, WRONG_STATUS_CODE_MSG)\n self.assertEqual(\n headers['Content-Type'], 'application/json', WRONG_TYPE_RETURN_MSG)", "def list_buckets():\n for bucket in s3.buckets.all():\n print(bucket)", "def test_list_objects(self):\n for i in range(0, 10):\n self.bos.put_object_from_string(\n self.BUCKET, \n \"test_object_%s\" % compat.convert_to_bytes(random.random()),\n \"This is a string.\")\n\n response = self.bos.list_objects(self.BUCKET, prefix=\"\", delimiter=\"\")\n self.check_headers(response)\n self.assertEqual(response.is_truncated, 'false')\n self.assertEqual(response.max_keys, '1000')\n self.assertEqual(response.name, self.BUCKET)\n self.assertEqual(response.prefix, None)\n\n # TODO: test prefix and marker with Chineses\n for i in range(0, 5):\n key1 = \"test_%s\" % compat.convert_to_string(random.random())\n key2 = \"testfile_%s\" % compat.convert_to_string(random.random())\n self.bos.put_object_from_string(\n self.BUCKET, \n key1,\n \"This is a string.\")\n self.bos.put_object_from_string(\n self.BUCKET, \n key2,\n \"This is a string.\")\n\n prefix = 'test'\n marker = 'testfile'\n response = self.bos.list_objects(self.BUCKET, prefix = prefix)\n self.check_headers(response)\n self.assertEqual(len(response.contents), 20)\n self.assertEqual(response.prefix, prefix)\n response = self.bos.list_objects(self.BUCKET, marker = marker)\n self.check_headers(response)\n self.assertEqual(len(response.contents), 5)\n self.assertEqual(response.marker, marker)", "def test_single_bucketlist_item_delete_with_invalid_token(self):\n self.register_user()\n result = self.login_user()\n access_token = json.loads(result.data.decode())['access_token']\n\n res = self.client().post(\n '/api/v1/bucketlists/',\n headers=dict(Authorization=\"Bearer \" + access_token),\n data=self.bucketlist)\n\n # assert that the bucketlist is created\n self.assertEqual(res.status_code, 201)\n # get the response data in json format\n results = json.loads(res.data.decode())\n\n # create a bucketlist item by making a POST request and add it to the created bucketlist\n res = self.client().post(\n '/api/v1/bucketlists/{}/items/'.format(results['id']),\n headers=dict(Authorization=\"Bearer \" + access_token),\n data={\n \"name\": \"Eat fried crabs\"\n })\n self.assertEqual(res.status_code, 201)\n # get the json containing the created bucketlist item\n res_item = json.loads(res.data.decode())\n\n # delete the bucketlist item we just created\n res = self.client().delete(\n '/api/v1/bucketlists/{}/items/{}'.format(results['id'], res_item['id']),\n headers=dict(Authorization=access_token), )\n self.assertEqual(res.status_code, 401)\n self.assertIn('Invalid token format.', str(res.data))", "def list_bucket(self, bucket_id=None):\n url = self.prism_endpoint + \"/wBuckets\"\n\n if bucket_id is not None:\n url = url + \"/\" + bucket_id\n\n headers = {\"Authorization\": \"Bearer \" + self.bearer_token}\n\n r = requests.get(url, headers=headers)\n\n if r.status_code == 200:\n logging.info(\"Successfully obtained information about your buckets\")\n return r.json()\n else:\n logging.warning(\"HTTP Error {}\".format(r.status_code))", "async def fetch_file_list(client, bucket) -> List:\n # pylint: disable=invalid-name\n PG_HOSTNAME = 
config('PG_HOSTNAME')\n PG_DATABASE = config('PG_DATABASE')\n folder = f'backup/{PG_HOSTNAME}_{PG_DATABASE}'\n result = await client.list_objects_v2(Bucket=bucket, Prefix=folder)\n contents = result.get('Contents', None)\n file_list = list([])\n if contents:\n for content in contents:\n file_list.append(content.get('Key'))\n return file_list", "def test_deletion_handles_no_bucket_found_by_id(self):\n with self.client:\n response = self.client.delete(\n '/bucketlists/1',\n headers=dict(Authorization='Bearer ' + self.get_user_token())\n )\n data = json.loads(response.data.decode())\n self.assertEqual(response.status_code, 404)\n self.assertTrue(data['status'] == 'failed')\n self.assertTrue(data['message'] == 'Bucket resource cannot be found')\n self.assertTrue(response.content_type == 'application/json')", "def test_stubber_no_response_metadata(self):\n from botocore.stub import Stubber\n\n response = {\n \"Owner\": {\"ID\": \"foo\", \"DisplayName\": \"bar\"},\n \"Buckets\": [{\"CreationDate\": datetime.datetime(2016, 1, 20, 22, 9), \"Name\": \"baz\"}],\n }\n\n s3 = self.session.create_client(\"s3\", aws_access_key_id=\"foo\", aws_secret_access_key=\"bar\")\n with Stubber(s3) as stubber:\n stubber.add_response(\"list_buckets\", response, {})\n service_response = s3.list_buckets()\n assert service_response == response", "def test_if_app_gets_shoppinglists(self):\n li = self.client.get('/shoppinglists/?each_page=1&page_number=1',\n headers = {\n 'Content-Type':'application/json',\n 'x-access-token':self.tok})\n self.assertEqual(li.status_code, 200)", "def test_authorization_is_enforced(self):\n new_client = APIClient()\n res = new_client.get('/bucketlists/', kwargs={'pk': 2}, format=\"json\")\n self.assertEqual(res.status_code, status.HTTP_401_UNAUTHORIZED)", "async def la() -> Tuple[str]:\n li = []\n async with _create_client() as client:\n for bucket in (await client.list_buckets())['Buckets']:\n _ = await client.list_objects(Bucket=bucket['Name'])\n for item in _['Contents']:\n if item['Key'].endswith('/') is False:\n li.append(bucket['Name'] + '/' + item['Key'])\n logger.info('List all objects in all buckets.')\n return tuple(li)", "def test_invoice_item_list(self):\n self.url = reverse(\"invoiceitem-list\")\n response = self.client.get(self.url, **self.auth_headers)\n self.assertEqual(200, response.status_code)", "def list_all_buckets(riak_host,riak_port):\n url='http://%s:%s/buckets?buckets=true' % (riak_host,riak_port)\n r=requests.get(url)\n print json.dumps(r.json(), sort_keys=True, indent=4)", "def test_fail_repeated_buckelist_item(self):\r\n user = User.query.filter_by(email=\"test@test.com\").first()\r\n bucketlist = BucketList.query.filter_by(user_id=user.id, name=\"test bucketlist\").first()\r\n item_no = BucketListItem.query.filter_by(bucketlist_id=bucketlist.id).count()\r\n response = self.add_bucketlist_item(\"test@test.com\", \"test\", bucketlist.id, \"test item\")\r\n result = json.loads(response.data.decode('utf-8'))\r\n self.assertEqual(response.status, '409 CONFLICT')\r\n self.assertEqual(result['message'], 'Bucketlist Item Exists')\r\n new_item_no = BucketListItem.query.filter_by(bucketlist_id=bucketlist.id).count()\r\n self.assertEqual(item_no, new_item_no)", "def test_list(self):\n self.add_to_queue('not-image.txt')\n self.add_to_queue('riker.gif')\n\n rv = self.get('/queue/', token=self.user_token)\n\n expected = {\"filelist\": [{\"filename\": \"riker.gif\",\n \"url\": \"/queue/riker.gif\"\n }\n ]}\n self.assertJSONOk(rv, **expected)\n return", "def 
test_mocked_get_list_template(self):\n c = Client()\n response = c.get(reverse('mocked'))\n self.assertEqual(response.status_code, 200)\n self.assertIn(\"Here is the list of all possible apis:\",\n response.content)\n self.assertIn(\"^mocked_get$\", response.content)", "def test_list(self):\n bust_fragments(self.resp, ['/foo/bar', '/zip/zap'])\n self.assert_header_set('[\"/foo/bar\", \"/zip/zap\"]')", "def test_buckets(self):\n objectstore.bucket.Bucket.create('new_bucket', self.context)\n bucket = objectstore.bucket.Bucket('new_bucket')\n\n # creator is authorized to use bucket\n self.assert_(bucket.is_authorized(self.context))\n\n # another user is not authorized\n context2 = context.RequestContext('user2', 'proj2')\n self.assertFalse(bucket.is_authorized(context2))\n\n # admin is authorized to use bucket\n admin_context = context.RequestContext('admin_user', None)\n self.assertTrue(bucket.is_authorized(admin_context))\n\n # new buckets are empty\n self.assertTrue(bucket.list_keys()['Contents'] == [])\n\n # storing keys works\n bucket['foo'] = \"bar\"\n\n self.assertEquals(len(bucket.list_keys()['Contents']), 1)\n\n self.assertEquals(bucket['foo'].read(), 'bar')\n\n # md5 of key works\n self.assertEquals(bucket['foo'].md5, hashlib.md5('bar').hexdigest())\n\n # deleting non-empty bucket should throw a NotEmpty exception\n self.assertRaises(NotEmpty, bucket.delete)\n\n # deleting key\n del bucket['foo']\n\n # deleting empty bucket\n bucket.delete()\n\n # accessing deleted bucket throws exception\n self.assertRaises(NotFound, objectstore.bucket.Bucket, 'new_bucket')", "def list_buckets():\n response = s3.list_buckets()\n # Output the bucket names\n print('Existing buckets:')\n for bucket in response['Buckets']:\n print(bucket[\"Name\"])", "def test_book_list(self):\n response = self.client.get(self.url_list, format=\"application/json\")\n self.assertTrue(response.status_code == 200)\n self.assertEqual(\n json.loads(response.content)[0][\"name\"], self.book.name)", "def test_duplicate_bucketlist_item(self):\n resp = self.client.post('/bucketlists',\n data=json.dumps(self.bucketlist),\n content_type=\"application/json\", headers={\n \"Authorization\": self.token\n })\n self.assertEqual(resp.status_code, 201)\n resp_item = self.client.post('/bucketlistitems/1/items',\n data=json.dumps(\n {\"name\": \"visit the busy surburbs.\"}),\n content_type=\"application/json\", headers={\n \"Authorization\": self.token\n })\n self.assertEqual(resp.status_code, 201)\n resp_item2 = self.client.post('/bucketlistitems/1/items',\n data=json.dumps(\n {\"name\": \"visit the busy surburbs.\"}),\n content_type=\"application/json\", headers={\n \"Authorization\": self.token\n })\n result = json.loads(resp_item2.data)\n self.assertEqual(result[\"message\"], \"Item with the given name exists.\")\n self.assertEqual(resp_item2.status_code, 409)", "def test_000_list_buckets(self):\n deferred = threads.deferToThread(self.conn.get_all_buckets)\n deferred.addCallback(self._ensure_no_buckets)\n return deferred", "def test_get_single(single_bucket): # pylint: disable=redefined-outer-name\n returned_value = single_bucket.get(\"key 1\")\n\n assert returned_value == \"value 1\"", "def test_abbeys_get(self):\n query_string = [('label', 'label_example'),\n ('page', 1),\n ('per_page', 100)]\n headers = { \n 'Accept': 'application/json',\n }\n response = self.client.open(\n '/v0.0.1/abbeys',\n method='GET',\n headers=headers,\n query_string=query_string)\n self.assert200(response,\n 'Response body is : ' + 
response.data.decode('utf-8'))", "def test_put_item_wrong_id(self):\r\n data = {\"name\": \"bucketlist item name\", \"completed\": \"true\"}\r\n email = \"test@test.com\"\r\n _pword = \"test\"\r\n user = User.query.filter_by(email=email).first()\r\n bucketlist = BucketList.query.filter_by(user_id=user.id, name=\"test bucketlist\").first()\r\n item = BucketListItem.query.filter_by(bucketlist_id=bucketlist.id, id=0).first()\r\n self.assertFalse(item)\r\n\r\n response = self.put_bucketlist_item(email, _pword, bucketlist.id, 0, data)\r\n result = json.loads(response.data.decode('utf-8'))\r\n self.assertEqual(response.status, '404 NOT FOUND')\r\n self.assertEqual(\r\n result['message'],\r\n 'Bucketlist Item with ID {} not found in the database. You have requested this URI '\\\r\n '[/api/v1/bucketlist/1/items/0] but did you mean /api/v1/bucketlist/<int:bucketlist_id>/items/'\\\r\n ' or /api/v1/bucketlist/<int:bucketlist_id> or /api/v1/bucketlist ?'.format(0)\r\n )", "def delete_bucketlist():\n pass", "def test_mocked_get_list_template(self):\n c = Client()\n response = c.get(reverse('mocked'))\n self.assertEqual(response.status_code, 200)\n self.assertIn(\"Here is the list of all possible apis:\",\n response.content)\n self.assertIn(\"^mocked_post$\", response.content)", "def test_vault_get_all_vault_items(self):\n pass", "def test_get_items_page(self, mock_requests_get):\n result = resources.get_items_page(1, \"a\", 1)\n\n assert result.total == 97\n\n item = result.items[0]\n assert item.id == 1\n assert item.name == \"Thing\"\n assert item.description == \"A thing\"\n\n assert item.current.price == 100\n assert item.today.price == 110\n\n assert item.members is True\n\n item = result.items[1]\n\n assert item.current.price == 11300\n assert item.today.price == 24400000\n\n assert item.members is False\n\n item = result.items[2]\n\n assert item.current.price == 1800000000\n assert item.today.price == 43657", "def test_bucket():\n return 'test_bucket'", "def list(self, prefix='', delimiter='', marker='', headers=None):\r\n return BucketListResultSet(self, prefix, delimiter, marker, headers)", "def list_buckets(self):\n msg = \"list_buckets not implemented\"\n raise NotImplementedError(msg)", "def test_api_get_all_activities(self):\n # create a bucket\n res = self.register_login_get_token()\n self.assertEqual(res.status_code, 201)\n\n # create a activity\n res = self.client().post('/bucketlist/1/activities',\n headers=dict(\n Authorization=\"Bearer \" + self.access_token),\n data=self.activity)\n self.assertEqual(res.status_code, 201)\n\n # get activities\n res = self.client().get('/bucketlist/1/activities',\n headers=dict(\n Authorization=\"Bearer \" + self.access_token))\n self.assertEqual(res.status_code, 200)", "def test_create_bucket(self):\n pass", "def GET(self, env, start_response):\n qs = env.get('QUERY_STRING', '')\n args = urlparse.parse_qs(qs, 1)\n\n key_args = set(['cors','lifecycle', 'policy', 'logging', 'notification',\n 'tagging', 'requestPayment', 'versioning', 'versions',\n 'website', 'location'])\n\n if not key_args & set(args):\n # GET bucket to list objects\n max_keys = self.MAX_BUCKET_LISTING\n if 'max-keys' in args:\n if args.get('max-keys')[0].isdigit() is False:\n return self.get_err_response('InvalidArgument')\n max_keys = min(int(args.get('max-keys')[0]), self.MAX_BUCKET_LISTING)\n\n\n if 'acl' not in args:\n #acl request sent with format=json etc confuses swift\n env['QUERY_STRING'] = 'format=json&limit=%s' % (max_keys + 1)\n if 'marker' in args:\n env['QUERY_STRING'] 
+= '&marker=%s' % quote(args['marker'])\n if 'prefix' in args:\n env['QUERY_STRING'] += '&prefix=%s' % quote(args['prefix'])\n if 'delimiter' in args:\n env['QUERY_STRING'] += '&delimiter=%s' % quote(args['delimiter'])\n body_iter = self._app_call(env)\n if env['REQUEST_METHOD'] == 'HEAD':\n body_iter = ''\n status = self._get_status_int()\n headers = dict(self._response_headers)\n\n if is_success(status) and 'acl' in args:\n return self.get_acl(self.account_name, headers)\n\n if 'versioning' in args:\n # Just report there is no versioning configured here.\n body = ('<VersioningConfiguration '\n 'xmlns=\"http://s3.amazonaws.com/doc/2006-03-01/\"/>')\n return Response(body=body, content_type=\"text/plain\")\n\n if status != HTTP_OK:\n if status in (HTTP_UNAUTHORIZED, HTTP_FORBIDDEN):\n return self.get_err_response('AccessDenied')\n elif status == HTTP_NOT_FOUND:\n return self.get_err_response('NoSuchBucket')\n else:\n return self.get_err_response('InvalidURI')\n\n if 'location' in args:\n body = ('<?xml version=\"1.0\" encoding=\"UTF-8\"?>'\n '<LocationConstraint '\n 'xmlns=\"http://s3.amazonaws.com/doc/2006-03-01/\"')\n if self.location == 'US':\n body += '/>'\n else:\n body += ('>%s</LocationConstraint>' % self.location)\n return Response(body=body, content_type='application/xml')\n\n if 'logging' in args:\n # logging disabled\n body = ('<?xml version=\"1.0\" encoding=\"UTF-8\"?>'\n '<BucketLoggingStatus '\n 'xmlns=\"http://doc.s3.amazonaws.com/2006-03-01\" />')\n return Response(body=body, content_type='application/xml')\n\n objects = loads(''.join(list(body_iter)))\n body = ('<?xml version=\"1.0\" encoding=\"UTF-8\"?>'\n '<ListBucketResult '\n 'xmlns=\"http://s3.amazonaws.com/doc/2006-03-01\">'\n '<Prefix>%s</Prefix>'\n '<Marker>%s</Marker>'\n '<Delimiter>%s</Delimiter>'\n '<IsTruncated>%s</IsTruncated>'\n '<MaxKeys>%s</MaxKeys>'\n '<Name>%s</Name>'\n '%s'\n '%s'\n '</ListBucketResult>' %\n (\n xml_escape(args.get('prefix', '')),\n xml_escape(args.get('marker', '')),\n xml_escape(args.get('delimiter', '')),\n 'true' if max_keys > 0 and len(objects) == (max_keys + 1) else\n 'false',\n max_keys,\n xml_escape(self.container_name),\n \"\".join(['<Contents><Key>%s</Key><LastModified>%sZ</LastModif'\n 'ied><ETag>%s</ETag><Size>%s</Size><StorageClass>STA'\n 'NDARD</StorageClass><Owner><ID>%s</ID><DisplayName>'\n '%s</DisplayName></Owner></Contents>' %\n (xml_escape(unquote(i['name'])), i['last_modified'],\n i['hash'],\n i['bytes'], self.account_name, self.account_name)\n for i in objects[:max_keys] if 'subdir' not in i]),\n \"\".join(['<CommonPrefixes><Prefix>%s</Prefix></CommonPrefixes>'\n % xml_escape(i['subdir'])\n for i in objects[:max_keys] if 'subdir' in i])))\n return Response(body=body, content_type='application/xml')\n else:\n # GET specified data\n #env['REQUEST_METHOD'] = 'HEAD'\n body_iter = self._app_call(env)\n status = self._get_status_int()\n headers = dict(self._response_headers)\n\n action = args.keys().pop()\n if action == 'acl':\n # get acl\n # get policy\n acl = headers.get('X-Container-Meta-Policy') or ''\n\n if is_success(status):\n if acl:\n return Response(status=HTTP_OK, content_type='application/xml', body=unquote(acl))\n else:\n return self.get_err_response('NotSuchPolicy')\n\n elif status in (HTTP_UNAUTHORIZED, HTTP_FORBIDDEN):\n return self.get_err_response('AccessDenied')\n else:\n return self.get_err_response('InvalidURI')\n elif action == 'cors':\n # get cors\n _headers = set(['X-Container-Meta-Access-Control-Expose-Headers',\n 
'X-Container-Meta-Access-Control-Allow-Origin',\n 'X-Container-Meta-Access-Control-Max-Age',\n 'X-Container-Meta-Access-Control-Allow-Method'])\n bodye = etree.Element('CORSConfiguration')\n if _headers & set(headers):\n rule = etree.Element('CORSRule')\n if 'X-Container-Meta-Access-Control-Expose-Headers' in headers:\n valuel = headers['X-Container-Meta-Access-Control-Expose-Headers'].split(',')\n for i in valuel:\n eh = self.create_elem('ExposeHeader', i)\n rule.append(eh)\n if 'X-Container-Meta-Access-Control-Allow-Origin' in headers:\n valuel = headers['X-Container-Meta-Access-Control-Allow-Origin'].split(',')\n for i in valuel:\n ao = self.create_elem('AllowedOrigin', i)\n rule.append(ao)\n if 'X-Container-Meta-Access-Control-Max-Age' in headers:\n valuel = headers['X-Container-Meta-Access-Control-Max-Age'].split(',')\n for i in valuel:\n ma = self.create_elem('MaxAgeSeconds', i)\n rule.append(ma)\n if 'X-Container-Meta-Access-Control-Allow-Method' in headers:\n valuel = headers['X-Container-Meta-Access-Control-Allow-Method'].split(',')\n for i in valuel:\n al = self.create_elem('AllowedMethod', i)\n rule.append(al)\n rule.append(self.create_elem('ID', 'unique_rule'))\n bodye.append(rule)\n else:\n bodye.text = ''\n\n if is_success(status):\n return Response(status=HTTP_OK, content_type='application/xml', body=self.elem2xmlbody(bodye))\n elif status in (HTTP_UNAUTHORIZED, HTTP_FORBIDDEN):\n return self.get_err_response('AccessDenied')\n else:\n return self.get_err_response('InvalidURI')\n\n elif action == 'lifecycle':\n # get lifecycle\n bodye = etree.Element('LifecycleConfiguration')\n if 'X-Container-Meta-Expiration-Status' in headers:\n rule = etree.Element('Rule')\n rule.append(self.create_elem('Status', headers['X-Container-Meta-Expiration-Status']))\n rule.append(self.create_elem('ID', 'unique_rule'))\n if 'X-Container-Meta-Expiration-Prefix' in headers:\n rule.append(self.create_elem('Prefix', headers['X-Container-Meta-Expiration-Prefix']))\n if 'X-Container-Meta-Expiration-At' in headers or \\\n 'X-Container-Meta-Expiration-After' in headers:\n expir = etree.Element('Expiration')\n if 'X-Container-Meta-Expiration-At' in headers:\n expir.append(self.create_elem('Date', headers['X-Container-Meta-Expiration-At']))\n if 'X-Container-Meta-Expiration-After' in headers:\n expir.append(self.create_elem('Days', headers['X-Container-Meta-Expiration-After']))\n rule.append(expir)\n if 'X-Container-Meta-Trans-Class' in headers:\n trans = etree.Element('Transition')\n cls = self.create_elem('StorageClass', headers['X-Container-Meta-Trans-Class'])\n trans.append(cls)\n if 'X-Container-Meta-Trans-At' in headers:\n trans.append(self.create_elem('Date', headers['X-Container-Meta-Trans-At']))\n if 'X-Container-Meta-Trans-After' in headers:\n trans.append(self.create_elem('Days', headers['X-Container-Meta-Trans-After']))\n rule.append(trans)\n bodye.append(rule)\n else:\n bodye.text = ''\n\n if is_success(status):\n return Response(status=HTTP_OK, content_type='application/xml', body=self.elem2xmlbody(bodye))\n elif status in (HTTP_UNAUTHORIZED, HTTP_FORBIDDEN):\n return self.get_err_response('AccessDenied')\n else:\n return self.get_err_response('InvalidURI')\n\n elif action == 'policy':\n # get policy\n json = headers.get('X-Container-Meta-Policy') or ''\n\n if is_success(status):\n if json:\n return Response(status=HTTP_OK, content_type='application/json', body=unquote(json))\n else:\n return self.get_err_response('NotSuchPolicy')\n\n elif status in (HTTP_UNAUTHORIZED, HTTP_FORBIDDEN):\n 
return self.get_err_response('AccessDenied')\n else:\n return self.get_err_response('InvalidURI')\n elif action == 'logging':\n # get logging\n target = headers.get('X-Container-Meta-Logging-Target') or ''\n prefix = headers.get('X-Container-Meta-Logging-Prefix') or ''\n statuse = etree.Element('BucketLoggingStatus')\n if target:\n enabled = etree.Element('LoggingEnabled')\n target_bucket = self.create_elem('TargetBucket', target)\n if prefix:\n target_prefix = self.create_elem('TargetPrefix', prefix)\n enabled.append(target_bucket)\n enabled.append(target_prefix)\n statuse.append(enabled)\n else:\n pass # set text None\n\n if is_success(status):\n return Response(status=HTTP_OK, content_type='application/xml', body=self.elem2xmlbody(statuse))\n elif status in (HTTP_UNAUTHORIZED, HTTP_FORBIDDEN):\n return self.get_err_response('AccessDenied')\n else:\n return self.get_err_response('InvalidURI')\n elif action == 'notification':\n # get it\n topic = headers.get('X-Container-Meta-Noti-Topic')\n event = headers.get('X-Container-Meta-Noti-Event')\n if is_success(status):\n if topic:\n body = ('<WebsiteConfiguration xmlns=\"http://s3.amazonaws.com/doc/2006-03-01/\">'\n '<NotificationConfiguration> '\n '<TopicConfiguration>'\n '<Topic>%s</Topic>'\n '<Event>%s</Event>'\n '</TopicConfiguration>'\n '</NotificationConfiguration>',\n topic, event)\n return Response(status=HTTP_OK, content_type='application/xml', body=body)\n else:\n return self.get_err_response('NotSuchWebsite')\n\n elif status in (HTTP_UNAUTHORIZED, HTTP_FORBIDDEN):\n return self.get_err_response('AccessDenied')\n else:\n return self.get_err_response('InvalidURI')\n elif action == 'tagging':\n # get tagging\n Tagging = etree.Element('Tagging')\n TagSet = etree.Element('TagSet')\n meta_keys = [header[21:] for header in headers if header.startswith('X-Container-Meta-Tag-')]\n for key in meta_keys:\n Tag = etree.Element('Tag')\n keyvalues = headers['X-Container-Meta-Tag-' + key]\n _key = keyvalues[:len(key)]\n _value = keyvalues[len(key):]\n Tag.append(self.create_elem('Key', _key))\n Tag.append(self.create_elem('Value', _value))\n TagSet.append(Tag)\n Tagging.append(TagSet)\n if is_success(status):\n return Response(status=HTTP_OK, content_type='application/xml', body=self.elem2xmlbody(Tagging))\n elif status in (HTTP_UNAUTHORIZED, HTTP_FORBIDDEN):\n return self.get_err_response('AccessDenied')\n else:\n return self.get_err_response('InvalidURI')\n\n elif action == 'requestPayment':\n # get it\n # default value is BucketOwner\n pay = headers.get('X-Container-Meta-Payment', 'BucketOwner')\n if is_success(status):\n if pay:\n return Response(status=HTTP_OK, content_type='application/xml', body=unquote(pay))\n else:\n return self.get_err_response('NotSuchWebsite')\n\n elif status in (HTTP_UNAUTHORIZED, HTTP_FORBIDDEN):\n return self.get_err_response('AccessDenied')\n else:\n return self.get_err_response('InvalidURI')\n\n elif action == 'versioning':\n versioning = 'Enabled' if 'X-Versions-Location' in headers else 'Suspended'\n bodye = etree.Element('VersioningConfiguration')\n stat = self.create_elem('Status', versioning)\n bodye.append(stat)\n if is_success(status):\n return Response(status=HTTP_OK, content_type='application/xml', body=self.elem2xmlbody(bodye))\n elif status in (HTTP_UNAUTHORIZED, HTTP_FORBIDDEN):\n return self.get_err_response('AccessDenied')\n else:\n return self.get_err_response('InvalidURI')\n elif action == 'website':\n # get website\n website = headers.get('X-Container-Meta-Website')\n fake = 
('<WebsiteConfiguration xmlns=\"http://s3.amazonaws.com/doc/2006-03-01/\">'\n '<IndexDocument>'\n '<Suffix>index.html</Suffix>'\n '</IndexDocument>'\n '<ErrorDocument>'\n '<Key>SomeErrorDocument.html</Key>'\n '</ErrorDocument>'\n '</WebsiteConfiguration>')\n\n if is_success(status):\n if website:\n # return fake data\n return Response(status=HTTP_OK, content_type='application/xml', body=fake)\n else:\n return self.get_err_response('NotSuchWebsite')\n\n elif status in (HTTP_UNAUTHORIZED, HTTP_FORBIDDEN):\n return self.get_err_response('AccessDenied')\n else:\n return self.get_err_response('InvalidURI')\n elif action == 'location':\n bodye = self.create_elem('LocationConstraint', 'CN')\n return Response(status=HTTP_OK, content_type='application/xml', body=self.elem2xmlbody(bodye))\n elif action == 'versions':\n # get versions container\n path = '/v1/AUTH_%s/%s' % (self.account_name, self.container_name)\n env = copyenv(env, method='GET', path=path, query_string='')\n body_iter = self._app_call(env)\n status = self._get_status_int()\n\n # get origin container\n path = '/v1/AUTH_%s/%s' % (quote(self.account_name), quote(self.version_name(self.container_name)))\n env2 = copyenv(env, method='GET', path=path, query_string='')\n body_iter2 = self._app_call(env2)\n status2 = self._get_status_int()\n\n last = list(body_iter)\n history = list(body_iter2)\n res = etree.Element('ListVersionsResult')\n bucket = self.create_elem('Name', self.container_name)\n res.append(bucket)\n if last:\n last = [i for i in last[0].split('\\n') if i]\n for i in last:\n ver = etree.Element('Version')\n ver.append(self.create_elem('Key', i))\n ver.append(self.create_elem('VersionId', 'lastest'))\n ver.append(self.create_elem('IsLastest', 'true'))\n res.append(ver)\n\n if history:\n history = [i for i in history[0].split('\\n') if i]\n for i in history:\n ver = etree.Element('Version')\n ver.append(self.create_elem('Key', i.split('/')[0][3:]))\n ver.append(self.create_elem('VersionId', i.split('/')[1]))\n ver.append(self.create_elem('IsLastest', 'false'))\n res.append(ver)\n\n if is_success(status) and is_success(status2):\n return Response(status=HTTP_OK, content_type='application/xml', body=self.elem2xmlbody(res))\n elif status in (HTTP_UNAUTHORIZED, HTTP_FORBIDDEN):\n return self.get_err_response('AccessDenied')\n else:\n return self.get_err_response('InvalidURI')\n else:\n return self.get_err_response('InvalidURI')", "def test_get_basic(client):\n client.delete(\"/businesses\")\n insert_test_data(client)\n rs = client.get(\"/businesses\")\n collection = rs.json[\"result\"][\"businesses\"]\n assert len(collection) == 12", "def test_get_cloud_resources(self):\n pass" ]
[ "0.8520477", "0.8132996", "0.7968485", "0.79678524", "0.7939482", "0.791598", "0.76777196", "0.76503426", "0.760791", "0.75708175", "0.7465317", "0.7436615", "0.7426375", "0.74000996", "0.7353851", "0.73192596", "0.73068476", "0.725187", "0.72268575", "0.7211199", "0.71193653", "0.7109211", "0.7098046", "0.707913", "0.70477027", "0.7005692", "0.7002244", "0.6994109", "0.69594705", "0.6937721", "0.693645", "0.6932397", "0.69217986", "0.6862955", "0.6858156", "0.6821214", "0.68044287", "0.6761502", "0.6722465", "0.67055726", "0.66888326", "0.66877335", "0.6681608", "0.6673815", "0.6647553", "0.66289604", "0.6619871", "0.65904325", "0.65854746", "0.65689737", "0.6567732", "0.65643084", "0.65620977", "0.6559593", "0.65462714", "0.65436745", "0.65103436", "0.650723", "0.648631", "0.64578307", "0.6448968", "0.642473", "0.6399842", "0.63978577", "0.638198", "0.63419944", "0.6282766", "0.6250742", "0.62414706", "0.6231312", "0.6225189", "0.6223141", "0.62023157", "0.619202", "0.61814976", "0.6179818", "0.6169548", "0.61648184", "0.6163996", "0.61636317", "0.6159744", "0.612454", "0.61181486", "0.609687", "0.60940576", "0.6066209", "0.6062232", "0.60617644", "0.6054157", "0.6037827", "0.5997769", "0.599754", "0.5996841", "0.5992512", "0.5976147", "0.59709966", "0.59673667", "0.59672415", "0.5964139", "0.5962159" ]
0.84309566
1
Method tests the endpoint for adding a bucketlist item
def test_add_bucketlist_items(self):
        email = "test@test.com"
        _pword = "test"
        user = User.query.filter_by(email=email).first()
        bucketlist = BucketList.query.filter_by(user_id=user.id, name="test bucketlist").first()
        item_no = BucketListItem.query.filter_by(bucketlist_id=bucketlist.id).count()
        response = self.add_bucketlist_item(email, _pword, bucketlist.id, "bucketlist item name")
        result = json.loads(response.data.decode('utf-8'))
        self.assertEqual(response.status, '201 CREATED')
        self.assertEqual(result['message'], 'Bucket list item added')
        new_item_no = BucketListItem.query.filter_by(bucketlist_id=bucketlist.id).count()
        self.assertLess(item_no, new_item_no)
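For context, a minimal sketch of the route this test (and the duplicate-item test below) exercises. The actual handler is not included in this dataset, so this is an illustrative assumption only: the `BucketListItem` model and `db` session are presumed to match the ones referenced in the tests, and the URL and messages are inferred from the assertions and the `add_bucketlist_item` helper shown in the negatives.

from flask import Flask, jsonify, request

app = Flask(__name__)

@app.route('/api/v1/bucketlist/<int:bucketlist_id>/items/', methods=['POST'])
def add_bucketlist_item(bucketlist_id):
    # BucketListItem and db are assumed to come from the app under test.
    data = request.get_json()
    name = data.get('name')
    # Reject duplicates within the same bucketlist; this is the
    # 409 CONFLICT path asserted by test_fail_repeated_buckelist_item.
    existing = BucketListItem.query.filter_by(
        bucketlist_id=bucketlist_id, name=name).first()
    if existing:
        return jsonify({'message': 'Bucketlist Item Exists'}), 409
    # Otherwise create the item; this is the 201 CREATED path
    # asserted by test_add_bucketlist_items.
    item = BucketListItem(name=name, bucketlist_id=bucketlist_id,
                          finished_by=data.get('finished_by'))
    db.session.add(item)
    db.session.commit()
    return jsonify({'message': 'Bucket list item added'}), 201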
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_create_bucketlist_item(self):\n resp = self.client.post('/bucketlists',\n data=json.dumps(self.bucketlist),\n content_type=\"application/json\", headers={\n \"Authorization\": self.token\n })\n self.assertEqual(resp.status_code, 201)\n response = self.client.get(\n \"/bucketlists/1\", headers={\n \"Authorization\": self.token})\n self.assertEqual(response.status_code, 200)\n resp_item = self.client.post('/bucketlistitems/1/items',\n data=json.dumps(\n {\"name\": \"visit the busy surburbs.\"}),\n content_type=\"application/json\", headers={\n \"Authorization\": self.token\n })\n result = json.loads(resp_item.data)\n self.assertEqual(result[\"message\"],\n \"Bucket list item added successfully.\")\n self.assertEqual(resp.status_code, 201)", "def test_put_bucketlist_item(self):\r\n data = {\"name\": \"bucketlist item name\", \"completed\": \"true\"}\r\n email = \"test@test.com\"\r\n _pword = \"test\"\r\n user = User.query.filter_by(email=email).first()\r\n bucketlist = BucketList.query.filter_by(user_id=user.id, name=\"test bucketlist\").first()\r\n item = BucketListItem.query.filter_by(bucketlist_id=bucketlist.id, id=1).first()\r\n self.assertNotEqual(item.name, \"bucketlist item name\")\r\n self.assertFalse(item.completed)\r\n\r\n response = self.put_bucketlist_item(email, _pword, bucketlist.id, 1, data)\r\n result = json.loads(response.data.decode('utf-8'))\r\n item2 = BucketListItem.query.filter_by(bucketlist_id=bucketlist.id, id=1).first()\r\n self.assertEqual(response.status, '201 CREATED')\r\n self.assertEqual(item2.name, \"bucketlist item name\")\r\n self.assertTrue(item2.completed)", "def test_bucketlist_create(self):\n res = self.client().post('/bucketlist', data=self.bucketlist)\n self.assertEqual(res.status_code, 201)\n self.assertIn('Go to vacation', str(res.data))", "def test_bucketlist_creation(self):\n post_data = self.post_a_bucket()\n self.assertEqual(post_data.status_code, 201)\n self.assertIn('Climb the Himalayas', str(post_data.data))", "def test_get_bucketlist_items(self):\n resp = self.client.post('/bucketlists',\n data=json.dumps(self.bucketlist),\n content_type=\"application/json\", headers={\n \"Authorization\": self.token\n })\n self.assertEqual(resp.status_code, 201)\n\n resp_item = self.client.post('/bucketlistitems/1/items',\n data=json.dumps(\n {\"name\": \"visit the busy surburbs.\"}),\n content_type=\"application/json\", headers={\n \"Authorization\": self.token\n })\n\n self.assertEqual(resp_item.status_code, 200)\n resp_item = self.client.get('/bucketlistitems/1/items', headers={\n \"Authorization\": self.token\n })\n self.assertEqual(resp_item.status_code, 200)", "def add_bucketlist_item(self, email, password, buckelist_id, item_name):\r\n test_date = str(date(2020, 9, 22))\r\n headers = self.authentication_headers(email=email, password=password)\r\n return self.client.post(\r\n '/api/v1/bucketlist/{}/items/'.format(buckelist_id),\r\n data=json.dumps({\"name\": item_name, \"finished_by\": test_date}),\r\n content_type=\"application/json\",\r\n headers=headers,\r\n follow_redirects=True\r\n )", "def test_add_item_at_using_put(self):\n pass", "def test_creating_and_getting_a_bucketlist_for_authenticated_user(self):\n\n # test all bucketlists\n response = self.client.post(\n \"/bucketlists/\",\n data=dict(name='test_bucketlist'),\n headers={'Authorization': self.user_token}\n )\n bucketlist = json.loads(response.data)\n\n self.assertEqual(response.status_code, 200)\n self.assertEqual(bucketlist[\"name\"], 'test_bucketlist')\n\n # test single 
bucketlist\n self.bucketlist_id = bucketlist[\"bucketlist_id\"]\n single_bucketlist = self.client.get(\n \"/bucketlists/\" + str(self.bucketlist_id) + \"\",\n headers={'Authorization': self.user_token}\n )\n\n one_bucketlist = json.loads(single_bucketlist.data)\n\n self.assertEqual(single_bucketlist.status_code, 200)\n self.assertEqual(one_bucketlist[\"name\"], 'test_bucketlist')\n\n # test all items in bucketlist\n item = self.client.post(\n \"/bucketlists/\" + str(self.bucketlist_id) + \"/items/\",\n data=dict(name=\"test_item\"),\n headers={'Authorization': self.user_token}\n )\n\n one_item = json.loads(item.data)\n\n self.assertEqual(item.status_code, 200)\n self.assertEqual(one_item[\"name\"], 'test_item')\n\n # test single item in bucketlist\n self.item_id = one_item[\"item_id\"]\n single_item = self.client.get(\n \"/bucketlists/\" + str(self.bucketlist_id) + \"/items/\" + str(self.item_id) + \"\",\n headers={'Authorization': self.user_token}\n )\n\n created_item = json.loads(single_item.data)\n\n self.assertEqual(single_item.status_code, 200)\n self.assertEqual(created_item[\"name\"], 'test_item')\n\n # test for deletion of bucketlist\n second_bucketlist = self.client.post(\n \"/bucketlists/\",\n data=dict(name='second_bucketlist'),\n headers={'Authorization': self.user_token}\n )\n\n bucketlist_two = json.loads(second_bucketlist.data)\n\n self.assertEqual(second_bucketlist.status_code, 200)\n self.assertEqual(bucketlist_two[\"name\"], 'second_bucketlist')\n\n delete_response = self.client.delete(\n \"/bucketlists/\" + str(bucketlist_two[\"bucketlist_id\"]) + \"\",\n headers={'Authorization': self.user_token}\n )\n\n deletion = json.loads(delete_response.data)\n\n self.assertEqual(delete_response.status_code, 200)\n self.assertEqual(deletion[\"message\"], \"Deleted\")\n\n # test for deletion of an item in bucketlist\n delete_item = self.client.delete(\n \"/bucketlists/\" + str(bucketlist[\"bucketlist_id\"]) + \"/items/\" + str(one_item[\"item_id\"]) + \"\",\n headers={'Authorization': self.user_token}\n )\n\n item_deletion = json.loads(delete_item.data)\n\n self.assertEqual(delete_item.status_code, 200)\n self.assertEqual(item_deletion[\"message\"], \"Deleted\")\n\n # test for updating of bucketlist\n self.bucketlist_id = bucketlist[\"bucketlist_id\"]\n bucketlist_update = self.client.put(\n \"/bucketlists/\" + str(self.bucketlist_id) + \"\",\n data=dict(name='bucketlist_test'),\n headers={'Authorization': self.user_token}\n )\n\n updated_bucketlist = json.loads(bucketlist_update.data)\n\n self.assertEqual(bucketlist_update.status_code, 200)\n self.assertEqual(updated_bucketlist[\"name\"], 'bucketlist_test')\n\n # test update of item in bucketlist\n item = self.client.post(\n \"/bucketlists/\" + str(self.bucketlist_id) + \"/items/\",\n data=dict(name=\"test_item\"),\n headers={'Authorization': self.user_token}\n )\n\n one_item = json.loads(item.data)\n\n item_update = self.client.put(\n \"/bucketlists/\" + str(self.bucketlist_id) + \"/items/\"+ str(one_item[\"item_id\"]) + \"\",\n data=dict(name=\"item_test\"),\n headers={'Authorization': self.user_token}\n )\n\n updated_item = json.loads(item_update.data)\n\n self.assertEqual(item_update.status_code, 200)\n self.assertEqual(updated_item[\"name\"], 'item_test')", "def post_bucketlist():\n pass", "def test_edit_bucketlist(self):\n post_data = self.post_a_bucket()\n self.assertEqual(post_data.status_code, 201)\n result_of_put_method = self.client().put(\n '/bucketlists/1',\n headers=dict(Authorization='Bearer '\n + self.token()\n ),\n 
data={\n \"name\": \"The seasons will be, summer winter and autumn\"\n })\n self.assertEqual(result_of_put_method.status_code, 201)\n result_of_get_method = self.client().get('/bucketlists/1',\n headers=dict(Authorization='Bearer '\n + self.token())\n )\n self.assertIn('The seasons will b', str(result_of_get_method.data))", "def test_api_edit_bucketlist(self):\n res_post = self.client().post('/bucketlist', data={'name': 'Wake up, Eat, Code, Sleep & Repeat'})\n self.assertEqual(res_post.status_code, 201)\n res_post_in_json = json.loads(res_post.data.decode('UTF-8').replace(\"'\", \"\\\"\"))\n id = res_post_in_json['id']\n res_put = self.client().put(\n f'bucketlist/{id}',\n data={\n 'name': \"Don't forget to exercise\"\n }\n )\n self.assertEqual(res_put.status_code, 200)\n res = self.client().get(f'/bucketlist/{id}')\n self.assertIn(\"exercise\", str(res.data))", "def test_duplicate_bucketlist_item(self):\n resp = self.client.post('/bucketlists',\n data=json.dumps(self.bucketlist),\n content_type=\"application/json\", headers={\n \"Authorization\": self.token\n })\n self.assertEqual(resp.status_code, 201)\n resp_item = self.client.post('/bucketlistitems/1/items',\n data=json.dumps(\n {\"name\": \"visit the busy surburbs.\"}),\n content_type=\"application/json\", headers={\n \"Authorization\": self.token\n })\n self.assertEqual(resp.status_code, 201)\n resp_item2 = self.client.post('/bucketlistitems/1/items',\n data=json.dumps(\n {\"name\": \"visit the busy surburbs.\"}),\n content_type=\"application/json\", headers={\n \"Authorization\": self.token\n })\n result = json.loads(resp_item2.data)\n self.assertEqual(result[\"message\"], \"Item with the given name exists.\")\n self.assertEqual(resp_item2.status_code, 409)", "def test_create_bucket_list_return(self):\n bucket = BucketList(\"\", \"\")\n bucket = bucket.create_bucket_list(\"Name\", \"Completed\")\n self.assertIsInstance(bucket, BucketList)", "def test_create_bucket_list_name(self):\n bucket = BucketList(\"\", \"\")\n bucket = bucket.create_bucket_list(\"\")\n self.assertEqual(bucket, \"Please provide a name for your bucket list\", )", "def test_delete_bucketlist_item(self):\n self.register_user()\n result = self.login_user()\n access_token = json.loads(result.data.decode())['access_token']\n\n # create a bucketlist by making a POST request\n res = self.client().post(\n '/api/v1/bucketlists/',\n headers=dict(Authorization=\"Bearer \" + access_token),\n data=self.bucketlist)\n self.assertEqual(res.status_code, 201)\n # get the json with the bucketlist\n results = json.loads(res.data.decode())\n\n # create a bucketlist item by making a POST request and add it to the created bucketlist\n res = self.client().post(\n '/api/v1/bucketlists/{}/items/'.format(results['id']),\n headers=dict(Authorization=\"Bearer \" + access_token),\n data={\n \"name\": \"Eat fried crabs\"\n })\n self.assertEqual(res.status_code, 201)\n # get the json containing the created bucketlist item\n res_item = json.loads(res.data.decode())\n\n # delete the bucketlist item we just created\n res = self.client().delete(\n '/api/v1/bucketlists/{}/items/{}'.format(results['id'], res_item['id']),\n headers=dict(Authorization=\"Bearer \" + access_token), )\n self.assertEqual(res.status_code, 200)\n\n # Test to see if it exists, should return a 404\n result = self.client().get(\n '/api/v1/bucketlists/{}/items/1'.format(results['id']),\n headers=dict(Authorization=\"Bearer \" + access_token))\n self.assertEqual(result.status_code, 404)", "def 
test_fail_repeated_buckelist_item(self):\r\n user = User.query.filter_by(email=\"test@test.com\").first()\r\n bucketlist = BucketList.query.filter_by(user_id=user.id, name=\"test bucketlist\").first()\r\n item_no = BucketListItem.query.filter_by(bucketlist_id=bucketlist.id).count()\r\n response = self.add_bucketlist_item(\"test@test.com\", \"test\", bucketlist.id, \"test item\")\r\n result = json.loads(response.data.decode('utf-8'))\r\n self.assertEqual(response.status, '409 CONFLICT')\r\n self.assertEqual(result['message'], 'Bucketlist Item Exists')\r\n new_item_no = BucketListItem.query.filter_by(bucketlist_id=bucketlist.id).count()\r\n self.assertEqual(item_no, new_item_no)", "def test_get_bucketlist_item_id(self):\n resp = self.client.post('/bucketlists',\n data=json.dumps(self.bucketlist),\n content_type=\"application/json\", headers={\n \"Authorization\": self.token\n })\n self.assertEqual(resp.status_code, 201)\n resp_item = self.client.post('/bucketlistitems/1/items',\n data=json.dumps(\n {\"name\": \"visit the busy surburbs.\"}),\n content_type=\"application/json\", headers={\n \"Authorization\": self.token\n })\n self.assertEqual(resp.status_code, 201)\n get_item = self.client.get('/bucketlistitems/1/items/1', headers={\n \"Authorization\": self.token\n })\n self.assertEqual(resp.status_code, 201)", "def test_update_busketlistitem_by_id(self):\n resp = self.client.post('/bucketlists',\n data=json.dumps(self.bucketlist),\n content_type=\"application/json\", headers={\n \"Authorization\": self.token\n })\n self.assertEqual(resp.status_code, 201)\n resp_item = self.client.post('/bucketlistitems/1/items',\n data=json.dumps(\n {\"name\": \"visit the busy surburbs.\"}),\n content_type=\"application/json\", headers={\n \"Authorization\": self.token\n })\n self.assertEqual(resp.status_code, 201)\n update_item = self.client.put('/bucketlistitems/1/items/1',\n data=json.dumps(\n {\"name\": \"visit the busy surburbs and museums too.\"}),\n content_type=\"application/json\", headers={\n \"Authorization\": self.token\n })\n self.assertEqual(update_item.status_code, 201)", "def test_bucket_is_updated(self):\n with self.client:\n # Get an auth token\n token = self.get_user_token()\n # Create a Bucket\n response = self.client.post(\n '/bucketlists',\n data=json.dumps(dict(name='Travel')),\n headers=dict(Authorization='Bearer ' + token),\n content_type='application/json'\n )\n # Test Bucket creation\n data = json.loads(response.data.decode())\n self.assertEqual(response.status_code, 201)\n self.assertTrue(data['status'], 'success')\n self.assertTrue(data['name'], 'Travel')\n # Update the bucket name\n res = self.client.put(\n '/bucketlists/1',\n headers=dict(Authorization='Bearer ' + token),\n data=json.dumps(dict(name='Adventure')),\n content_type='application/json'\n )\n data = json.loads(res.data.decode())\n self.assertEqual(res.status_code, 201)\n self.assertTrue(res.content_type == 'application/json')\n self.assertTrue(data['status'] == 'success')\n self.assertTrue(data['name'] == 'Adventure')\n self.assertEqual(data['id'], 1)", "def test_put_item_wrong_id(self):\r\n data = {\"name\": \"bucketlist item name\", \"completed\": \"true\"}\r\n email = \"test@test.com\"\r\n _pword = \"test\"\r\n user = User.query.filter_by(email=email).first()\r\n bucketlist = BucketList.query.filter_by(user_id=user.id, name=\"test bucketlist\").first()\r\n item = BucketListItem.query.filter_by(bucketlist_id=bucketlist.id, id=0).first()\r\n self.assertFalse(item)\r\n\r\n response = self.put_bucketlist_item(email, 
_pword, bucketlist.id, 0, data)\r\n result = json.loads(response.data.decode('utf-8'))\r\n self.assertEqual(response.status, '404 NOT FOUND')\r\n self.assertEqual(\r\n result['message'],\r\n 'Bucketlist Item with ID {} not found in the database. You have requested this URI '\\\r\n '[/api/v1/bucketlist/1/items/0] but did you mean /api/v1/bucketlist/<int:bucketlist_id>/items/'\\\r\n ' or /api/v1/bucketlist/<int:bucketlist_id> or /api/v1/bucketlist ?'.format(0)\r\n )", "def test_create(self):\n responses.add(\n responses.Response(\n method='POST',\n url='https://connection.keboola.com/v2/storage/buckets',\n json=create_response\n )\n )\n name = 'my-new-bucket'\n description = 'Some Description'\n backend = 'snowflake'\n created_detail = self.buckets.create(name=name,\n description=description,\n backend=backend)\n assert created_detail['id'] == 'in.c-{}'.format(name)", "def post(self, user):\n # parse request data\n bucketlist_name = self.request.form['name']\n\n # validate bucketlist\n if not bucketlist_name:\n return \"Name cannot be empty\", 401\n\n # create bucketlist and save bucketlist\n bucketlist = Bucketlist(name=bucketlist_name, date_created=datetime.utcnow(\n ), created_by=user.username, author=user)\n bucketlist.save()\n\n return \"Successfully created bucketlist\", 201", "def test_delete_bucketlistitem_by_id(self):\n resp = self.client.post('/bucketlists',\n data=json.dumps(self.bucketlist),\n content_type=\"application/json\", headers={\n \"Authorization\": self.token\n })\n self.assertEqual(resp.status_code, 201)\n resp_item = self.client.post('/bucketlistitems/1/items',\n data=json.dumps(\n {\"name\": \"visit the busy surburbs.\"}),\n content_type=\"application/json\", headers={\n \"Authorization\": self.token\n })\n self.assertEqual(resp.status_code, 201)\n delete_item = self.client.delete('/bucketlistitems/1/items/1',\n headers={\n \"Authorization\": self.token\n })\n self.assertEqual(delete_item.status_code, 204)", "def test_add_item_using_post(self):\n pass", "def test_bucket_by_id_is_returned_on_get_request(self):\n with self.client:\n token = self.get_user_token()\n # Create a Bucket\n response = self.client.post(\n '/bucketlists',\n data=json.dumps(dict(name='Travel')),\n headers=dict(Authorization='Bearer ' + token),\n content_type='application/json'\n )\n # Test Bucket creation\n data = json.loads(response.data.decode())\n self.assertEqual(response.status_code, 201)\n self.assertTrue(data['status'], 'success')\n self.assertTrue(data['name'], 'Travel')\n response = self.client.get(\n '/bucketlists/1',\n headers=dict(Authorization='Bearer ' + token)\n )\n data = json.loads(response.data.decode())\n self.assertEqual(response.status_code, 200)\n self.assertTrue(data['status'] == 'success')\n self.assertTrue(data['bucket']['name'] == 'travel')\n self.assertIsInstance(data['bucket'], dict)\n self.assertTrue(response.content_type == 'application/json')", "def test_create_bucket(self):\n pass", "def put_bucketlist_item(self, email, password, bucketlist_id, item_id, data):\r\n headers = self.authentication_headers(email=email, password=password)\r\n return self.client.put(\r\n '/api/v1/bucketlist/{}/items/{}'.format(bucketlist_id, item_id),\r\n content_type=\"application/json\",\r\n data=json.dumps(data),\r\n headers=headers,\r\n follow_redirects=True\r\n )", "def test_create_bucketlist_view_returns_200_status_code(self):\n response = self.app.get('/create-bucketlist')\n self.assertEqual(response.status_code, 200, \n \"should return a status code of 200\")", "def 
test_single_bucketlist_item_delete_with_empty_token(self):\n self.register_user()\n result = self.login_user()\n access_token = json.loads(result.data.decode())['access_token']\n\n res = self.client().post(\n '/api/v1/bucketlists/',\n headers=dict(Authorization=\"Bearer \" + access_token),\n data=self.bucketlist)\n\n # assert that the bucketlist is created\n self.assertEqual(res.status_code, 201)\n # get the response data in json format\n results = json.loads(res.data.decode())\n\n # create a bucketlist item by making a POST request and add it to the created bucketlist\n res = self.client().post(\n '/api/v1/bucketlists/{}/items/'.format(results['id']),\n headers=dict(Authorization=\"Bearer \" + access_token),\n data={\n \"name\": \"Eat fried crabs\"\n })\n self.assertEqual(res.status_code, 201)\n # get the json containing the created bucketlist item\n res_item = json.loads(res.data.decode())\n\n # delete the bucketlist item we just created\n res = self.client().delete(\n '/api/v1/bucketlists/{}/items/{}'.format(results['id'], res_item['id']),\n headers=dict(Authorization=\"\"), )\n self.assertEqual(res.status_code, 401)\n self.assertIn('Token not provided in the header with key Authorization.', str(res.data))", "def test_get_bucketlist_items(self):\r\n email = \"test@test.com\"\r\n _pword = \"test\"\r\n user = User.query.filter_by(email=email).first()\r\n bucketlist = BucketList.query.filter_by(user_id=user.id, id=1).first()\r\n items_no = len(bucketlist.bucketlist_items)\r\n headers = self.authentication_headers(email=email, password=_pword)\r\n response = self.client.get(\r\n '/api/v1/bucketlist/1/items/',\r\n content_type=\"application/json\",\r\n headers=headers,\r\n follow_redirects=True\r\n )\r\n result = json.loads(response.data.decode('utf-8'))\r\n self.assertEqual(len(result), items_no)", "def update_bucketlist():\n pass", "def test_single_bucketlist_item_delete_with_no_auth_header(self):\n self.register_user()\n result = self.login_user()\n access_token = json.loads(result.data.decode())['access_token']\n\n res = self.client().post(\n '/api/v1/bucketlists/',\n headers=dict(Authorization=\"Bearer \" + access_token),\n data={'name': 'Visit the Grand Canyon!'})\n self.assertEqual(res.status_code, 201)\n # get the bucketlist in json\n results = json.loads(res.data.decode())\n\n # create a bucketlist item by making a POST request and add it to the created bucketlist\n res = self.client().post(\n '/api/v1/bucketlists/{}/items/'.format(results['id']),\n headers=dict(Authorization=\"Bearer \" + access_token),\n data={\n \"name\": \"Eat fried crabs\"\n })\n self.assertEqual(res.status_code, 201)\n # get the json containing the created bucketlist item\n res_item = json.loads(res.data.decode())\n\n # delete the bucketlist item we just created\n res = self.client().delete(\n '/api/v1/bucketlists/{}/items/{}'.format(results['id'], res_item['id']),\n headers=dict(), )\n self.assertEqual(res.status_code, 401)\n self.assertIn('Header with key Authorization missing.', str(res.data))", "def test_delete_bucketlist_item(self):\r\n email = \"test@test.com\"\r\n _pword = \"test\"\r\n user = User.query.filter_by(email=email).first()\r\n bucketlist = BucketList.query.filter_by(user_id=user.id, name=\"test bucketlist\").first()\r\n item = BucketListItem.query.filter_by(bucketlist_id=bucketlist.id, id=1).first()\r\n self.assertTrue(item)\r\n\r\n response = self.delete_bucketlist_item(email, _pword, bucketlist.id, item.id)\r\n result = json.loads(response.data.decode('utf-8'))\r\n 
self.assertEqual(response.status, '200 OK')\r\n self.assertEqual(\r\n result['message'],\r\n 'Bucketlist Item with ID {} deleted'.format(item.id)\r\n )\r\n item = BucketListItem.query.filter_by(bucketlist_id=bucketlist.id, id=1).first()\r\n self.assertFalse(item)", "def test_api_delete_bucketlist(self):\n\n res_post = self.client().post('/bucketlist', data={'name': \"Don't forget to exercise\"})\n self.assertEqual(res_post.status_code, 201)\n res_post_in_json = json.loads(res_post.data.decode('UTF-8'))\n id = res_post_in_json['id']\n res_delete = self.client().delete(f\"/bucketlist/{id}\")\n self.assertEqual(res_delete.status_code, 200)\n\n # should return 404 after delete the data\n res = self.client().get(f'/bucketlist/{id}')\n self.assertEqual(res.status_code, 404)", "def test_read_bucket(self):\n post_data = self.post_a_bucket()\n self.assertEqual(post_data.status_code, 201)\n result_of_get_method = self.client().get('/bucketlists/',\n headers=dict(Authorization='Bearer '\n + self.token())\n )\n self.assertEqual(result_of_get_method.status_code, 200)\n self.assertIn('Climb the Himalayas', str(result_of_get_method.data))", "def test_api_get_all_bucketlists(self):\n res = self.client().post('/bucketlist', data=self.bucketlist)\n self.assertEqual(res.status_code, 201)\n res = self.client().get('/bucketlist')\n self.assertEqual(res.status_code, 200)\n self.assertIn('Go to vacation', str(res.data))", "def manipulate_bucketlist():\n pass", "def test_api_get_bucketlist_by_id(self):\n res_post = self.client().post('/bucketlist', data=self.bucketlist)\n self.assertEqual(res_post.status_code, 201)\n res_in_json = json.loads(res_post.data.decode('UTF-8').replace(\"'\", \"\\\"\"))\n res = self.client().get(f\"/bucketlist/{res_in_json['id']}\")\n self.assertEqual(res.status_code, 200)\n self.assertIn('Go to vacation', str(res.data))", "def add_bucket_list_item(self, id, collection, item):\n if type(id) is not ObjectId:\n id = ObjectId(id)\n obj = getattr(self.db, collection)\n result = obj.update(\n {'_id': id},\n {'$addToSet': {'bucket_list': item}}\n )\n return result", "def test_app_can_add_list(self):\n add_list=self.client.post('/addshoppinglists/?user='+self.user['user'], \n data=self.shopllist, \n headers={\n 'Content-Type':'application/json',\n 'x-access-token':self.tok})\n self.assertEqual(add_list.status_code,200)", "def test_delete_bucketlist(self):\n post_data = self.post_a_bucket()\n self.assertEqual(post_data.status_code, 201)\n result_of_delete_method = self.client().delete('/bucketlists/1',\n headers=dict(Authorization='Bearer '\n + self.token())\n )\n self.assertEqual(result_of_delete_method.status_code, 200)\n response_after_removal = self.client().get('/bucketlists/1',\n headers=dict(Authorization='Bearer '\n + self.token())\n )\n self.assertEqual(response_after_removal.status_code, 400)", "def test_name_attribute_is_set_in_bucket_creation_request(self):\n with self.client:\n response = self.client.post(\n '/bucketlists',\n headers=dict(Authorization='Bearer ' + self.get_user_token()),\n data=json.dumps({}),\n content_type='application/json'\n )\n data = json.loads(response.data.decode())\n self.assertEqual(response.status_code, 400)\n self.assertTrue(data['status'], 'failed')\n self.assertTrue(data['message'], 'Missing name attribute')", "def create_item(_id, item_name, description):\n data_ = Data.get_the_data(_id, Data.bucketlists)\n for data in data_:\n bucketlist = Bucketlist(data['title'],\n data['owner'],\n data['intro'],\n data['owner_id'],\n data['_id'])\n 
bucketlist.new_item(item_name=item_name,\n description=description)", "def test_creating_a_bucket(self):\n with self.client:\n self.create_bucket(self.get_user_token())", "def add_bucket(bucket_name):\n pass", "def test_single_bucketlist_item_delete_with_invalid_token(self):\n self.register_user()\n result = self.login_user()\n access_token = json.loads(result.data.decode())['access_token']\n\n res = self.client().post(\n '/api/v1/bucketlists/',\n headers=dict(Authorization=\"Bearer \" + access_token),\n data=self.bucketlist)\n\n # assert that the bucketlist is created\n self.assertEqual(res.status_code, 201)\n # get the response data in json format\n results = json.loads(res.data.decode())\n\n # create a bucketlist item by making a POST request and add it to the created bucketlist\n res = self.client().post(\n '/api/v1/bucketlists/{}/items/'.format(results['id']),\n headers=dict(Authorization=\"Bearer \" + access_token),\n data={\n \"name\": \"Eat fried crabs\"\n })\n self.assertEqual(res.status_code, 201)\n # get the json containing the created bucketlist item\n res_item = json.loads(res.data.decode())\n\n # delete the bucketlist item we just created\n res = self.client().delete(\n '/api/v1/bucketlists/{}/items/{}'.format(results['id'], res_item['id']),\n headers=dict(Authorization=access_token), )\n self.assertEqual(res.status_code, 401)\n self.assertIn('Invalid token format.', str(res.data))", "def test_get_request_on_bucketlist_resource(self):\n\n response = self.client.get(\"/bucketlists/\")\n self.assertEqual(response.status_code, 401)", "def test_list(self):\n responses.add(\n responses.Response(\n method='GET',\n url='https://connection.keboola.com/v2/storage/buckets',\n json=list_response\n )\n )\n buckets_list = self.buckets.list()\n assert isinstance(buckets_list, list)", "def test_id_of_bucket_to_be_edited_does_not_exist(self):\n with self.client:\n # Get an auth token\n token = self.get_user_token()\n # Update the bucket name\n res = self.client.put(\n '/bucketlists/1',\n headers=dict(Authorization='Bearer ' + token),\n data=json.dumps(dict(name='Adventure')),\n content_type='application/json'\n )\n data = json.loads(res.data.decode())\n self.assertEqual(res.status_code, 404)\n self.assertTrue(res.content_type == 'application/json')\n self.assertTrue(data['status'] == 'failed')\n self.assertTrue(data['message'] == 'The Bucket with Id 1 does not exist')", "def test_update_bucket(self):\n pass", "def test_user_can_get_list_of_buckets(self):\n with self.client:\n response = self.client.get(\n '/bucketlists/',\n headers=dict(Authorization='Bearer ' + self.get_user_token())\n )\n data = json.loads(response.data.decode())\n self.assertEqual(response.status_code, 200)\n self.assertTrue(data['status'] == 'success')\n self.assertIsInstance(data['buckets'], list)\n self.assertEqual(len(data['buckets']), 0)\n self.assertEqual(data['count'], 0)\n self.assertIsInstance(data['count'], int)\n self.assertEqual(data['previous'], None)\n self.assertEqual(data['next'], None)", "def test_list_bucket(self):\n\n if self.bos.does_bucket_exist(\"aaaaaaxzr1\"):\n self.bos.delete_bucket(\"aaaaaaxzr1\")\n if self.bos.does_bucket_exist(\"aaaaaaxzr2\"):\n self.bos.delete_bucket(\"aaaaaaxzr2\")\n\n time1 = utils.get_canonical_time()\n self.bos.create_bucket(\"aaaaaaxzr1\")\n\n time2 = utils.get_canonical_time()\n self.bos.create_bucket(\"aaaaaaxzr2\")\n\n response = self.bos.list_buckets()\n self.check_headers(response)\n\n self.assertEqual(response.owner.id, bos_test_config.OWNER_ID)\n 
self.assertEqual(response.owner.display_name, bos_test_config.DISPLAY_NAME)\n for bucket in response.buckets:\n if bucket.name == \"aaaaaaxzr1\":\n self.assertEqual(\n compat.convert_to_bytes(bucket.creation_date)[0:19], \n compat.convert_to_bytes(time1)[0:19])\n elif bucket.name == \"aaaaaaxzr2\":\n self.assertEqual(\n compat.convert_to_bytes(bucket.creation_date)[0:19], \n compat.convert_to_bytes(time2)[0:19])\n self.bos.delete_bucket(\"aaaaaaxzr1\")\n self.bos.delete_bucket(\"aaaaaaxzr2\")", "def test_bucket_post_content_type_is_json(self):\n with self.client:\n response = self.client.post(\n '/bucketlists',\n headers=dict(Authorization='Bearer ' + self.get_user_token()),\n data=json.dumps(dict(name='Travel'))\n )\n data = json.loads(response.data.decode())\n self.assertEqual(response.status_code, 202)\n self.assertTrue(data['status'], 'failed')\n self.assertTrue(data['message'], 'Content-type must be json')", "def test_id_of_bucket_to_be_edited_is_invalid(self):\n with self.client:\n # Get an auth token\n token = self.get_user_token()\n # Update the bucket name\n res = self.client.put(\n '/bucketlists/bucketid',\n headers=dict(Authorization='Bearer ' + token),\n data=json.dumps(dict(name='Adventure')),\n content_type='application/json'\n )\n data = json.loads(res.data.decode())\n self.assertEqual(res.status_code, 400)\n self.assertTrue(res.content_type == 'application/json')\n self.assertTrue(data['status'] == 'failed')\n self.assertTrue(data['message'] == 'Please provide a valid Bucket Id')", "def test_get_bucket(self):\n pass", "def test_patch_bucket(self):\n pass", "def test_delete_item_with_non_existing_bucket(self):\n self.client.post('/buckets',\n content_type='application/json',\n data=self.bucket, headers=self.header)\n self.client.post('/buckets/1/items',\n content_type='application/json',\n data=self.item, headers=self.header)\n response = self.client.delete('/buckets/2/items/1'\n , headers=self.header)\n self.assertEquals(response.status_code, 400)\n self.assertIn('Attempting to delete item on non existing bucket',\n response.data.decode())", "def create_bucketlist(self, title, intro):\n bucketlist_ = Bucketlist(owner_id=self._id,\n title=title,\n intro=intro,\n owner=self.username)\n bucketlist_.save_to_bucketlists()", "def test_add_url(self):\n url = 'http://test.com/'\n info = self.api.add_url(url, tags=['asd'])\n self.assertEqual(info['value'], url)\n tags = [t['name'] for t in info['tags']]\n self.assertEqual(tags, ['asd'])", "def test_vault_create_new_vault_item(self):\n pass", "def test_putorganizations_item(self):\n pass", "def test_app_can_update_a_list(self):\n self.ne=json.dumps({\"newName\":\"pants\"})\n list_update=self.client.put('/shoppinglists/trou',\n data=self.ne,\n headers={\n 'Content-Type':'application/json',\n 'x-access-token':self.tok})\n self.assertIn(\"list doesnt exist\",str(list_update.data)) \n self.assertEqual(list_update.status_code,200)", "def add(self, item):", "def test_no_bucket_returned_by_given_id(self):\n with self.client:\n token = self.get_user_token()\n\n response = self.client.get(\n '/bucketlists/1',\n headers=dict(Authorization='Bearer ' + token)\n )\n data = json.loads(response.data.decode())\n self.assertEqual(response.status_code, 200)\n self.assertTrue(data['status'] == 'success')\n self.assertIsInstance(data['bucket'], list)\n self.assertTrue(response.content_type == 'application/json')", "def test_bucket_is_deleted(self):\n with self.client:\n # Get an auth token\n token = self.get_user_token()\n response = self.client.post(\n 
'/bucketlists',\n data=json.dumps(dict(name='Travel')),\n headers=dict(Authorization='Bearer ' + token),\n content_type='application/json'\n )\n # Test Bucket creation\n data = json.loads(response.data.decode())\n self.assertEqual(response.status_code, 201)\n self.assertTrue(data['status'], 'success')\n self.assertTrue(data['name'], 'Travel')\n # Delete the created Bucket\n res = self.client.delete(\n '/bucketlists/1',\n headers=dict(Authorization='Bearer ' + token)\n )\n data = json.loads(res.data.decode())\n self.assertEqual(res.status_code, 200)\n self.assertTrue(data['status'] == 'success')\n self.assertTrue(data['message'] == 'Bucket Deleted successfully')\n self.assertTrue(res.content_type == 'application/json')", "def test_buckets(self):\n objectstore.bucket.Bucket.create('new_bucket', self.context)\n bucket = objectstore.bucket.Bucket('new_bucket')\n\n # creator is authorized to use bucket\n self.assert_(bucket.is_authorized(self.context))\n\n # another user is not authorized\n context2 = context.RequestContext('user2', 'proj2')\n self.assertFalse(bucket.is_authorized(context2))\n\n # admin is authorized to use bucket\n admin_context = context.RequestContext('admin_user', None)\n self.assertTrue(bucket.is_authorized(admin_context))\n\n # new buckets are empty\n self.assertTrue(bucket.list_keys()['Contents'] == [])\n\n # storing keys works\n bucket['foo'] = \"bar\"\n\n self.assertEquals(len(bucket.list_keys()['Contents']), 1)\n\n self.assertEquals(bucket['foo'].read(), 'bar')\n\n # md5 of key works\n self.assertEquals(bucket['foo'].md5, hashlib.md5('bar').hexdigest())\n\n # deleting non-empty bucket should throw a NotEmpty exception\n self.assertRaises(NotEmpty, bucket.delete)\n\n # deleting key\n del bucket['foo']\n\n # deleting empty bucket\n bucket.delete()\n\n # accessing deleted bucket throws exception\n self.assertRaises(NotFound, objectstore.bucket.Bucket, 'new_bucket')", "def test_modify_item_successfully(self):\n self.client.post('/buckets',\n content_type='application/json',\n data=self.bucket, headers=self.header)\n self.client.post('/buckets/1/items',\n content_type='application/json',\n data=self.item,\n headers=self.header)\n response = self.client.put('/buckets/1/items/1',\n content_type='application/json',\n data=self.item_edit,\n headers=self.header)\n self.assertEquals(response.status_code, 200)\n self.assertIn('Item successfully updated',\n response.data.decode())", "def AddItem(self, request, context):\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')", "def test_add_item_to_cart(client):\n raise NotImplemented('Acceptance test failed')", "def test_add_stock_item(self):\n pass", "def test_delete_item_wrong_id(self):\r\n email = \"test@test.com\"\r\n _pword = \"test\"\r\n user = User.query.filter_by(email=email).first()\r\n bucketlist = BucketList.query.filter_by(user_id=user.id, name=\"test bucketlist\").first()\r\n item = BucketListItem.query.filter_by(bucketlist_id=bucketlist.id, id=0).first()\r\n self.assertFalse(item)\r\n\r\n response = self.delete_bucketlist_item(email, _pword, bucketlist.id, 0)\r\n result = json.loads(response.data.decode('utf-8'))\r\n self.assertEqual(response.status, '404 NOT FOUND')\r\n self.assertEqual(\r\n result['message'],\r\n 'Bucketlist Item with ID {} not found in the database. 
You have requested this URI '\\\r\n '[/api/v1/bucketlist/1/items/0] but did you mean /api/v1/bucketlist/<int:bucketlist_id>/items/'\\\r\n ' or /api/v1/bucketlist/<int:bucketlist_id> or /api/v1/bucketlist ?'.format(0)\r\n )", "def test_buckets_returned_when_searched(self):\n with self.client:\n token = self.get_user_token()\n self.create_buckets(token)\n response = self.client.get(\n '/bucketlists/?q=T',\n headers=dict(Authorization='Bearer ' + token)\n )\n data = json.loads(response.data.decode())\n self.assertTrue(data['status'] == 'success')\n self.assertIsInstance(data['buckets'], list, 'Items must be a list')\n self.assertEqual(len(data['buckets']), 3)\n self.assertEqual(data['buckets'][0]['id'], 1)\n self.assertEqual(data['count'], 6)\n self.assertEqual(data['next'], 'http://localhost/bucketlists/?page=2')\n self.assertEqual(data['previous'], None)\n self.assertEqual(response.status_code, 200)", "def taco_test_put_new(self):\n body = '{ \"id\": 400, \"name\": \"item_new\", \"content\": \"after test update\" }'\n env = self.get_env('PUT', '/item/4', body=body)\n webapi_start(env, lambda status, response_headers: self.assertEqual(status, '204'))", "def addItem(*args):", "def addItem(*args):", "def addItem(*args):", "def put(self, user, id):\n # parse request data\n if 'name' not in self.request.form:\n return \"Bucketlist not Update\", 202\n\n bucketlist_name = self.request.form['name']\n\n # validate bucketlist\n if not bucketlist_name:\n return \"Name cannot be empty\", 401\n\n # search for the bucketlist_id\n bucketlist = Bucketlist.query.filter_by(\n id=id, created_by=user.email).first()\n\n # return 400 if bucketlist non exixtant or not belongs to this user\n if bucketlist is None:\n return 'Bucketlist not found', 202\n\n # Update bucketlist and save changes\n bucketlist.name = bucketlist_name\n bucketlist.save()\n\n return \"Successfully updated bucketlist\", 201", "def test_api_get_bucketlist_by_id_not_exist(self):\n res = self.client().get(f\"/bucketlist/99\")\n self.assertEqual(res.status_code, 404)", "def add_items_handler():\n rq = request.get_json()\n name = rq['name']\n picture = rq['picture']\n description = rq['description']\n category_id = rq['category_id']\n item = addItem(name, picture, description, category_id, g.user.id)\n return jsonify(item=item.serialize)", "def test_create_item(self):\n\n url = reverse('stock-item-create')\n\n response = self.client.get(url, {'part': 1}, HTTP_X_REQUESTED_WITH='XMLHttpRequest')\n self.assertEqual(response.status_code, 200)\n\n response = self.client.get(url, {'part': 999}, HTTP_X_REQUESTED_WITH='XMLHttpRequest')\n self.assertEqual(response.status_code, 200)\n\n # Copy from a valid item, valid location\n response = self.client.get(url, {'location': 1, 'copy': 1}, HTTP_X_REQUESTED_WITH='XMLHttpRequest')\n self.assertEqual(response.status_code, 200)\n\n # Copy from an invalid item, invalid location\n response = self.client.get(url, {'location': 999, 'copy': 9999}, HTTP_X_REQUESTED_WITH='XMLHttpRequest')\n self.assertEqual(response.status_code, 200)", "def test_get_buckets(self):\n pass", "def test_add(self):\n r = main.List.connection()\n main.List.add(r, \"ToDo\", 1, \"Buy apples\", 2, \"20.05.2015\")\n task = r.get(\"ToDo\")\n self.assertTrue(task, \"No such entry in DB. 
Adding failed.\")", "def bfAdd(self, key, item):\n params = [key, item]\n \n return self.execute_command(self.BF_ADD, *params)", "def test_add_with_existing_key(self):\n self.client.login(username='admin', password='admin')\n response = self.client.post('/add/', {'url': 'http://example.com', 'key': 'example'})\n # TODO status 201\n self.client.login(user='admin', password='admin')\n response = self.client.post('/add/', {'url': 'http://example.com', 'key': 'example'})\n # TODO status 409", "def test_add(self):\n self.client.login(username='admin', password='admin')\n response = self.client.post('/add/', {'url': 'http://example.com'}, follow=True)\n self.assertShortURLCreated(response)", "def create_bucket(request: Dict) -> Dict:\n global config\n\n body = {\n \"user_name\": request.get(\"user_name\"),\n \"prefix\": request.get(\"bucket_name\")[0:5],\n \"bucket_name\": request.get(\"bucket_name\"),\n \"region\": request.get(\"region\")\n }\n\n response = requests.post(url=config.api_url('bucket'),\n data=json.dumps(body),\n headers={'content-type': 'application/json'})\n\n if response.status_code == HTTPStatus.OK:\n return response.json()", "def test_shoppingitems_creation(self):\n # register and login a user\n self.app.post('/register', data=self.user_reg_details)\n self.app.post('/login', data=self.user_login_details)\n # create a shopping list\n self.shopping_class_obj.create_list(\n 'Easter', 'maina@gmail.com')\n # make a post request to add an item\n res = self.app.post(\n '/shoppingitems/Easter', data={'item-name': 'Bread'})\n self.assertEqual(res.status_code, 200)\n response = self.item_class_obj.add_item(\n 'Easter', 'Bread', 'maina@gmail.com')\n self.assertIsInstance(response, list)\n # check if item was successfully created\n self.assertIn(\"Bread\", str(res.data))", "def test_buckets_returned_when_searched_2(self):\n with self.client:\n token = self.get_user_token()\n self.create_buckets(token)\n response = self.client.get(\n '/bucketlists/?q=T&page=2',\n headers=dict(Authorization='Bearer ' + token)\n )\n data = json.loads(response.data.decode())\n self.assertTrue(data['status'] == 'success')\n self.assertIsInstance(data['buckets'], list, 'Items must be a list')\n self.assertEqual(len(data['buckets']), 3)\n self.assertEqual(data['buckets'][0]['id'], 4)\n self.assertEqual(data['count'], 6)\n self.assertEqual(data['next'], None)\n self.assertEqual(data['previous'], 'http://localhost/bucketlists/?page=1')\n self.assertEqual(response.status_code, 200)", "def test_content_type_for_editing_bucket_is_json(self):\n with self.client:\n token = self.get_user_token()\n res = self.client.put(\n '/bucketlists/1',\n headers=dict(Authorization='Bearer ' + token),\n data=json.dumps(dict(name='Adventure'))\n )\n data = json.loads(res.data.decode())\n self.assertEqual(res.status_code, 202)\n self.assertTrue(data['status'] == 'failed')\n self.assertTrue(data['message'] == 'Content-type must be json')", "def test_delete_item_successfully(self):\n self.client.post('/buckets',\n content_type='application/json',\n data=self.bucket, headers=self.header)\n self.client.post('/buckets/1/items',\n content_type='application/json',\n data=self.item, headers=self.header)\n response = self.client.delete('/buckets/1/items/1',\n headers=self.header)\n self.assertEquals(response.status_code, 200)\n self.assertIn('Item successfully deleted', response.data.decode())", "def test_shelflistitem_putpatch_requires_auth(api_settings,\n assemble_custom_shelflist,\n get_shelflist_urls, api_client):\n test_lcode, test_id = 
'1test', 99999999\n _, _, trecs = assemble_custom_shelflist(test_lcode, [(test_id, {})])\n url = '{}{}'.format(get_shelflist_urls(trecs)[test_lcode], test_id)\n before = api_client.get(url)\n put_resp = api_client.put(url, {})\n patch_resp = api_client.patch(url, {})\n after = api_client.get(url) \n assert put_resp.status_code == 403\n assert patch_resp.status_code == 403\n assert before.data == after.data", "def test_adding_item_to_list(create_shopping_item, create_shopping_list):\n shopping_list = create_shopping_list\n items_before = shopping_list.items.values_list().count()\n new_item = create_shopping_item\n shopping_list.items.add(new_item)\n items_after = shopping_list.items.values_list().count()\n assert items_after > items_before\n assert items_before == 0\n assert items_after == 1", "def test_wish_list(self):\n data = {\"name\": \"test list 1\"}\n response = self.client.post(\"/wish_list/\", data, format='json')\n self.assertEqual(response.status_code, 200)\n response = self.client.get(\"/wish_list/\")\n self.assertEqual(response.status_code, 200)\n # item = Item.objects.get(name=\"New Item\")\n # self.assertEqual(item.name(), \"New Item\")", "def test_create_drives_drive_add_item(self):\n pass", "def test_deletion_handles_no_bucket_found_by_id(self):\n with self.client:\n response = self.client.delete(\n '/bucketlists/1',\n headers=dict(Authorization='Bearer ' + self.get_user_token())\n )\n data = json.loads(response.data.decode())\n self.assertEqual(response.status_code, 404)\n self.assertTrue(data['status'] == 'failed')\n self.assertTrue(data['message'] == 'Bucket resource cannot be found')\n self.assertTrue(response.content_type == 'application/json')", "def test_update_item_using_post(self):\n pass", "def test_addEntryByList(self):\n self.g.entryFormat = ['term', 'tags', 'value']\n b = self.g.add_entry(['foo', 'a', '1'])\n self.assertTrue(b)", "def test_model_can_create_a_bucketlist(self):\n old_count = Job.objects.count()\n self.job.save()\n new_count = Job.objects.count()\n self.assertNotEqual(old_count, new_count)", "def test_unauthorized_add(self):\n response = self.client.post('/add/', {'url': 'http://example.com', 'key': 'example'})\n # TODO status 403", "def test_create_item_good(test_client, item):\n\n response = test_client.post(BASE_URL,\n data=json.dumps(item),\n content_type='application/json')\n\n data = json.loads(response.get_data())\n\n assert response.status_code == 201\n assert data['item']['name'] == item['name']\n assert data['item']['value'] == item['value']\n assert data['item']['id'] > 0" ]
[ "0.8470567", "0.8040441", "0.76671934", "0.7665731", "0.75571686", "0.7541412", "0.73769414", "0.7354037", "0.735012", "0.73247266", "0.73235244", "0.7313603", "0.7295238", "0.71595377", "0.7149308", "0.7145139", "0.71401936", "0.71109724", "0.7093405", "0.70680696", "0.6946335", "0.68785876", "0.687531", "0.6841962", "0.68415093", "0.6760094", "0.66894764", "0.66697323", "0.6665334", "0.6653366", "0.665111", "0.66420627", "0.6642026", "0.6613187", "0.6612258", "0.65891534", "0.6587562", "0.65813696", "0.6554041", "0.65115404", "0.64841986", "0.6473743", "0.64523935", "0.643752", "0.6423354", "0.6421168", "0.64125764", "0.64011", "0.6395915", "0.63578594", "0.63570005", "0.63526165", "0.63043636", "0.62425524", "0.62273556", "0.6220105", "0.621353", "0.6211389", "0.6145463", "0.612434", "0.6122311", "0.6115918", "0.61152214", "0.61010903", "0.60748416", "0.606897", "0.60687643", "0.60570246", "0.6055577", "0.60540944", "0.6029122", "0.602288", "0.60048205", "0.59946233", "0.59946233", "0.59946233", "0.5991657", "0.59743285", "0.5967504", "0.5963464", "0.5960404", "0.5951099", "0.59421945", "0.5935129", "0.5922737", "0.5920058", "0.59104663", "0.58824253", "0.588105", "0.5874163", "0.5863439", "0.5854639", "0.5843555", "0.5840236", "0.5824057", "0.58181643", "0.5810036", "0.58052206", "0.580004", "0.5776674" ]
0.84557176
1
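The first negative in the record above (the PUT/PATCH check) pairs its 403 assertions with a read before and after the rejected writes, proving the handler did not mutate state on the way to refusing the request. A minimal sketch of that pattern, assuming a pytest api_client fixture (for example rest_framework.test.APIClient) and a hypothetical /items/1/ endpoint:

def test_write_requires_auth_and_preserves_state(api_client):
    # Hypothetical endpoint; api_client is assumed to be an unauthenticated
    # test client such as rest_framework.test.APIClient.
    url = '/items/1/'

    before = api_client.get(url)
    put_resp = api_client.put(url, {})      # full update, no credentials
    patch_resp = api_client.patch(url, {})  # partial update, no credentials
    after = api_client.get(url)

    # Both write verbs are rejected outright ...
    assert put_resp.status_code == 403
    assert patch_resp.status_code == 403
    # ... and the stored record is untouched.
    assert before.data == after.data

The before/after read is what makes the test meaningful: a 403 alone would not catch a handler that rejects the request only after mutating state.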
Method tests that there cannot be more than one bucketlist item added with the same name. We will use one of the already existing item names, 'test item'.
def test_fail_repeated_buckelist_item(self):
    user = User.query.filter_by(email="test@test.com").first()
    bucketlist = BucketList.query.filter_by(user_id=user.id, name="test bucketlist").first()
    item_no = BucketListItem.query.filter_by(bucketlist_id=bucketlist.id).count()
    response = self.add_bucketlist_item("test@test.com", "test", bucketlist.id, "test item")
    result = json.loads(response.data.decode('utf-8'))
    self.assertEqual(response.status, '409 CONFLICT')
    self.assertEqual(result['message'], 'Bucketlist Item Exists')
    new_item_no = BucketListItem.query.filter_by(bucketlist_id=bucketlist.id).count()
    self.assertEqual(item_no, new_item_no)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_duplicate_bucketlist_item(self):\n resp = self.client.post('/bucketlists',\n data=json.dumps(self.bucketlist),\n content_type=\"application/json\", headers={\n \"Authorization\": self.token\n })\n self.assertEqual(resp.status_code, 201)\n resp_item = self.client.post('/bucketlistitems/1/items',\n data=json.dumps(\n {\"name\": \"visit the busy surburbs.\"}),\n content_type=\"application/json\", headers={\n \"Authorization\": self.token\n })\n self.assertEqual(resp.status_code, 201)\n resp_item2 = self.client.post('/bucketlistitems/1/items',\n data=json.dumps(\n {\"name\": \"visit the busy surburbs.\"}),\n content_type=\"application/json\", headers={\n \"Authorization\": self.token\n })\n result = json.loads(resp_item2.data)\n self.assertEqual(result[\"message\"], \"Item with the given name exists.\")\n self.assertEqual(resp_item2.status_code, 409)", "def test_add_bucketlist_items(self):\r\n email = \"test@test.com\"\r\n _pword = \"test\"\r\n user = User.query.filter_by(email=email).first()\r\n bucketlist = BucketList.query.filter_by(user_id=user.id, name=\"test bucketlist\").first()\r\n item_no = BucketListItem.query.filter_by(bucketlist_id=bucketlist.id).count()\r\n response = self.add_bucketlist_item(email, _pword, bucketlist.id, \"bucketlist item name\")\r\n result = json.loads(response.data.decode('utf-8'))\r\n self.assertEqual(response.status, '201 CREATED')\r\n self.assertEqual(result['message'], 'Bucket list item added')\r\n new_item_no = BucketListItem.query.filter_by(bucketlist_id=bucketlist.id).count()\r\n self.assertLess(item_no, new_item_no)", "def test_create_bucketlist_item(self):\n resp = self.client.post('/bucketlists',\n data=json.dumps(self.bucketlist),\n content_type=\"application/json\", headers={\n \"Authorization\": self.token\n })\n self.assertEqual(resp.status_code, 201)\n response = self.client.get(\n \"/bucketlists/1\", headers={\n \"Authorization\": self.token})\n self.assertEqual(response.status_code, 200)\n resp_item = self.client.post('/bucketlistitems/1/items',\n data=json.dumps(\n {\"name\": \"visit the busy surburbs.\"}),\n content_type=\"application/json\", headers={\n \"Authorization\": self.token\n })\n result = json.loads(resp_item.data)\n self.assertEqual(result[\"message\"],\n \"Bucket list item added successfully.\")\n self.assertEqual(resp.status_code, 201)", "def test_put_bucketlist_item(self):\r\n data = {\"name\": \"bucketlist item name\", \"completed\": \"true\"}\r\n email = \"test@test.com\"\r\n _pword = \"test\"\r\n user = User.query.filter_by(email=email).first()\r\n bucketlist = BucketList.query.filter_by(user_id=user.id, name=\"test bucketlist\").first()\r\n item = BucketListItem.query.filter_by(bucketlist_id=bucketlist.id, id=1).first()\r\n self.assertNotEqual(item.name, \"bucketlist item name\")\r\n self.assertFalse(item.completed)\r\n\r\n response = self.put_bucketlist_item(email, _pword, bucketlist.id, 1, data)\r\n result = json.loads(response.data.decode('utf-8'))\r\n item2 = BucketListItem.query.filter_by(bucketlist_id=bucketlist.id, id=1).first()\r\n self.assertEqual(response.status, '201 CREATED')\r\n self.assertEqual(item2.name, \"bucketlist item name\")\r\n self.assertTrue(item2.completed)", "def test_get_bucketlist_items(self):\n resp = self.client.post('/bucketlists',\n data=json.dumps(self.bucketlist),\n content_type=\"application/json\", headers={\n \"Authorization\": self.token\n })\n self.assertEqual(resp.status_code, 201)\n\n resp_item = self.client.post('/bucketlistitems/1/items',\n data=json.dumps(\n {\"name\": 
\"visit the busy surburbs.\"}),\n content_type=\"application/json\", headers={\n \"Authorization\": self.token\n })\n\n self.assertEqual(resp_item.status_code, 200)\n resp_item = self.client.get('/bucketlistitems/1/items', headers={\n \"Authorization\": self.token\n })\n self.assertEqual(resp_item.status_code, 200)", "def test_get_bucketlist_item_id(self):\n resp = self.client.post('/bucketlists',\n data=json.dumps(self.bucketlist),\n content_type=\"application/json\", headers={\n \"Authorization\": self.token\n })\n self.assertEqual(resp.status_code, 201)\n resp_item = self.client.post('/bucketlistitems/1/items',\n data=json.dumps(\n {\"name\": \"visit the busy surburbs.\"}),\n content_type=\"application/json\", headers={\n \"Authorization\": self.token\n })\n self.assertEqual(resp.status_code, 201)\n get_item = self.client.get('/bucketlistitems/1/items/1', headers={\n \"Authorization\": self.token\n })\n self.assertEqual(resp.status_code, 201)", "def test_put_item_wrong_id(self):\r\n data = {\"name\": \"bucketlist item name\", \"completed\": \"true\"}\r\n email = \"test@test.com\"\r\n _pword = \"test\"\r\n user = User.query.filter_by(email=email).first()\r\n bucketlist = BucketList.query.filter_by(user_id=user.id, name=\"test bucketlist\").first()\r\n item = BucketListItem.query.filter_by(bucketlist_id=bucketlist.id, id=0).first()\r\n self.assertFalse(item)\r\n\r\n response = self.put_bucketlist_item(email, _pword, bucketlist.id, 0, data)\r\n result = json.loads(response.data.decode('utf-8'))\r\n self.assertEqual(response.status, '404 NOT FOUND')\r\n self.assertEqual(\r\n result['message'],\r\n 'Bucketlist Item with ID {} not found in the database. You have requested this URI '\\\r\n '[/api/v1/bucketlist/1/items/0] but did you mean /api/v1/bucketlist/<int:bucketlist_id>/items/'\\\r\n ' or /api/v1/bucketlist/<int:bucketlist_id> or /api/v1/bucketlist ?'.format(0)\r\n )", "def test_create_bucket_list_name(self):\n bucket = BucketList(\"\", \"\")\n bucket = bucket.create_bucket_list(\"\")\n self.assertEqual(bucket, \"Please provide a name for your bucket list\", )", "def test_creating_and_getting_a_bucketlist_for_authenticated_user(self):\n\n # test all bucketlists\n response = self.client.post(\n \"/bucketlists/\",\n data=dict(name='test_bucketlist'),\n headers={'Authorization': self.user_token}\n )\n bucketlist = json.loads(response.data)\n\n self.assertEqual(response.status_code, 200)\n self.assertEqual(bucketlist[\"name\"], 'test_bucketlist')\n\n # test single bucketlist\n self.bucketlist_id = bucketlist[\"bucketlist_id\"]\n single_bucketlist = self.client.get(\n \"/bucketlists/\" + str(self.bucketlist_id) + \"\",\n headers={'Authorization': self.user_token}\n )\n\n one_bucketlist = json.loads(single_bucketlist.data)\n\n self.assertEqual(single_bucketlist.status_code, 200)\n self.assertEqual(one_bucketlist[\"name\"], 'test_bucketlist')\n\n # test all items in bucketlist\n item = self.client.post(\n \"/bucketlists/\" + str(self.bucketlist_id) + \"/items/\",\n data=dict(name=\"test_item\"),\n headers={'Authorization': self.user_token}\n )\n\n one_item = json.loads(item.data)\n\n self.assertEqual(item.status_code, 200)\n self.assertEqual(one_item[\"name\"], 'test_item')\n\n # test single item in bucketlist\n self.item_id = one_item[\"item_id\"]\n single_item = self.client.get(\n \"/bucketlists/\" + str(self.bucketlist_id) + \"/items/\" + str(self.item_id) + \"\",\n headers={'Authorization': self.user_token}\n )\n\n created_item = json.loads(single_item.data)\n\n 
self.assertEqual(single_item.status_code, 200)\n self.assertEqual(created_item[\"name\"], 'test_item')\n\n # test for deletion of bucketlist\n second_bucketlist = self.client.post(\n \"/bucketlists/\",\n data=dict(name='second_bucketlist'),\n headers={'Authorization': self.user_token}\n )\n\n bucketlist_two = json.loads(second_bucketlist.data)\n\n self.assertEqual(second_bucketlist.status_code, 200)\n self.assertEqual(bucketlist_two[\"name\"], 'second_bucketlist')\n\n delete_response = self.client.delete(\n \"/bucketlists/\" + str(bucketlist_two[\"bucketlist_id\"]) + \"\",\n headers={'Authorization': self.user_token}\n )\n\n deletion = json.loads(delete_response.data)\n\n self.assertEqual(delete_response.status_code, 200)\n self.assertEqual(deletion[\"message\"], \"Deleted\")\n\n # test for deletion of an item in bucketlist\n delete_item = self.client.delete(\n \"/bucketlists/\" + str(bucketlist[\"bucketlist_id\"]) + \"/items/\" + str(one_item[\"item_id\"]) + \"\",\n headers={'Authorization': self.user_token}\n )\n\n item_deletion = json.loads(delete_item.data)\n\n self.assertEqual(delete_item.status_code, 200)\n self.assertEqual(item_deletion[\"message\"], \"Deleted\")\n\n # test for updating of bucketlist\n self.bucketlist_id = bucketlist[\"bucketlist_id\"]\n bucketlist_update = self.client.put(\n \"/bucketlists/\" + str(self.bucketlist_id) + \"\",\n data=dict(name='bucketlist_test'),\n headers={'Authorization': self.user_token}\n )\n\n updated_bucketlist = json.loads(bucketlist_update.data)\n\n self.assertEqual(bucketlist_update.status_code, 200)\n self.assertEqual(updated_bucketlist[\"name\"], 'bucketlist_test')\n\n # test update of item in bucketlist\n item = self.client.post(\n \"/bucketlists/\" + str(self.bucketlist_id) + \"/items/\",\n data=dict(name=\"test_item\"),\n headers={'Authorization': self.user_token}\n )\n\n one_item = json.loads(item.data)\n\n item_update = self.client.put(\n \"/bucketlists/\" + str(self.bucketlist_id) + \"/items/\"+ str(one_item[\"item_id\"]) + \"\",\n data=dict(name=\"item_test\"),\n headers={'Authorization': self.user_token}\n )\n\n updated_item = json.loads(item_update.data)\n\n self.assertEqual(item_update.status_code, 200)\n self.assertEqual(updated_item[\"name\"], 'item_test')", "def test_update_busketlistitem_by_id(self):\n resp = self.client.post('/bucketlists',\n data=json.dumps(self.bucketlist),\n content_type=\"application/json\", headers={\n \"Authorization\": self.token\n })\n self.assertEqual(resp.status_code, 201)\n resp_item = self.client.post('/bucketlistitems/1/items',\n data=json.dumps(\n {\"name\": \"visit the busy surburbs.\"}),\n content_type=\"application/json\", headers={\n \"Authorization\": self.token\n })\n self.assertEqual(resp.status_code, 201)\n update_item = self.client.put('/bucketlistitems/1/items/1',\n data=json.dumps(\n {\"name\": \"visit the busy surburbs and museums too.\"}),\n content_type=\"application/json\", headers={\n \"Authorization\": self.token\n })\n self.assertEqual(update_item.status_code, 201)", "def add_bucketlist_item(self, email, password, buckelist_id, item_name):\r\n test_date = str(date(2020, 9, 22))\r\n headers = self.authentication_headers(email=email, password=password)\r\n return self.client.post(\r\n '/api/v1/bucketlist/{}/items/'.format(buckelist_id),\r\n data=json.dumps({\"name\": item_name, \"finished_by\": test_date}),\r\n content_type=\"application/json\",\r\n headers=headers,\r\n follow_redirects=True\r\n )", "def test_delete_bucketlist_item(self):\n self.register_user()\n result = 
self.login_user()\n access_token = json.loads(result.data.decode())['access_token']\n\n # create a bucketlist by making a POST request\n res = self.client().post(\n '/api/v1/bucketlists/',\n headers=dict(Authorization=\"Bearer \" + access_token),\n data=self.bucketlist)\n self.assertEqual(res.status_code, 201)\n # get the json with the bucketlist\n results = json.loads(res.data.decode())\n\n # create a bucketlist item by making a POST request and add it to the created bucketlist\n res = self.client().post(\n '/api/v1/bucketlists/{}/items/'.format(results['id']),\n headers=dict(Authorization=\"Bearer \" + access_token),\n data={\n \"name\": \"Eat fried crabs\"\n })\n self.assertEqual(res.status_code, 201)\n # get the json containing the created bucketlist item\n res_item = json.loads(res.data.decode())\n\n # delete the bucketlist item we just created\n res = self.client().delete(\n '/api/v1/bucketlists/{}/items/{}'.format(results['id'], res_item['id']),\n headers=dict(Authorization=\"Bearer \" + access_token), )\n self.assertEqual(res.status_code, 200)\n\n # Test to see if it exists, should return a 404\n result = self.client().get(\n '/api/v1/bucketlists/{}/items/1'.format(results['id']),\n headers=dict(Authorization=\"Bearer \" + access_token))\n self.assertEqual(result.status_code, 404)", "def test_delete_bucketlistitem_by_id(self):\n resp = self.client.post('/bucketlists',\n data=json.dumps(self.bucketlist),\n content_type=\"application/json\", headers={\n \"Authorization\": self.token\n })\n self.assertEqual(resp.status_code, 201)\n resp_item = self.client.post('/bucketlistitems/1/items',\n data=json.dumps(\n {\"name\": \"visit the busy surburbs.\"}),\n content_type=\"application/json\", headers={\n \"Authorization\": self.token\n })\n self.assertEqual(resp.status_code, 201)\n delete_item = self.client.delete('/bucketlistitems/1/items/1',\n headers={\n \"Authorization\": self.token\n })\n self.assertEqual(delete_item.status_code, 204)", "def test_bucket_is_updated(self):\n with self.client:\n # Get an auth token\n token = self.get_user_token()\n # Create a Bucket\n response = self.client.post(\n '/bucketlists',\n data=json.dumps(dict(name='Travel')),\n headers=dict(Authorization='Bearer ' + token),\n content_type='application/json'\n )\n # Test Bucket creation\n data = json.loads(response.data.decode())\n self.assertEqual(response.status_code, 201)\n self.assertTrue(data['status'], 'success')\n self.assertTrue(data['name'], 'Travel')\n # Update the bucket name\n res = self.client.put(\n '/bucketlists/1',\n headers=dict(Authorization='Bearer ' + token),\n data=json.dumps(dict(name='Adventure')),\n content_type='application/json'\n )\n data = json.loads(res.data.decode())\n self.assertEqual(res.status_code, 201)\n self.assertTrue(res.content_type == 'application/json')\n self.assertTrue(data['status'] == 'success')\n self.assertTrue(data['name'] == 'Adventure')\n self.assertEqual(data['id'], 1)", "def test_id_of_bucket_to_be_edited_does_not_exist(self):\n with self.client:\n # Get an auth token\n token = self.get_user_token()\n # Update the bucket name\n res = self.client.put(\n '/bucketlists/1',\n headers=dict(Authorization='Bearer ' + token),\n data=json.dumps(dict(name='Adventure')),\n content_type='application/json'\n )\n data = json.loads(res.data.decode())\n self.assertEqual(res.status_code, 404)\n self.assertTrue(res.content_type == 'application/json')\n self.assertTrue(data['status'] == 'failed')\n self.assertTrue(data['message'] == 'The Bucket with Id 1 does not exist')", "def 
test_bucketlist_creation(self):\n post_data = self.post_a_bucket()\n self.assertEqual(post_data.status_code, 201)\n self.assertIn('Climb the Himalayas', str(post_data.data))", "def test_single_bucketlist_item_delete_with_empty_token(self):\n self.register_user()\n result = self.login_user()\n access_token = json.loads(result.data.decode())['access_token']\n\n res = self.client().post(\n '/api/v1/bucketlists/',\n headers=dict(Authorization=\"Bearer \" + access_token),\n data=self.bucketlist)\n\n # assert that the bucketlist is created\n self.assertEqual(res.status_code, 201)\n # get the response data in json format\n results = json.loads(res.data.decode())\n\n # create a bucketlist item by making a POST request and add it to the created bucketlist\n res = self.client().post(\n '/api/v1/bucketlists/{}/items/'.format(results['id']),\n headers=dict(Authorization=\"Bearer \" + access_token),\n data={\n \"name\": \"Eat fried crabs\"\n })\n self.assertEqual(res.status_code, 201)\n # get the json containing the created bucketlist item\n res_item = json.loads(res.data.decode())\n\n # delete the bucketlist item we just created\n res = self.client().delete(\n '/api/v1/bucketlists/{}/items/{}'.format(results['id'], res_item['id']),\n headers=dict(Authorization=\"\"), )\n self.assertEqual(res.status_code, 401)\n self.assertIn('Token not provided in the header with key Authorization.', str(res.data))", "def test_delete_item_with_non_existing_bucket(self):\n self.client.post('/buckets',\n content_type='application/json',\n data=self.bucket, headers=self.header)\n self.client.post('/buckets/1/items',\n content_type='application/json',\n data=self.item, headers=self.header)\n response = self.client.delete('/buckets/2/items/1'\n , headers=self.header)\n self.assertEquals(response.status_code, 400)\n self.assertIn('Attempting to delete item on non existing bucket',\n response.data.decode())", "def test_edit_bucketlist(self):\n post_data = self.post_a_bucket()\n self.assertEqual(post_data.status_code, 201)\n result_of_put_method = self.client().put(\n '/bucketlists/1',\n headers=dict(Authorization='Bearer '\n + self.token()\n ),\n data={\n \"name\": \"The seasons will be, summer winter and autumn\"\n })\n self.assertEqual(result_of_put_method.status_code, 201)\n result_of_get_method = self.client().get('/bucketlists/1',\n headers=dict(Authorization='Bearer '\n + self.token())\n )\n self.assertIn('The seasons will b', str(result_of_get_method.data))", "def test_name_attribute_is_set_in_bucket_creation_request(self):\n with self.client:\n response = self.client.post(\n '/bucketlists',\n headers=dict(Authorization='Bearer ' + self.get_user_token()),\n data=json.dumps({}),\n content_type='application/json'\n )\n data = json.loads(response.data.decode())\n self.assertEqual(response.status_code, 400)\n self.assertTrue(data['status'], 'failed')\n self.assertTrue(data['message'], 'Missing name attribute')", "def manipulate_bucketlist():\n pass", "def test_single_bucketlist_item_delete_with_invalid_token(self):\n self.register_user()\n result = self.login_user()\n access_token = json.loads(result.data.decode())['access_token']\n\n res = self.client().post(\n '/api/v1/bucketlists/',\n headers=dict(Authorization=\"Bearer \" + access_token),\n data=self.bucketlist)\n\n # assert that the bucketlist is created\n self.assertEqual(res.status_code, 201)\n # get the response data in json format\n results = json.loads(res.data.decode())\n\n # create a bucketlist item by making a POST request and add it to the created 
bucketlist\n res = self.client().post(\n '/api/v1/bucketlists/{}/items/'.format(results['id']),\n headers=dict(Authorization=\"Bearer \" + access_token),\n data={\n \"name\": \"Eat fried crabs\"\n })\n self.assertEqual(res.status_code, 201)\n # get the json containing the created bucketlist item\n res_item = json.loads(res.data.decode())\n\n # delete the bucketlist item we just created\n res = self.client().delete(\n '/api/v1/bucketlists/{}/items/{}'.format(results['id'], res_item['id']),\n headers=dict(Authorization=access_token), )\n self.assertEqual(res.status_code, 401)\n self.assertIn('Invalid token format.', str(res.data))", "def test_api_edit_bucketlist(self):\n res_post = self.client().post('/bucketlist', data={'name': 'Wake up, Eat, Code, Sleep & Repeat'})\n self.assertEqual(res_post.status_code, 201)\n res_post_in_json = json.loads(res_post.data.decode('UTF-8').replace(\"'\", \"\\\"\"))\n id = res_post_in_json['id']\n res_put = self.client().put(\n f'bucketlist/{id}',\n data={\n 'name': \"Don't forget to exercise\"\n }\n )\n self.assertEqual(res_put.status_code, 200)\n res = self.client().get(f'/bucketlist/{id}')\n self.assertIn(\"exercise\", str(res.data))", "def test_single_bucketlist_item_delete_with_no_auth_header(self):\n self.register_user()\n result = self.login_user()\n access_token = json.loads(result.data.decode())['access_token']\n\n res = self.client().post(\n '/api/v1/bucketlists/',\n headers=dict(Authorization=\"Bearer \" + access_token),\n data={'name': 'Visit the Grand Canyon!'})\n self.assertEqual(res.status_code, 201)\n # get the bucketlist in json\n results = json.loads(res.data.decode())\n\n # create a bucketlist item by making a POST request and add it to the created bucketlist\n res = self.client().post(\n '/api/v1/bucketlists/{}/items/'.format(results['id']),\n headers=dict(Authorization=\"Bearer \" + access_token),\n data={\n \"name\": \"Eat fried crabs\"\n })\n self.assertEqual(res.status_code, 201)\n # get the json containing the created bucketlist item\n res_item = json.loads(res.data.decode())\n\n # delete the bucketlist item we just created\n res = self.client().delete(\n '/api/v1/bucketlists/{}/items/{}'.format(results['id'], res_item['id']),\n headers=dict(), )\n self.assertEqual(res.status_code, 401)\n self.assertIn('Header with key Authorization missing.', str(res.data))", "def test_bucketlist_create(self):\n res = self.client().post('/bucketlist', data=self.bucketlist)\n self.assertEqual(res.status_code, 201)\n self.assertIn('Go to vacation', str(res.data))", "def test_delete_bucketlist_item(self):\r\n email = \"test@test.com\"\r\n _pword = \"test\"\r\n user = User.query.filter_by(email=email).first()\r\n bucketlist = BucketList.query.filter_by(user_id=user.id, name=\"test bucketlist\").first()\r\n item = BucketListItem.query.filter_by(bucketlist_id=bucketlist.id, id=1).first()\r\n self.assertTrue(item)\r\n\r\n response = self.delete_bucketlist_item(email, _pword, bucketlist.id, item.id)\r\n result = json.loads(response.data.decode('utf-8'))\r\n self.assertEqual(response.status, '200 OK')\r\n self.assertEqual(\r\n result['message'],\r\n 'Bucketlist Item with ID {} deleted'.format(item.id)\r\n )\r\n item = BucketListItem.query.filter_by(bucketlist_id=bucketlist.id, id=1).first()\r\n self.assertFalse(item)", "def test_list_bucket(self):\n\n if self.bos.does_bucket_exist(\"aaaaaaxzr1\"):\n self.bos.delete_bucket(\"aaaaaaxzr1\")\n if self.bos.does_bucket_exist(\"aaaaaaxzr2\"):\n self.bos.delete_bucket(\"aaaaaaxzr2\")\n\n time1 = 
utils.get_canonical_time()\n self.bos.create_bucket(\"aaaaaaxzr1\")\n\n time2 = utils.get_canonical_time()\n self.bos.create_bucket(\"aaaaaaxzr2\")\n\n response = self.bos.list_buckets()\n self.check_headers(response)\n\n self.assertEqual(response.owner.id, bos_test_config.OWNER_ID)\n self.assertEqual(response.owner.display_name, bos_test_config.DISPLAY_NAME)\n for bucket in response.buckets:\n if bucket.name == \"aaaaaaxzr1\":\n self.assertEqual(\n compat.convert_to_bytes(bucket.creation_date)[0:19], \n compat.convert_to_bytes(time1)[0:19])\n elif bucket.name == \"aaaaaaxzr2\":\n self.assertEqual(\n compat.convert_to_bytes(bucket.creation_date)[0:19], \n compat.convert_to_bytes(time2)[0:19])\n self.bos.delete_bucket(\"aaaaaaxzr1\")\n self.bos.delete_bucket(\"aaaaaaxzr2\")", "def test_id_of_bucket_to_be_edited_is_invalid(self):\n with self.client:\n # Get an auth token\n token = self.get_user_token()\n # Update the bucket name\n res = self.client.put(\n '/bucketlists/bucketid',\n headers=dict(Authorization='Bearer ' + token),\n data=json.dumps(dict(name='Adventure')),\n content_type='application/json'\n )\n data = json.loads(res.data.decode())\n self.assertEqual(res.status_code, 400)\n self.assertTrue(res.content_type == 'application/json')\n self.assertTrue(data['status'] == 'failed')\n self.assertTrue(data['message'] == 'Please provide a valid Bucket Id')", "def test_adding_item_to_list(create_shopping_item, create_shopping_list):\n shopping_list = create_shopping_list\n items_before = shopping_list.items.values_list().count()\n new_item = create_shopping_item\n shopping_list.items.add(new_item)\n items_after = shopping_list.items.values_list().count()\n assert items_after > items_before\n assert items_before == 0\n assert items_after == 1", "def test_add_item_at_using_put(self):\n pass", "def test_get_bucketlist_items(self):\r\n email = \"test@test.com\"\r\n _pword = \"test\"\r\n user = User.query.filter_by(email=email).first()\r\n bucketlist = BucketList.query.filter_by(user_id=user.id, id=1).first()\r\n items_no = len(bucketlist.bucketlist_items)\r\n headers = self.authentication_headers(email=email, password=_pword)\r\n response = self.client.get(\r\n '/api/v1/bucketlist/1/items/',\r\n content_type=\"application/json\",\r\n headers=headers,\r\n follow_redirects=True\r\n )\r\n result = json.loads(response.data.decode('utf-8'))\r\n self.assertEqual(len(result), items_no)", "def create_item(_id, item_name, description):\n data_ = Data.get_the_data(_id, Data.bucketlists)\n for data in data_:\n bucketlist = Bucketlist(data['title'],\n data['owner'],\n data['intro'],\n data['owner_id'],\n data['_id'])\n bucketlist.new_item(item_name=item_name,\n description=description)", "def test_detect_duplicate_upload_items(duplicate_items: List[JSONDict]) -> None:\n\n with pytest.raises(ValidationError) as e:\n invalid_collection = UploadCollection(items=duplicate_items) # noqa: F841\n\n assert e.value.errors() == [\n {\n \"loc\": (\"items\",),\n \"msg\": \"Duplicate item guids detected: ['http://www.crimsonhexagon.com/post1']\",\n \"type\": \"value_error\",\n }\n ]", "def test_delete_item_wrong_id(self):\r\n email = \"test@test.com\"\r\n _pword = \"test\"\r\n user = User.query.filter_by(email=email).first()\r\n bucketlist = BucketList.query.filter_by(user_id=user.id, name=\"test bucketlist\").first()\r\n item = BucketListItem.query.filter_by(bucketlist_id=bucketlist.id, id=0).first()\r\n self.assertFalse(item)\r\n\r\n response = self.delete_bucketlist_item(email, _pword, bucketlist.id, 0)\r\n result 
= json.loads(response.data.decode('utf-8'))\r\n self.assertEqual(response.status, '404 NOT FOUND')\r\n self.assertEqual(\r\n result['message'],\r\n 'Bucketlist Item with ID {} not found in the database. You have requested this URI '\\\r\n '[/api/v1/bucketlist/1/items/0] but did you mean /api/v1/bucketlist/<int:bucketlist_id>/items/'\\\r\n ' or /api/v1/bucketlist/<int:bucketlist_id> or /api/v1/bucketlist ?'.format(0)\r\n )", "def test_bucket_by_id_is_returned_on_get_request(self):\n with self.client:\n token = self.get_user_token()\n # Create a Bucket\n response = self.client.post(\n '/bucketlists',\n data=json.dumps(dict(name='Travel')),\n headers=dict(Authorization='Bearer ' + token),\n content_type='application/json'\n )\n # Test Bucket creation\n data = json.loads(response.data.decode())\n self.assertEqual(response.status_code, 201)\n self.assertTrue(data['status'], 'success')\n self.assertTrue(data['name'], 'Travel')\n response = self.client.get(\n '/bucketlists/1',\n headers=dict(Authorization='Bearer ' + token)\n )\n data = json.loads(response.data.decode())\n self.assertEqual(response.status_code, 200)\n self.assertTrue(data['status'] == 'success')\n self.assertTrue(data['bucket']['name'] == 'travel')\n self.assertIsInstance(data['bucket'], dict)\n self.assertTrue(response.content_type == 'application/json')", "def test_create_bucket_list_return(self):\n bucket = BucketList(\"\", \"\")\n bucket = bucket.create_bucket_list(\"Name\", \"Completed\")\n self.assertIsInstance(bucket, BucketList)", "def update_bucketlist():\n pass", "def test_model_can_create_a_bucketlist(self):\n old_count = Job.objects.count()\n self.job.save()\n new_count = Job.objects.count()\n self.assertNotEqual(old_count, new_count)", "def test_unique_upload_items(upload_items: List[JSONDict]) -> None:\n validated = UploadCollection(items=upload_items)\n assert validated.dict() == upload_items", "def test_no_bucket_returned_by_given_id(self):\n with self.client:\n token = self.get_user_token()\n\n response = self.client.get(\n '/bucketlists/1',\n headers=dict(Authorization='Bearer ' + token)\n )\n data = json.loads(response.data.decode())\n self.assertEqual(response.status_code, 200)\n self.assertTrue(data['status'] == 'success')\n self.assertIsInstance(data['bucket'], list)\n self.assertTrue(response.content_type == 'application/json')", "def test_add_item_with_duplicate_value_on_unique_field_raises(\n test_store, andy, pandy, candy\n):\n\n person_with_duplicate_name = Person(name=\"Andy\", age=80)\n\n with pytest.raises(NotUniqueException):\n test_store.add(person_with_duplicate_name)\n\n items = list(test_store.get_by())\n assert len(items) == 3\n assert andy in items\n assert pandy in items\n assert candy in items", "def test_add_with_existing_item(self):\n settings.TEST_SETTING_LIST = ['item1']\n wrapper = SettingListWrapper('TEST_SETTING_LIST', 'test setting list')\n wrapper.add('item1')\n\n self.assertEqual(settings.TEST_SETTING_LIST, ['item1'])\n self.assertEqual(wrapper.ref_counts.get('item1'), 2)", "def test_buckets_returned_when_searched_2(self):\n with self.client:\n token = self.get_user_token()\n self.create_buckets(token)\n response = self.client.get(\n '/bucketlists/?q=T&page=2',\n headers=dict(Authorization='Bearer ' + token)\n )\n data = json.loads(response.data.decode())\n self.assertTrue(data['status'] == 'success')\n self.assertIsInstance(data['buckets'], list, 'Items must be a list')\n self.assertEqual(len(data['buckets']), 3)\n self.assertEqual(data['buckets'][0]['id'], 4)\n 
self.assertEqual(data['count'], 6)\n self.assertEqual(data['next'], None)\n self.assertEqual(data['previous'], 'http://localhost/bucketlists/?page=1')\n self.assertEqual(response.status_code, 200)", "def multiple_bucket(single_bucket): # pylint: disable=redefined-outer-name\n single_bucket.insert(\"key 2\", \"value 2\")\n return single_bucket", "def post_bucketlist():\n pass", "def test_duplicate_entries(self):", "def attemptBucketCreate(self, bucketName) -> str:\n for i in range(10):\n suffix = randint(1000, 9999)\n suffixedBucketName = '{}-{}'.format(bucketName, suffix)\n try:\n self.client.create_bucket(\n Bucket=suffixedBucketName,\n CreateBucketConfiguration={\n 'LocationConstraint': self.region\n }\n )\n except Exception as exception:\n if 'BucketAlreadyExists' in str(exception):\n self.logger.error('Seems like bucket name: {} is already taken'.format(bucketName))\n continue\n else:\n self.logger.error('Exception while creating bucket with name: {}'.format(bucketName))\n raise exception\n return suffixedBucketName\n\n raise AppException('Could not create bucket for account name: {}'.format(bucketName))", "def test_add_with_new_item(self):\n settings.TEST_SETTING_LIST = []\n wrapper = SettingListWrapper('TEST_SETTING_LIST', 'test setting list')\n wrapper.add('item1')\n\n self.assertEqual(settings.TEST_SETTING_LIST, ['item1'])\n self.assertEqual(wrapper.ref_counts.get('item1'), 1)", "def add_item(self, item_name):\n if not self.has_item(item_name):\n self.item_list.append(item_name)", "def test_create_item_duplicate_name(test_client, item_duplicate):\n\n response = test_client.post(BASE_URL,\n data=json.dumps(item_duplicate),\n content_type='application/json')\n\n data = json.loads(response.get_data())\n\n assert response.status_code == 400\n assert data['error'] == app.BAD_REQUEST", "def test_get_multiple(multiple_bucket): # pylint: disable=redefined-outer-name\n for idx in range(2):\n element_number = idx + 1\n assert multiple_bucket.get(f\"key {element_number}\") == f\"value {element_number}\"", "def test_delete_bucketlist(self):\n post_data = self.post_a_bucket()\n self.assertEqual(post_data.status_code, 201)\n result_of_delete_method = self.client().delete('/bucketlists/1',\n headers=dict(Authorization='Bearer '\n + self.token())\n )\n self.assertEqual(result_of_delete_method.status_code, 200)\n response_after_removal = self.client().get('/bucketlists/1',\n headers=dict(Authorization='Bearer '\n + self.token())\n )\n self.assertEqual(response_after_removal.status_code, 400)", "def test_update_bucket(self):\n pass", "def test_patch_bucket(self):\n pass", "def test_api_get_all_bucketlists(self):\n res = self.client().post('/bucketlist', data=self.bucketlist)\n self.assertEqual(res.status_code, 201)\n res = self.client().get('/bucketlist')\n self.assertEqual(res.status_code, 200)\n self.assertIn('Go to vacation', str(res.data))", "def add_bucket_list_item(self, id, collection, item):\n if type(id) is not ObjectId:\n id = ObjectId(id)\n obj = getattr(self.db, collection)\n result = obj.update(\n {'_id': id},\n {'$addToSet': {'bucket_list': item}}\n )\n return result", "def test_buckets_returned_when_searched(self):\n with self.client:\n token = self.get_user_token()\n self.create_buckets(token)\n response = self.client.get(\n '/bucketlists/?q=T',\n headers=dict(Authorization='Bearer ' + token)\n )\n data = json.loads(response.data.decode())\n self.assertTrue(data['status'] == 'success')\n self.assertIsInstance(data['buckets'], list, 'Items must be a list')\n 
self.assertEqual(len(data['buckets']), 3)\n self.assertEqual(data['buckets'][0]['id'], 1)\n self.assertEqual(data['count'], 6)\n self.assertEqual(data['next'], 'http://localhost/bucketlists/?page=2')\n self.assertEqual(data['previous'], None)\n self.assertEqual(response.status_code, 200)", "def test_read_bucket(self):\n post_data = self.post_a_bucket()\n self.assertEqual(post_data.status_code, 201)\n result_of_get_method = self.client().get('/bucketlists/',\n headers=dict(Authorization='Bearer '\n + self.token())\n )\n self.assertEqual(result_of_get_method.status_code, 200)\n self.assertIn('Climb the Himalayas', str(result_of_get_method.data))", "def test_unique_item_properties_failed(self):\n check_value = [{\"a\": 1, \"b\": 3}, {\"a\": 1, \"b\": 2}]\n\n with pytest.raises(AssertionError):\n unique_item_properties(check_value, \"a\")", "def test_create_bucket(self):\n pass", "def test_api_get_bucketlist_by_id(self):\n res_post = self.client().post('/bucketlist', data=self.bucketlist)\n self.assertEqual(res_post.status_code, 201)\n res_in_json = json.loads(res_post.data.decode('UTF-8').replace(\"'\", \"\\\"\"))\n res = self.client().get(f\"/bucketlist/{res_in_json['id']}\")\n self.assertEqual(res.status_code, 200)\n self.assertIn('Go to vacation', str(res.data))", "def single_bucket(empty_bucket): # pylint: disable=redefined-outer-name\n empty_bucket.insert(\"key 1\", \"value 1\")\n return empty_bucket", "def add_bucket(bucket_name):\n pass", "def test_cart_creation_duplicate_name(self):\n cart_name = 'cart name'\n self.cart_item_manager.create_cart('123', cart_name, False)\n self.cart_item_manager.create_cart('124', cart_name, False)\n with self.assertRaises(DuplicateItemError):\n self.cart_item_manager.create_cart('123', cart_name, False)", "def add_item(item):\n # Check first if the item already exists in the inventory\n for i in get_inventory():\n if i['name'] == item['name']:\n print(f\"[ERROR] item with name {i['name']} already exists\")\n break\n else:\n print(f'[INFO] Adding item {item}')\n INVENTORY.append(item)\n # mongo.collection().insert_one(item)", "def verify_unique_names(items):\n unique_names = set([item['name'] for item in items])\n if len(unique_names) != len(items):\n raise ClientException(\"Error: Duplicate sequence names found.\", ErrorType.INVALID_SEQUENCE_DATA)", "def test_add_value_list_singlevalue(self):\n input_item = self.item_class(name=[\"foo\", \"bar\"])\n il = ItemLoader(item=input_item)\n il.add_value(\"name\", \"qwerty\")\n loaded_item = il.load_item()\n self.assertIsInstance(loaded_item, self.item_class)\n self.assertEqual(\n ItemAdapter(loaded_item).asdict(), {\"name\": [\"foo\", \"bar\", \"qwerty\"]}\n )", "def test_user_can_get_list_of_buckets(self):\n with self.client:\n response = self.client.get(\n '/bucketlists/',\n headers=dict(Authorization='Bearer ' + self.get_user_token())\n )\n data = json.loads(response.data.decode())\n self.assertEqual(response.status_code, 200)\n self.assertTrue(data['status'] == 'success')\n self.assertIsInstance(data['buckets'], list)\n self.assertEqual(len(data['buckets']), 0)\n self.assertEqual(data['count'], 0)\n self.assertIsInstance(data['count'], int)\n self.assertEqual(data['previous'], None)\n self.assertEqual(data['next'], None)", "def test_api_delete_bucketlist(self):\n\n res_post = self.client().post('/bucketlist', data={'name': \"Don't forget to exercise\"})\n self.assertEqual(res_post.status_code, 201)\n res_post_in_json = json.loads(res_post.data.decode('UTF-8'))\n id = res_post_in_json['id']\n res_delete = 
self.client().delete(f\"/bucketlist/{id}\")\n self.assertEqual(res_delete.status_code, 200)\n\n # should return 404 after delete the data\n res = self.client().get(f'/bucketlist/{id}')\n self.assertEqual(res.status_code, 404)", "def test_delete_item_successfully(self):\n self.client.post('/buckets',\n content_type='application/json',\n data=self.bucket, headers=self.header)\n self.client.post('/buckets/1/items',\n content_type='application/json',\n data=self.item, headers=self.header)\n response = self.client.delete('/buckets/1/items/1',\n headers=self.header)\n self.assertEquals(response.status_code, 200)\n self.assertIn('Item successfully deleted', response.data.decode())", "def check_if_already_exists(list_name, title, description):\n\n for item in list_name:\n if item['title'] == title:\n return 'Sorry, This title has already been used in another question'\n if item['description'] == description:\n return 'Sorry, This description has already been used in another question'", "def update_element_name(self, items, new_name):\n if new_name != '':\n for i in items:\n if i.text() == new_name:\n #print(\"Name already exists\")\n msgBox = QMessageBox()\n msgBox.setIcon(QMessageBox.Information)\n msgBox.setText(\"Element with this name already exists.\")\n msgBox.setWindowTitle(\"QMessageBox Example\")\n msgBox.setStandardButtons(QMessageBox.Ok)\n msgBox.exec()\n return False\n return new_name\n else:\n if self.list_of_elements.count() == 0:\n new_name = self.element_name+\"_\"+str(0)\n return new_name\n\n for i in range(0, self.list_of_elements.count()+1):\n new_name = self.element_name+\"_\"+str(i)\n exists = self.list_of_elements.findItems(new_name,\n QtCore.Qt.MatchExactly)\n if len(exists) == 0:\n return new_name\n return False", "def test_deletion_handles_no_bucket_found_by_id(self):\n with self.client:\n response = self.client.delete(\n '/bucketlists/1',\n headers=dict(Authorization='Bearer ' + self.get_user_token())\n )\n data = json.loads(response.data.decode())\n self.assertEqual(response.status_code, 404)\n self.assertTrue(data['status'] == 'failed')\n self.assertTrue(data['message'] == 'Bucket resource cannot be found')\n self.assertTrue(response.content_type == 'application/json')", "def test_item_add_failure():\n sc.menu = sc.default_menu\n with pt.raises(Exception):\n sc.current.add_item('Spam Musubi', 1)", "def test_meal_name_already_exists(self):\n\n with self.client:\n self.add_meal(\"fries\", 10000)\n response = self.add_meal(\"fries\", 10000)\n data = json.loads(response.data.decode())\n self.assertEqual(data.get('message'), \"Meal name already exists\")\n self.assertEqual(response.status_code, 409)", "def test_insert(empty_bucket): # pylint: disable=redefined-outer-name\n for idx in range(2):\n element_number = idx + 1\n empty_bucket.insert(f\"key {element_number}\", f\"value {element_number}\")\n\n elements = list(iter(empty_bucket))\n for idx, (key, value) in enumerate(reversed(elements)):\n element_number = idx + 1\n assert key == f\"key {element_number}\"\n assert value == f\"value {element_number}\"", "def test_bucket_is_deleted(self):\n with self.client:\n # Get an auth token\n token = self.get_user_token()\n response = self.client.post(\n '/bucketlists',\n data=json.dumps(dict(name='Travel')),\n headers=dict(Authorization='Bearer ' + token),\n content_type='application/json'\n )\n # Test Bucket creation\n data = json.loads(response.data.decode())\n self.assertEqual(response.status_code, 201)\n self.assertTrue(data['status'], 'success')\n self.assertTrue(data['name'], 
'Travel')\n # Delete the created Bucket\n res = self.client.delete(\n '/bucketlists/1',\n headers=dict(Authorization='Bearer ' + token)\n )\n data = json.loads(res.data.decode())\n self.assertEqual(res.status_code, 200)\n self.assertTrue(data['status'] == 'success')\n self.assertTrue(data['message'] == 'Bucket Deleted successfully')\n self.assertTrue(res.content_type == 'application/json')", "def test_detect_duplicate_train_items(duplicate_train_items: List[JSONDict]) -> None:\n\n with pytest.raises(ValidationError) as e:\n invalid_collection = TrainCollection(items=duplicate_train_items) # noqa: F841\n\n assert e.value.errors() == [\n {\n \"loc\": (\"items\",),\n \"msg\": \"Duplicate item urls detected: ['http://www.crimsonhexagon.com/post1']\",\n \"type\": \"value_error\",\n }\n ]", "def add_item(self, item):\r\n bag_res = consts.BAG_PUT_FAILED\r\n for i in range(len(self._items)):\r\n res = self.put_item_at(i, item, allow_switch=False)\r\n if res == consts.PUT_FORBIDDEN:\r\n return consts.BAG_PUT_FAILED\r\n if res == consts.PUT_SWITCH or \\\r\n res == consts.PUT_INTO_EMPTY or \\\r\n res == consts.PUT_MERGE_TOTALLY:\r\n return consts.BAG_PUT_TOTALLY\r\n if res == consts.PUT_MERGE_PARTIALLY:\r\n bag_res = consts.BAG_PUT_PARTIALLY\r\n continue\r\n if res == consts.PUT_MERGE_FAILED or \\\r\n res == consts.PUT_SWITCH_FORBIDDEN:\r\n continue\r\n return bag_res", "def test_api_get_bucketlist_by_id_not_exist(self):\n res = self.client().get(f\"/bucketlist/99\")\n self.assertEqual(res.status_code, 404)", "def test_iter(multiple_bucket): # pylint: disable=redefined-outer-name\n assert list(iter(multiple_bucket)) == [(\"key 2\", \"value 2\"), (\"key 1\", \"value 1\")]", "def test_get_single_different(single_bucket): # pylint: disable=redefined-outer-name\n with pytest.raises(KeyError):\n single_bucket.get(\"key 2\")", "def test_create_item_missing_name(test_client, item_without_name):\n\n response = test_client.post(BASE_URL,\n data=json.dumps(item_without_name),\n content_type='application/json')\n\n data = json.loads(response.get_data())\n\n assert response.status_code == 400\n assert data['error'] == app.BAD_REQUEST", "def take_item(self, item):\r\n if len(self.items) <= 2:\r\n self.items.append(item)\r\n if self.got_both():\r\n self.working = True", "def test_item_count(self):\n self.assertEqual(len(self.items), 2)", "def test_get_bucket(self):\n pass", "def test_buckets(self):\n objectstore.bucket.Bucket.create('new_bucket', self.context)\n bucket = objectstore.bucket.Bucket('new_bucket')\n\n # creator is authorized to use bucket\n self.assert_(bucket.is_authorized(self.context))\n\n # another user is not authorized\n context2 = context.RequestContext('user2', 'proj2')\n self.assertFalse(bucket.is_authorized(context2))\n\n # admin is authorized to use bucket\n admin_context = context.RequestContext('admin_user', None)\n self.assertTrue(bucket.is_authorized(admin_context))\n\n # new buckets are empty\n self.assertTrue(bucket.list_keys()['Contents'] == [])\n\n # storing keys works\n bucket['foo'] = \"bar\"\n\n self.assertEquals(len(bucket.list_keys()['Contents']), 1)\n\n self.assertEquals(bucket['foo'].read(), 'bar')\n\n # md5 of key works\n self.assertEquals(bucket['foo'].md5, hashlib.md5('bar').hexdigest())\n\n # deleting non-empty bucket should throw a NotEmpty exception\n self.assertRaises(NotEmpty, bucket.delete)\n\n # deleting key\n del bucket['foo']\n\n # deleting empty bucket\n bucket.delete()\n\n # accessing deleted bucket throws exception\n self.assertRaises(NotFound, 
objectstore.bucket.Bucket, 'new_bucket')", "def test_save_item_with_duplicate_value_on_unique_field_raises(\n test_store, andy, pandy, candy\n):\n\n person = next(test_store.get_by(name=\"Andy\"))\n person.name = \"Pandy\"\n\n with pytest.raises(NotUniqueException):\n test_store.save(person)\n\n items = list(test_store.get_by())\n assert len(items) == 3\n assert andy in items\n assert pandy in items\n assert candy in items", "def test_get_buckets(self):\n pass", "def test_app_can_update_a_list(self):\n self.ne=json.dumps({\"newName\":\"pants\"})\n list_update=self.client.put('/shoppinglists/trou',\n data=self.ne,\n headers={\n 'Content-Type':'application/json',\n 'x-access-token':self.tok})\n self.assertIn(\"list doesnt exist\",str(list_update.data)) \n self.assertEqual(list_update.status_code,200)", "def test_add_same_category(self):\n response = self.client.post('/api/v1/categories',\n data=json.dumps(category[0]),\n content_type='application/json',\n headers=self.admin_headers)\n self.assertEqual(response.status_code, 409)\n self.assertIn('category with name already exist',\n str(response.data))", "def item_duplicate():\n return {'name':'chair',\n 'value':300}", "def add_item(self):\n item = LibGen.create_item()\n if not self.item_exists(item.call_number):\n self.item_list[item.call_number] = item\n print(f\"Item({item.call_number}) bas been added.\")\n else:\n print(\"This item already exists.\")", "def test_update_cart_name_duplicate(self):\n user_id = '123'\n cart_id = self.cart_item_manager.create_cart(user_id, 'Cart1', False)\n self.cart_item_manager.create_cart(user_id, 'Cart2', False)\n with self.assertRaises(DuplicateItemError):\n self.cart_item_manager.update_cart(user_id, cart_id, {'CartName': 'Cart2'})", "def add_item(todo_list, todo_new_item):\n check = True\n try:\n todo_list.append(todo_new_item)\n except todo_list:\n print(\"Could not add new item to todo list\")\n check = False\n\n return check", "def _tattle_add_item(resource, item, resourcesalloweddict, resourcesuseddict):\n\n resourcesuseddict['fungible_locks'][resource].acquire()\n\n # always unlock as we exit...\n try: \n\n # It's already acquired. This is always allowed.\n if item in resourcesuseddict[resource]:\n return\n\n if len(resourcesuseddict[resource]) > resourcesalloweddict[resource]:\n raise InternalRepyError, \"Should not be able to exceed resource count\"\n\n if len(resourcesuseddict[resource]) == resourcesalloweddict[resource]:\n # it's clobberin time!\n raise ResourceExhaustedError(\"Resource '\"+resource+\"' limit exceeded!!\")\n\n # add the item to the list. 
We're done now...\n resourcesuseddict[resource].add(item)\n\n finally:\n resourcesuseddict['fungible_locks'][resource].release()", "def test_get_request_on_bucketlist_resource(self):\n\n response = self.client.get(\"/bucketlists/\")\n self.assertEqual(response.status_code, 401)", "def post(self, user):\n # parse request data\n bucketlist_name = self.request.form['name']\n\n # validate bucketlist\n if not bucketlist_name:\n return \"Name cannot be empty\", 401\n\n # create bucketlist and save bucketlist\n bucketlist = Bucketlist(name=bucketlist_name, date_created=datetime.utcnow(\n ), created_by=user.username, author=user)\n bucketlist.save()\n\n return \"Successfully created bucketlist\", 201", "def test_add_new_in_stock_negative(add):\n # here first parameter is for quantity and second for price while flower name is initialised already\n\n for i in [(0, 1.1), (\"we\", \"EW\"), (0, 0)]:\n add[1].add_new_in_stock(10, 4.5), i\n assert not STOCK[-1] == {'flower_name': \"Sunflower\", 'quantity': 10, \"price\": 4.5}\n STOCK.pop()", "def test_create(self):\n responses.add(\n responses.Response(\n method='POST',\n url='https://connection.keboola.com/v2/storage/buckets',\n json=create_response\n )\n )\n name = 'my-new-bucket'\n description = 'Some Description'\n backend = 'snowflake'\n created_detail = self.buckets.create(name=name,\n description=description,\n backend=backend)\n assert created_detail['id'] == 'in.c-{}'.format(name)" ]
[ "0.8174513", "0.74510795", "0.74059886", "0.7099884", "0.7079807", "0.6917014", "0.68960243", "0.68957806", "0.6796505", "0.67483217", "0.6740296", "0.66837454", "0.66416943", "0.66326", "0.6624022", "0.65148836", "0.65080565", "0.64689416", "0.64585537", "0.6440524", "0.64242446", "0.6410709", "0.63807267", "0.6350579", "0.6320388", "0.6309799", "0.6294033", "0.62715477", "0.6227774", "0.62054074", "0.61767995", "0.6158001", "0.61553144", "0.6113789", "0.60992706", "0.6095574", "0.6062735", "0.60627115", "0.60484093", "0.6009464", "0.5995456", "0.59455234", "0.59434927", "0.58792776", "0.5870491", "0.58491683", "0.58446276", "0.58382815", "0.58229005", "0.5806238", "0.57586783", "0.5756578", "0.57439965", "0.5742754", "0.57365817", "0.5724333", "0.5718418", "0.5697598", "0.5679822", "0.56761", "0.5661519", "0.56612736", "0.5658591", "0.5649386", "0.56398547", "0.56166965", "0.5605197", "0.56041765", "0.56019944", "0.5599176", "0.5588335", "0.55863565", "0.5575413", "0.55723953", "0.5569814", "0.5566499", "0.55658436", "0.5558165", "0.5551415", "0.5543399", "0.55417323", "0.5536783", "0.55245644", "0.55230093", "0.55139667", "0.55039835", "0.549067", "0.54875773", "0.5486558", "0.54772186", "0.54731864", "0.5472464", "0.54724383", "0.5471904", "0.54670733", "0.5465468", "0.5451936", "0.54484636", "0.5447029", "0.5423661" ]
0.80040896
1
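The test_fail_repeated_buckelist_item document above only drives the client side of the duplicate check. The handler behavior it implies can be sketched as below; this is a minimal sketch assuming Flask with Flask-SQLAlchemy, where the route path, response messages, and status codes come from the tests, while the app setup, the reduced BucketListItem model, and the handler name are assumptions for illustration.

from flask import Flask, jsonify, request
from flask_sqlalchemy import SQLAlchemy

app = Flask(__name__)
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///:memory:'
db = SQLAlchemy(app)


class BucketListItem(db.Model):
    # Reduced to the two fields the uniqueness check needs.
    id = db.Column(db.Integer, primary_key=True)
    bucketlist_id = db.Column(db.Integer, nullable=False)
    name = db.Column(db.String(128), nullable=False)


with app.app_context():
    db.create_all()


@app.route('/api/v1/bucketlist/<int:bucketlist_id>/items/', methods=['POST'])
def add_bucketlist_item(bucketlist_id):
    name = request.get_json().get('name')
    duplicate = BucketListItem.query.filter_by(
        bucketlist_id=bucketlist_id, name=name).first()
    if duplicate:
        # A second item with an existing name is refused before the session
        # is touched, so the item count stays equal across the request.
        return jsonify({'message': 'Bucketlist Item Exists'}), 409
    db.session.add(BucketListItem(bucketlist_id=bucketlist_id, name=name))
    db.session.commit()
    return jsonify({'message': 'Bucket list item added'}), 201

Checking for the duplicate before the insert is what lets the test assert equal BucketListItem counts before and after the 409 response.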
Method tests the endpoint for updating a bucket list item using PUT.
def test_put_bucketlist_item(self):
    data = {"name": "bucketlist item name", "completed": "true"}
    email = "test@test.com"
    _pword = "test"
    user = User.query.filter_by(email=email).first()
    bucketlist = BucketList.query.filter_by(user_id=user.id, name="test bucketlist").first()
    item = BucketListItem.query.filter_by(bucketlist_id=bucketlist.id, id=1).first()
    self.assertNotEqual(item.name, "bucketlist item name")
    self.assertFalse(item.completed)

    response = self.put_bucketlist_item(email, _pword, bucketlist.id, 1, data)
    result = json.loads(response.data.decode('utf-8'))
    item2 = BucketListItem.query.filter_by(bucketlist_id=bucketlist.id, id=1).first()
    self.assertEqual(response.status, '201 CREATED')
    self.assertEqual(item2.name, "bucketlist item name")
    self.assertTrue(item2.completed)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_edit_bucketlist(self):\n post_data = self.post_a_bucket()\n self.assertEqual(post_data.status_code, 201)\n result_of_put_method = self.client().put(\n '/bucketlists/1',\n headers=dict(Authorization='Bearer '\n + self.token()\n ),\n data={\n \"name\": \"The seasons will be, summer winter and autumn\"\n })\n self.assertEqual(result_of_put_method.status_code, 201)\n result_of_get_method = self.client().get('/bucketlists/1',\n headers=dict(Authorization='Bearer '\n + self.token())\n )\n self.assertIn('The seasons will b', str(result_of_get_method.data))", "def test_bucket_is_updated(self):\n with self.client:\n # Get an auth token\n token = self.get_user_token()\n # Create a Bucket\n response = self.client.post(\n '/bucketlists',\n data=json.dumps(dict(name='Travel')),\n headers=dict(Authorization='Bearer ' + token),\n content_type='application/json'\n )\n # Test Bucket creation\n data = json.loads(response.data.decode())\n self.assertEqual(response.status_code, 201)\n self.assertTrue(data['status'], 'success')\n self.assertTrue(data['name'], 'Travel')\n # Update the bucket name\n res = self.client.put(\n '/bucketlists/1',\n headers=dict(Authorization='Bearer ' + token),\n data=json.dumps(dict(name='Adventure')),\n content_type='application/json'\n )\n data = json.loads(res.data.decode())\n self.assertEqual(res.status_code, 201)\n self.assertTrue(res.content_type == 'application/json')\n self.assertTrue(data['status'] == 'success')\n self.assertTrue(data['name'] == 'Adventure')\n self.assertEqual(data['id'], 1)", "def test_api_edit_bucketlist(self):\n res_post = self.client().post('/bucketlist', data={'name': 'Wake up, Eat, Code, Sleep & Repeat'})\n self.assertEqual(res_post.status_code, 201)\n res_post_in_json = json.loads(res_post.data.decode('UTF-8').replace(\"'\", \"\\\"\"))\n id = res_post_in_json['id']\n res_put = self.client().put(\n f'bucketlist/{id}',\n data={\n 'name': \"Don't forget to exercise\"\n }\n )\n self.assertEqual(res_put.status_code, 200)\n res = self.client().get(f'/bucketlist/{id}')\n self.assertIn(\"exercise\", str(res.data))", "def test_modify_item_successfully(self):\n self.client.post('/buckets',\n content_type='application/json',\n data=self.bucket, headers=self.header)\n self.client.post('/buckets/1/items',\n content_type='application/json',\n data=self.item,\n headers=self.header)\n response = self.client.put('/buckets/1/items/1',\n content_type='application/json',\n data=self.item_edit,\n headers=self.header)\n self.assertEquals(response.status_code, 200)\n self.assertIn('Item successfully updated',\n response.data.decode())", "def test_update_busketlistitem_by_id(self):\n resp = self.client.post('/bucketlists',\n data=json.dumps(self.bucketlist),\n content_type=\"application/json\", headers={\n \"Authorization\": self.token\n })\n self.assertEqual(resp.status_code, 201)\n resp_item = self.client.post('/bucketlistitems/1/items',\n data=json.dumps(\n {\"name\": \"visit the busy surburbs.\"}),\n content_type=\"application/json\", headers={\n \"Authorization\": self.token\n })\n self.assertEqual(resp.status_code, 201)\n update_item = self.client.put('/bucketlistitems/1/items/1',\n data=json.dumps(\n {\"name\": \"visit the busy surburbs and museums too.\"}),\n content_type=\"application/json\", headers={\n \"Authorization\": self.token\n })\n self.assertEqual(update_item.status_code, 201)", "def test_update_bucket(self):\n pass", "def test_put_item_wrong_id(self):\r\n data = {\"name\": \"bucketlist item name\", \"completed\": \"true\"}\r\n email = 
\"test@test.com\"\r\n _pword = \"test\"\r\n user = User.query.filter_by(email=email).first()\r\n bucketlist = BucketList.query.filter_by(user_id=user.id, name=\"test bucketlist\").first()\r\n item = BucketListItem.query.filter_by(bucketlist_id=bucketlist.id, id=0).first()\r\n self.assertFalse(item)\r\n\r\n response = self.put_bucketlist_item(email, _pword, bucketlist.id, 0, data)\r\n result = json.loads(response.data.decode('utf-8'))\r\n self.assertEqual(response.status, '404 NOT FOUND')\r\n self.assertEqual(\r\n result['message'],\r\n 'Bucketlist Item with ID {} not found in the database. You have requested this URI '\\\r\n '[/api/v1/bucketlist/1/items/0] but did you mean /api/v1/bucketlist/<int:bucketlist_id>/items/'\\\r\n ' or /api/v1/bucketlist/<int:bucketlist_id> or /api/v1/bucketlist ?'.format(0)\r\n )", "def test_add_item_at_using_put(self):\n pass", "def taco_test_put_update(self):\n body = '{ \"id\": 400, \"name\": \"item4\", \"content\": \"after test update\" }'\n env = self.get_env('PUT', '/item/4', body=body)\n webapi_start(env, lambda status, response_headers: self.assertEqual(status, '204'))", "def test_add_bucketlist_items(self):\r\n email = \"test@test.com\"\r\n _pword = \"test\"\r\n user = User.query.filter_by(email=email).first()\r\n bucketlist = BucketList.query.filter_by(user_id=user.id, name=\"test bucketlist\").first()\r\n item_no = BucketListItem.query.filter_by(bucketlist_id=bucketlist.id).count()\r\n response = self.add_bucketlist_item(email, _pword, bucketlist.id, \"bucketlist item name\")\r\n result = json.loads(response.data.decode('utf-8'))\r\n self.assertEqual(response.status, '201 CREATED')\r\n self.assertEqual(result['message'], 'Bucket list item added')\r\n new_item_no = BucketListItem.query.filter_by(bucketlist_id=bucketlist.id).count()\r\n self.assertLess(item_no, new_item_no)", "def test_update_item_good(test_client, item):\n\n response = test_client.put(GOOD_ITEM_URL,\n data=json.dumps(item),\n content_type='application/json')\n\n data = json.loads(response.get_data())\n\n assert response.status_code == 200\n assert data['item']['name'] == item['name']\n assert data['item']['value'] == item['value']", "def put_bucketlist_item(self, email, password, bucketlist_id, item_id, data):\r\n headers = self.authentication_headers(email=email, password=password)\r\n return self.client.put(\r\n '/api/v1/bucketlist/{}/items/{}'.format(bucketlist_id, item_id),\r\n content_type=\"application/json\",\r\n data=json.dumps(data),\r\n headers=headers,\r\n follow_redirects=True\r\n )", "def test_update_item_using_post(self):\n pass", "def test_app_can_update_a_list(self):\n self.ne=json.dumps({\"newName\":\"pants\"})\n list_update=self.client.put('/shoppinglists/trou',\n data=self.ne,\n headers={\n 'Content-Type':'application/json',\n 'x-access-token':self.tok})\n self.assertIn(\"list doesnt exist\",str(list_update.data)) \n self.assertEqual(list_update.status_code,200)", "def test_id_of_bucket_to_be_edited_is_invalid(self):\n with self.client:\n # Get an auth token\n token = self.get_user_token()\n # Update the bucket name\n res = self.client.put(\n '/bucketlists/bucketid',\n headers=dict(Authorization='Bearer ' + token),\n data=json.dumps(dict(name='Adventure')),\n content_type='application/json'\n )\n data = json.loads(res.data.decode())\n self.assertEqual(res.status_code, 400)\n self.assertTrue(res.content_type == 'application/json')\n self.assertTrue(data['status'] == 'failed')\n self.assertTrue(data['message'] == 'Please provide a valid Bucket Id')", "def 
test_create_bucketlist_item(self):\n resp = self.client.post('/bucketlists',\n data=json.dumps(self.bucketlist),\n content_type=\"application/json\", headers={\n \"Authorization\": self.token\n })\n self.assertEqual(resp.status_code, 201)\n response = self.client.get(\n \"/bucketlists/1\", headers={\n \"Authorization\": self.token})\n self.assertEqual(response.status_code, 200)\n resp_item = self.client.post('/bucketlistitems/1/items',\n data=json.dumps(\n {\"name\": \"visit the busy surburbs.\"}),\n content_type=\"application/json\", headers={\n \"Authorization\": self.token\n })\n result = json.loads(resp_item.data)\n self.assertEqual(result[\"message\"],\n \"Bucket list item added successfully.\")\n self.assertEqual(resp.status_code, 201)", "def taco_test_put_new(self):\n body = '{ \"id\": 400, \"name\": \"item_new\", \"content\": \"after test update\" }'\n env = self.get_env('PUT', '/item/4', body=body)\n webapi_start(env, lambda status, response_headers: self.assertEqual(status, '204'))", "def test_id_of_bucket_to_be_edited_does_not_exist(self):\n with self.client:\n # Get an auth token\n token = self.get_user_token()\n # Update the bucket name\n res = self.client.put(\n '/bucketlists/1',\n headers=dict(Authorization='Bearer ' + token),\n data=json.dumps(dict(name='Adventure')),\n content_type='application/json'\n )\n data = json.loads(res.data.decode())\n self.assertEqual(res.status_code, 404)\n self.assertTrue(res.content_type == 'application/json')\n self.assertTrue(data['status'] == 'failed')\n self.assertTrue(data['message'] == 'The Bucket with Id 1 does not exist')", "def test_patch_bucket(self):\n pass", "def put(self, user, id):\n # parse request data\n if 'name' not in self.request.form:\n return \"Bucketlist not Update\", 202\n\n bucketlist_name = self.request.form['name']\n\n # validate bucketlist\n if not bucketlist_name:\n return \"Name cannot be empty\", 401\n\n # search for the bucketlist_id\n bucketlist = Bucketlist.query.filter_by(\n id=id, created_by=user.email).first()\n\n # return 400 if bucketlist non exixtant or not belongs to this user\n if bucketlist is None:\n return 'Bucketlist not found', 202\n\n # Update bucketlist and save changes\n bucketlist.name = bucketlist_name\n bucketlist.save()\n\n return \"Successfully updated bucketlist\", 201", "def test_shelflistitem_putpatch_requires_auth(api_settings,\n assemble_custom_shelflist,\n get_shelflist_urls, api_client):\n test_lcode, test_id = '1test', 99999999\n _, _, trecs = assemble_custom_shelflist(test_lcode, [(test_id, {})])\n url = '{}{}'.format(get_shelflist_urls(trecs)[test_lcode], test_id)\n before = api_client.get(url)\n put_resp = api_client.put(url, {})\n patch_resp = api_client.patch(url, {})\n after = api_client.get(url) \n assert put_resp.status_code == 403\n assert patch_resp.status_code == 403\n assert before.data == after.data", "def test_put(self):\n url, port = self.server.address\n\n #couple of basic POSTs\n r = self.client.get(\"http://{0}:{1}/\".format(url, port))\n self.assertEqual(200, r.status_code)\n r = self.client.get(\"http://{0}:{1}\".format(url, port))\n self.assertEqual(200, r.status_code)\n r = self.client.get(\"http://{0}:{1}/200\".format(url, port))\n self.assertEqual(200, r.status_code)\n r = self.client.get(\"http://{0}:{1}/400\".format(url, port))\n self.assertEqual(400, r.status_code)\n\n r = self.client.put(\"http://{0}:{1}/400?foo=bar\".format(url, port))\n self.assertEqual(400, r.status_code)", "def _put(self, *args, **kwargs):\n return self._request('put', *args, 
**kwargs)", "def test_putorganizations_item(self):\n pass", "def test_delete_bucketlist_item(self):\n self.register_user()\n result = self.login_user()\n access_token = json.loads(result.data.decode())['access_token']\n\n # create a bucketlist by making a POST request\n res = self.client().post(\n '/api/v1/bucketlists/',\n headers=dict(Authorization=\"Bearer \" + access_token),\n data=self.bucketlist)\n self.assertEqual(res.status_code, 201)\n # get the json with the bucketlist\n results = json.loads(res.data.decode())\n\n # create a bucketlist item by making a POST request and add it to the created bucketlist\n res = self.client().post(\n '/api/v1/bucketlists/{}/items/'.format(results['id']),\n headers=dict(Authorization=\"Bearer \" + access_token),\n data={\n \"name\": \"Eat fried crabs\"\n })\n self.assertEqual(res.status_code, 201)\n # get the json containing the created bucketlist item\n res_item = json.loads(res.data.decode())\n\n # delete the bucketlist item we just created\n res = self.client().delete(\n '/api/v1/bucketlists/{}/items/{}'.format(results['id'], res_item['id']),\n headers=dict(Authorization=\"Bearer \" + access_token), )\n self.assertEqual(res.status_code, 200)\n\n # Test to see if it exists, should return a 404\n result = self.client().get(\n '/api/v1/bucketlists/{}/items/1'.format(results['id']),\n headers=dict(Authorization=\"Bearer \" + access_token))\n self.assertEqual(result.status_code, 404)", "def update_bucketlist():\n pass", "def test_security_on_put(self):\n # test the update url\n product = Product.objects.all()[0]\n url = '/product/xml/%s/' % product.item_number\n response = self.client.put(url,{'description':'my new description'})\n self.failUnlessEqual(response.status_code, 401)", "def test_vault_update_vault_item(self):\n pass", "def put(handler, *args, **kwargs):\n bucket_id = args[0]\n user_id = args[1]\n content = tornado.escape.json_decode(handler.request.body)\n BucketClasss = type(\"Bucket_%s\" % (bucket_id), (Bucket,), {})\n bucket = yield BucketClasss.get(user_id)\n if not bucket:\n bucket = BucketClasss()\n bucket.pkey = user_id \n if bucket_id == \"userData\":\n save_ver = int(content.get(\"data\", {}).get(\"saveVer\",0))\n current_ver = int(bucket.data.get(\"data\", {}).get(\"saveVer\",0))\n if save_ver < current_ver:\n raise Return((405, None))\n bucket.data = content \n yield bucket.put()\n raise Return((204, None))", "def test_put_method(self):\n self.getPage('/blah', method='PUT')\n self.assertStatus('200 OK')\n self.assertHeader('Content-Type', 'application/json')\n self.assertBody('{\"mystring\": \"blah\"}')", "def _put(self, item: T) -> None:\n ...", "def test_kyc_put_request(self):\n pass", "def test_get_bucketlist_items(self):\n resp = self.client.post('/bucketlists',\n data=json.dumps(self.bucketlist),\n content_type=\"application/json\", headers={\n \"Authorization\": self.token\n })\n self.assertEqual(resp.status_code, 201)\n\n resp_item = self.client.post('/bucketlistitems/1/items',\n data=json.dumps(\n {\"name\": \"visit the busy surburbs.\"}),\n content_type=\"application/json\", headers={\n \"Authorization\": self.token\n })\n\n self.assertEqual(resp_item.status_code, 200)\n resp_item = self.client.get('/bucketlistitems/1/items', headers={\n \"Authorization\": self.token\n })\n self.assertEqual(resp_item.status_code, 200)", "def test_delete_bucketlistitem_by_id(self):\n resp = self.client.post('/bucketlists',\n data=json.dumps(self.bucketlist),\n content_type=\"application/json\", headers={\n \"Authorization\": self.token\n 
})\n self.assertEqual(resp.status_code, 201)\n resp_item = self.client.post('/bucketlistitems/1/items',\n data=json.dumps(\n {\"name\": \"visit the busy surburbs.\"}),\n content_type=\"application/json\", headers={\n \"Authorization\": self.token\n })\n self.assertEqual(resp.status_code, 201)\n delete_item = self.client.delete('/bucketlistitems/1/items/1',\n headers={\n \"Authorization\": self.token\n })\n self.assertEqual(delete_item.status_code, 204)", "def test_duplicate_bucketlist_item(self):\n resp = self.client.post('/bucketlists',\n data=json.dumps(self.bucketlist),\n content_type=\"application/json\", headers={\n \"Authorization\": self.token\n })\n self.assertEqual(resp.status_code, 201)\n resp_item = self.client.post('/bucketlistitems/1/items',\n data=json.dumps(\n {\"name\": \"visit the busy surburbs.\"}),\n content_type=\"application/json\", headers={\n \"Authorization\": self.token\n })\n self.assertEqual(resp.status_code, 201)\n resp_item2 = self.client.post('/bucketlistitems/1/items',\n data=json.dumps(\n {\"name\": \"visit the busy surburbs.\"}),\n content_type=\"application/json\", headers={\n \"Authorization\": self.token\n })\n result = json.loads(resp_item2.data)\n self.assertEqual(result[\"message\"], \"Item with the given name exists.\")\n self.assertEqual(resp_item2.status_code, 409)", "def test_put_one(self):\n response = self.client.put('/api/v1/parcels/100')\n self.assertEqual(response.status_code, 200)", "def test_put(self):\n client = RestClient(host=self.host, username='')\n rest_url = 'some/url/'\n \n # Mock good response\n with responses.RequestsMock() as rsps:\n rsps.add(responses.PUT, f'{self.host}/{rest_url}', status=200,\n json={'value':\"good!\"})\n r = client.put(rest_url)", "def test_put_success(self):\n test_data = {\n 'first_name': 'new_first_name',\n 'last_name': 'new_last_name'\n }\n response = self.client.put(self.url, json.dumps(test_data), content_type='application/json')\n self.assertEquals(response.status_code, 200)", "def test_delete_bucketlist_item(self):\r\n email = \"test@test.com\"\r\n _pword = \"test\"\r\n user = User.query.filter_by(email=email).first()\r\n bucketlist = BucketList.query.filter_by(user_id=user.id, name=\"test bucketlist\").first()\r\n item = BucketListItem.query.filter_by(bucketlist_id=bucketlist.id, id=1).first()\r\n self.assertTrue(item)\r\n\r\n response = self.delete_bucketlist_item(email, _pword, bucketlist.id, item.id)\r\n result = json.loads(response.data.decode('utf-8'))\r\n self.assertEqual(response.status, '200 OK')\r\n self.assertEqual(\r\n result['message'],\r\n 'Bucketlist Item with ID {} deleted'.format(item.id)\r\n )\r\n item = BucketListItem.query.filter_by(bucketlist_id=bucketlist.id, id=1).first()\r\n self.assertFalse(item)", "def test_fail_repeated_buckelist_item(self):\r\n user = User.query.filter_by(email=\"test@test.com\").first()\r\n bucketlist = BucketList.query.filter_by(user_id=user.id, name=\"test bucketlist\").first()\r\n item_no = BucketListItem.query.filter_by(bucketlist_id=bucketlist.id).count()\r\n response = self.add_bucketlist_item(\"test@test.com\", \"test\", bucketlist.id, \"test item\")\r\n result = json.loads(response.data.decode('utf-8'))\r\n self.assertEqual(response.status, '409 CONFLICT')\r\n self.assertEqual(result['message'], 'Bucketlist Item Exists')\r\n new_item_no = BucketListItem.query.filter_by(bucketlist_id=bucketlist.id).count()\r\n self.assertEqual(item_no, new_item_no)", "def test_get_bucketlist_item_id(self):\n resp = self.client.post('/bucketlists',\n 
data=json.dumps(self.bucketlist),\n content_type=\"application/json\", headers={\n \"Authorization\": self.token\n })\n self.assertEqual(resp.status_code, 201)\n resp_item = self.client.post('/bucketlistitems/1/items',\n data=json.dumps(\n {\"name\": \"visit the busy surburbs.\"}),\n content_type=\"application/json\", headers={\n \"Authorization\": self.token\n })\n self.assertEqual(resp.status_code, 201)\n get_item = self.client.get('/bucketlistitems/1/items/1', headers={\n \"Authorization\": self.token\n })\n self.assertEqual(resp.status_code, 201)", "def test_put(self):\n\n url = reverse('file')\n\n data = {\n 'shard_id': self.shard1.id,\n 'link_id': \"b8866161-0b1f-4a8e-acde-07047313ec8f\",\n 'parent_datastore_id': str(self.test_datastore_obj.id),\n 'chunk_count': 1,\n 'size': 512,\n }\n\n self.client.force_authenticate(user=self.test_user_obj)\n response = self.client.put(url, data)\n\n self.assertEqual(response.status_code, status.HTTP_201_CREATED)\n self.assertIn('file_id', response.data)\n self.assertIn('file_transfer_id', response.data)\n self.assertIn('file_transfer_secret_key', response.data)", "def test_update_company_props_using_put(self):\n pass", "def test_client_can_do_put_request(self):\n response = self.httpbin_4.test_requests_put_method()\n self.assertEqual(response.request.method, 'PUT')\n self.assertEqual(response.status_code, 200)", "def test_collection_put(testapp, execute_counter):\n initial = {\n 'title': \"Testing\",\n 'type': \"object\", # include a non-required field\n 'description': \"This is the initial insert\",\n }\n item_url = testapp.post_json('/embedding-tests', initial).location\n\n with execute_counter.expect(1):\n item = testapp.get(item_url).json\n\n for key in initial:\n assert item[key] == initial[key]\n\n update = {\n 'title': \"New Testing\",\n 'type': \"object\",\n 'description': \"This is the updated insert\",\n }\n testapp.put_json(item_url, update, status=200)\n\n res = testapp.get('/' + item['uuid']).follow().json\n\n for key in update:\n assert res[key] == update[key]", "def put(self, *args, **kwargs):\n self.request(\"put\", *args, **kwargs)", "def test_shelflistitem_update_items(method, api_settings,\n assemble_custom_shelflist,\n shelflist_solr_env,\n filter_serializer_fields_by_opt,\n derive_updated_resource, send_api_data,\n get_shelflist_urls, api_client):\n test_lcode, test_id = '1test', 99999999\n _, _, trecs = assemble_custom_shelflist(test_lcode, [(test_id, {})])\n url = '{}{}'.format(get_shelflist_urls(trecs)[test_lcode], test_id)\n before = api_client.get(url)\n serializer = before.renderer_context['view'].get_serializer()\n writable = filter_serializer_fields_by_opt(serializer, 'writable', True)\n unwritable = filter_serializer_fields_by_opt(serializer, 'writable', False)\n profile = shelflist_solr_env.profiles['shelflistitem']\n try_item = derive_updated_resource(before.data, serializer, profile,\n which_fields=writable)\n\n if method == 'put':\n req_body = ujson.dumps(try_item)\n elif method == 'patch':\n req_body = jsonpatch.make_patch(before.data, try_item)\n \n resp = send_api_data(api_client, url, req_body, method)\n after = api_client.get(url)\n\n assert resp.status_code == 200\n assert resp.data['links']['self']['href'].endswith(url)\n assert resp.data['links']['self']['id'] == test_id\n\n print(before.data)\n print(try_item)\n print(after.data)\n\n for fname in writable:\n assert after.data[fname] == try_item[fname]\n assert after.data[fname] != before.data[fname]\n\n for fname in unwritable:\n assert after.data[fname] 
== try_item[fname]\n assert after.data[fname] == before.data[fname]", "def test_update_risk_profile_using_put(self):\n pass", "def http_put(self, **kwargs):\n return self.rabjcallable.put(**kwargs)", "def test_creating_and_getting_a_bucketlist_for_authenticated_user(self):\n\n # test all bucketlists\n response = self.client.post(\n \"/bucketlists/\",\n data=dict(name='test_bucketlist'),\n headers={'Authorization': self.user_token}\n )\n bucketlist = json.loads(response.data)\n\n self.assertEqual(response.status_code, 200)\n self.assertEqual(bucketlist[\"name\"], 'test_bucketlist')\n\n # test single bucketlist\n self.bucketlist_id = bucketlist[\"bucketlist_id\"]\n single_bucketlist = self.client.get(\n \"/bucketlists/\" + str(self.bucketlist_id) + \"\",\n headers={'Authorization': self.user_token}\n )\n\n one_bucketlist = json.loads(single_bucketlist.data)\n\n self.assertEqual(single_bucketlist.status_code, 200)\n self.assertEqual(one_bucketlist[\"name\"], 'test_bucketlist')\n\n # test all items in bucketlist\n item = self.client.post(\n \"/bucketlists/\" + str(self.bucketlist_id) + \"/items/\",\n data=dict(name=\"test_item\"),\n headers={'Authorization': self.user_token}\n )\n\n one_item = json.loads(item.data)\n\n self.assertEqual(item.status_code, 200)\n self.assertEqual(one_item[\"name\"], 'test_item')\n\n # test single item in bucketlist\n self.item_id = one_item[\"item_id\"]\n single_item = self.client.get(\n \"/bucketlists/\" + str(self.bucketlist_id) + \"/items/\" + str(self.item_id) + \"\",\n headers={'Authorization': self.user_token}\n )\n\n created_item = json.loads(single_item.data)\n\n self.assertEqual(single_item.status_code, 200)\n self.assertEqual(created_item[\"name\"], 'test_item')\n\n # test for deletion of bucketlist\n second_bucketlist = self.client.post(\n \"/bucketlists/\",\n data=dict(name='second_bucketlist'),\n headers={'Authorization': self.user_token}\n )\n\n bucketlist_two = json.loads(second_bucketlist.data)\n\n self.assertEqual(second_bucketlist.status_code, 200)\n self.assertEqual(bucketlist_two[\"name\"], 'second_bucketlist')\n\n delete_response = self.client.delete(\n \"/bucketlists/\" + str(bucketlist_two[\"bucketlist_id\"]) + \"\",\n headers={'Authorization': self.user_token}\n )\n\n deletion = json.loads(delete_response.data)\n\n self.assertEqual(delete_response.status_code, 200)\n self.assertEqual(deletion[\"message\"], \"Deleted\")\n\n # test for deletion of an item in bucketlist\n delete_item = self.client.delete(\n \"/bucketlists/\" + str(bucketlist[\"bucketlist_id\"]) + \"/items/\" + str(one_item[\"item_id\"]) + \"\",\n headers={'Authorization': self.user_token}\n )\n\n item_deletion = json.loads(delete_item.data)\n\n self.assertEqual(delete_item.status_code, 200)\n self.assertEqual(item_deletion[\"message\"], \"Deleted\")\n\n # test for updating of bucketlist\n self.bucketlist_id = bucketlist[\"bucketlist_id\"]\n bucketlist_update = self.client.put(\n \"/bucketlists/\" + str(self.bucketlist_id) + \"\",\n data=dict(name='bucketlist_test'),\n headers={'Authorization': self.user_token}\n )\n\n updated_bucketlist = json.loads(bucketlist_update.data)\n\n self.assertEqual(bucketlist_update.status_code, 200)\n self.assertEqual(updated_bucketlist[\"name\"], 'bucketlist_test')\n\n # test update of item in bucketlist\n item = self.client.post(\n \"/bucketlists/\" + str(self.bucketlist_id) + \"/items/\",\n data=dict(name=\"test_item\"),\n headers={'Authorization': self.user_token}\n )\n\n one_item = json.loads(item.data)\n\n item_update = self.client.put(\n 
\"/bucketlists/\" + str(self.bucketlist_id) + \"/items/\"+ str(one_item[\"item_id\"]) + \"\",\n data=dict(name=\"item_test\"),\n headers={'Authorization': self.user_token}\n )\n\n updated_item = json.loads(item_update.data)\n\n self.assertEqual(item_update.status_code, 200)\n self.assertEqual(updated_item[\"name\"], 'item_test')", "def put_item(key, value):\n try:\n response = table.put_item( Item={ 'my-key': key, 'some-other-key': value )\n print(f\"Successfully added new item\")\n print(f\"Response : {response}\")\n except ClientError as ce:\n print(f\"Failed to creat new item - key : {key}, value : {value}\")\n print(ce)\n\ndef update_nested_item(key, value):\n \"\"\"\n Update a nested item. create \n \"\"\"\n try:\n response = table.update_item( Key={ 'my-key': key },\n UpdateExpression='SET #other-key = :new_value',\n ExpressionAttributeNames={\n '#other-key': 'New-Key'\n },\n ExpressionAttributeValues={ ':new_value': True },\n ReturnValues='ALL_NEW'\n )\n print(\"Successfully created/updated item.\")\n print(f\"Response : {response}\")\n except ClientError as ce:\n print(f\"Failed to update item : {ce}\")", "def test_bucketlist_creation(self):\n post_data = self.post_a_bucket()\n self.assertEqual(post_data.status_code, 201)\n self.assertIn('Climb the Himalayas', str(post_data.data))", "def put(self, *args, **kwargs):\n return self.handle_put_request()", "def put_object(self, bucket_name, key, data):\n url = self.__key_url(bucket_name, key)\n resp = self.infinispan_client.put(url, data=data,\n auth=self.basicAuth,\n headers=self.headers)\n logger.debug(resp)", "def test_content_type_for_editing_bucket_is_json(self):\n with self.client:\n token = self.get_user_token()\n res = self.client.put(\n '/bucketlists/1',\n headers=dict(Authorization='Bearer ' + token),\n data=json.dumps(dict(name='Adventure'))\n )\n data = json.loads(res.data.decode())\n self.assertEqual(res.status_code, 202)\n self.assertTrue(data['status'] == 'failed')\n self.assertTrue(data['message'] == 'Content-type must be json')", "def test_put_list_replace(self):\n self.story.places.add(*list(Place.objects.filter(name__in=('Humboldt Park', 'Wicker Park'))))\n self.story.save()\n self.assertEqual(self.story.places.count(), 2)\n put_data = [place.place_id for place in\n Place.objects.filter(name=\"Logan Square\")]\n self.api_client.client.login(username=self.username, password=self.password)\n uri = '/api/0.1/stories/%s/places/' % (self.story.story_id)\n response = self.api_client.put(uri, format='json', data=put_data)\n self.assertHttpAccepted(response)\n self.story = Story.objects.get(story_id=self.story.story_id)\n self.assertEqual(self.story.places.count(), 1)\n ids = [place.place_id for place in self.story.places.all()]\n self.assertEqual(ids, put_data)", "def _put(self, url, data, extra_headers=None):\n headers = {'X-Requested-By': 'Unit Tests'}\n headers.update(extra_headers)\n return self.client.put(\n url,\n content_type='application/json',\n data=utils.as_json(data),\n headers=headers,\n )", "def test_put(self):\n client = kazoo.client.KazooClient()\n zkutils.put(client, '/foo/bar')\n kazoo.client.KazooClient.create.assert_called_with(\n '/foo/bar', b'', acl=mock.ANY, makepath=True,\n sequence=False, ephemeral=False)", "def test_PUT(self):\n if not self.url:\n return\n response = self.client.put(self.url, {}, format='json')\n self.assertIn(response.status_code, [status.HTTP_405_METHOD_NOT_ALLOWED,\n status.HTTP_401_UNAUTHORIZED])", "def test_update_currency_using_put(self):\n pass", "def 
test_put_list_new(self):\n self.story.save()\n self.assertEqual(self.story.places.count(), 0)\n put_data = [place.place_id for place in\n Place.objects.filter(name=\"Logan Square\")]\n self.api_client.client.login(username=self.username, password=self.password)\n uri = '/api/0.1/stories/%s/places/' % (self.story.story_id)\n response = self.api_client.put(uri, format='json', data=put_data)\n self.assertHttpAccepted(response)\n self.story = Story.objects.get(story_id=self.story.story_id)\n self.assertEqual(self.story.places.count(), 1)\n ids = [place.place_id for place in self.story.places.all()]\n self.assertEqual(ids, put_data)", "def put(self, key, headers, value, metadata=None):", "def put(self):\n pass", "def put(self):\n pass", "def test_api_delete_bucketlist(self):\n\n res_post = self.client().post('/bucketlist', data={'name': \"Don't forget to exercise\"})\n self.assertEqual(res_post.status_code, 201)\n res_post_in_json = json.loads(res_post.data.decode('UTF-8'))\n id = res_post_in_json['id']\n res_delete = self.client().delete(f\"/bucketlist/{id}\")\n self.assertEqual(res_delete.status_code, 200)\n\n # should return 404 after delete the data\n res = self.client().get(f'/bucketlist/{id}')\n self.assertEqual(res.status_code, 404)", "def test_delete_item_successfully(self):\n self.client.post('/buckets',\n content_type='application/json',\n data=self.bucket, headers=self.header)\n self.client.post('/buckets/1/items',\n content_type='application/json',\n data=self.item, headers=self.header)\n response = self.client.delete('/buckets/1/items/1',\n headers=self.header)\n self.assertEquals(response.status_code, 200)\n self.assertIn('Item successfully deleted', response.data.decode())", "def put(self, item): \n self.__db.rpush(self.key, item)", "def test_delete_item_wrong_id(self):\r\n email = \"test@test.com\"\r\n _pword = \"test\"\r\n user = User.query.filter_by(email=email).first()\r\n bucketlist = BucketList.query.filter_by(user_id=user.id, name=\"test bucketlist\").first()\r\n item = BucketListItem.query.filter_by(bucketlist_id=bucketlist.id, id=0).first()\r\n self.assertFalse(item)\r\n\r\n response = self.delete_bucketlist_item(email, _pword, bucketlist.id, 0)\r\n result = json.loads(response.data.decode('utf-8'))\r\n self.assertEqual(response.status, '404 NOT FOUND')\r\n self.assertEqual(\r\n result['message'],\r\n 'Bucketlist Item with ID {} not found in the database. 
You have requested this URI '\\\r\n '[/api/v1/bucketlist/1/items/0] but did you mean /api/v1/bucketlist/<int:bucketlist_id>/items/'\\\r\n ' or /api/v1/bucketlist/<int:bucketlist_id> or /api/v1/bucketlist ?'.format(0)\r\n )", "def test_update(self, mock_put):\n self.policies.update(id=333114, policy_update=self.policy_show_response)\n\n mock_put.assert_called_once_with(\n url='https://api.newrelic.com/v2/alert_policies/333114.json',\n headers=self.policies.headers,\n data=json.dumps(self.policy_show_response)\n )", "def test_bucketlist_create(self):\n res = self.client().post('/bucketlist', data=self.bucketlist)\n self.assertEqual(res.status_code, 201)\n self.assertIn('Go to vacation', str(res.data))", "def put_req(self, item):\n self.export.put_req(item)", "def test_put_request_by_owner(self):\n client = APIClient()\n client.credentials(HTTP_AUTHORIZATION=self.test_user1_token)\n response = client.post('/api/places/', self.restaurant_data, format='json')\n url = f\"/api/places/{response.data['id']}/\"\n\n response = client.put(url, self.restaurant_data, format='json')\n self.assertEqual(response.status_code, status.HTTP_200_OK)", "def test_partly_update_book(self):\n data = {'isbn':'96712116-2'}\n response = self.client.patch(self.book.get_absolute_url(), data, format='json', content_type='application/json')\n self.assertEqual(response.status_code, status.HTTP_200_OK)\n response = self.client.get(self.book.get_absolute_url())\n self.assertEqual(response.status_code, status.HTTP_200_OK)\n self.assertContains(response, '96712116-2')", "def test_delete_item_with_non_existing_bucket(self):\n self.client.post('/buckets',\n content_type='application/json',\n data=self.bucket, headers=self.header)\n self.client.post('/buckets/1/items',\n content_type='application/json',\n data=self.item, headers=self.header)\n response = self.client.delete('/buckets/2/items/1'\n , headers=self.header)\n self.assertEquals(response.status_code, 400)\n self.assertIn('Attempting to delete item on non existing bucket',\n response.data.decode())", "def test_single_bucketlist_item_delete_with_empty_token(self):\n self.register_user()\n result = self.login_user()\n access_token = json.loads(result.data.decode())['access_token']\n\n res = self.client().post(\n '/api/v1/bucketlists/',\n headers=dict(Authorization=\"Bearer \" + access_token),\n data=self.bucketlist)\n\n # assert that the bucketlist is created\n self.assertEqual(res.status_code, 201)\n # get the response data in json format\n results = json.loads(res.data.decode())\n\n # create a bucketlist item by making a POST request and add it to the created bucketlist\n res = self.client().post(\n '/api/v1/bucketlists/{}/items/'.format(results['id']),\n headers=dict(Authorization=\"Bearer \" + access_token),\n data={\n \"name\": \"Eat fried crabs\"\n })\n self.assertEqual(res.status_code, 201)\n # get the json containing the created bucketlist item\n res_item = json.loads(res.data.decode())\n\n # delete the bucketlist item we just created\n res = self.client().delete(\n '/api/v1/bucketlists/{}/items/{}'.format(results['id'], res_item['id']),\n headers=dict(Authorization=\"\"), )\n self.assertEqual(res.status_code, 401)\n self.assertIn('Token not provided in the header with key Authorization.', str(res.data))", "def test_PUT4(self):\n payload = {\n \"make\": \"Nissan\",\n \"model\": \"Skyline\",\n \"year\": 1999,\n \"price\": 2200\n }\n r = requests.put(self.address + \"/loremipsum/42\", json=payload)\n self.assertEqual(r.status_code, 400)", "def 
test_company_put_permissions(self):\n companyPK = Company.objects.get(name=self.admin.profile.company.name).pk\n url = reverse('Company-detail', kwargs={'pk': companyPK + 1})\n data = {'name': 'NewTestCompany', 'address': {'address1': '123 fake st',\n 'address2': 'fake address 2',\n 'city': 'nowhere', 'state': 'IN', 'zip': '90210'}}\n response = self.client.put(url, data, format='json')\n #This is 404 instead of 403 because there is no way to view a company\n #that you arent an employee of.\n self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)\n self.assertNotEqual(Company.objects.get(pk=companyPK).name,\n 'NewTestCompany')", "def put(data):", "def test_update():\n payload = {'age': 99}\n sample_uuid = get_sample_id()\n response = requests.put(f'http://localhost:5000/api/persons/{sample_uuid}', json=payload)\n data = response.json()\n\n assert response.status_code == 200\n for field in FIELDS:\n assert field in data", "def do_PUT(self,):\n self.http_method = 'PUT'\n # Nothing to do for now.\n pass", "def testUpdate(self):\n data = {'name': 'toto2'}\n response = requests.put(url=self.url, json=data)\n headers = response.headers\n json_data = response.json()\n\n self.assertTrue(self.place == storage.get(Place, self.place_id))\n self.assertEqual(response.status_code, 200, WRONG_STATUS_CODE_MSG)\n self.assertEqual(\n headers['Content-Type'], 'application/json', WRONG_TYPE_RETURN_MSG)\n storage.reload()\n place = storage.get(Place, self.place_id)\n self.assertEqual(place.name, 'toto2')\n self.assertIn('name', json_data, MISSING_NAME_ATTR_MSG)\n self.assertIn('number_rooms', json_data, MISSING_ROOM_NB_ATTR_MSG)\n self.assertIn('number_bathrooms', json_data,\n MISSING_BATHROOM_NB_ATTR_MSG)\n self.assertIn('price_by_night', json_data,\n MISSING_PRICE_BY_NIGHT_ATTR_MSG)\n self.assertIn('user_id', json_data, MISSING_USER_ID_ATTR_MSG)\n self.assertIn('city_id', json_data, MISSING_CITY_ID_ATTR_MSG)\n self.assertIn('created_at', json_data, MISSING_CREATED_AT_ATTR_MSG)\n self.assertIn('updated_at', json_data, MISSING_UPDATED_AT_ATTR_MSG)\n self.assertIn('__class__', json_data, MISSING_CLASS_ATTR_MSG)\n self.assertEqual(json_data['name'], 'toto2')\n storage.delete(place)\n storage.save()", "def manipulate_bucketlist():\n pass", "def test_put_stock(self):\n body = StockProduct()\n response = self.client.open(\n '/omogollo2/ServerAPI/1.0.0/stock/{productId}'.format(product_id=56),\n method='PUT',\n data=json.dumps(body),\n content_type='application/json')\n self.assert200(response,\n 'Response body is : ' + response.data.decode('utf-8'))", "def simulate_put(app, path, **kwargs) -> _ResultBase:\n return simulate_request(app, 'PUT', path, **kwargs)", "def test_bucket_by_id_is_returned_on_get_request(self):\n with self.client:\n token = self.get_user_token()\n # Create a Bucket\n response = self.client.post(\n '/bucketlists',\n data=json.dumps(dict(name='Travel')),\n headers=dict(Authorization='Bearer ' + token),\n content_type='application/json'\n )\n # Test Bucket creation\n data = json.loads(response.data.decode())\n self.assertEqual(response.status_code, 201)\n self.assertTrue(data['status'], 'success')\n self.assertTrue(data['name'], 'Travel')\n response = self.client.get(\n '/bucketlists/1',\n headers=dict(Authorization='Bearer ' + token)\n )\n data = json.loads(response.data.decode())\n self.assertEqual(response.status_code, 200)\n self.assertTrue(data['status'] == 'success')\n self.assertTrue(data['bucket']['name'] == 'travel')\n self.assertIsInstance(data['bucket'], dict)\n 
self.assertTrue(response.content_type == 'application/json')", "def test_update_item_incorrect_id(test_client, item):\n\n response = test_client.put(BAD_ITEM_URL,\n data=json.dumps(item),\n content_type='application/json')\n\n data = json.loads(response.get_data())\n\n assert response.status_code == 404\n assert data['error'] == app.NOT_FOUND", "def test_update_item_incorrect_content_type(test_client, item):\n\n response = test_client.put(GOOD_ITEM_URL,\n data=json.dumps(item))\n\n data = json.loads(response.get_data())\n\n assert response.status_code == 400\n assert data['error'] == app.BAD_REQUEST", "def __setitem__(self, key, value):\n\n bucket_key = self.key_for_bucket(key)\n self.buckets[bucket_key][key] = value", "def test_update_success(self, mock_put):\n self.policies.update(\n id=self.policy_single_response['policy']['id'],\n name=self.policy_single_response['policy']['name'],\n incident_preference=self.policy_single_response['policy']['incident_preference']\n )\n\n mock_put.assert_called_once_with(\n url='https://api.newrelic.com/v2/alerts_policies/{0}.json'.format(\n self.policy_single_response['policy']['id']\n ),\n headers=self.policies.headers,\n data=json.dumps({\n \"policy\": {\n \"name\": self.policy_single_response['policy']['name'],\n \"incident_preference\": self.policy_single_response['policy']['incident_preference']\n }\n })\n )", "def test_put(self):\n self.seed_static_data()\n\n params = {\n 'id': 2,\n 'event_id': 1,\n 'tag_type': 'REGISTRATION',\n 'name': {\n 'en': 'Renamed English Name', # Rename\n 'zu': 'Zulu Name'\n },\n 'description': {\n 'en': 'Renamed English Description',\n 'zu': 'Zulu Description'\n },\n 'active': True\n }\n\n response = self.app.put(\n '/api/v1/tag', \n headers=self.user1_headers, \n data=json.dumps(params),\n content_type='application/json')\n self.assertEqual(response.status_code, 200)\n\n response = self.app.get('/api/v1/tag', headers=self.user1_headers, data={'id': 2, 'event_id': 1, 'language': 'en'})\n data = json.loads(response.data)\n\n self.assertEqual(data['id'], 2)\n self.assertEqual(data['event_id'], 1)\n self.assertEqual(data['tag_type'], 'REGISTRATION')\n self.assertDictEqual(data['name'], {\n 'en': 'Renamed English Name',\n 'zu': 'Zulu Name'\n })\n self.assertDictEqual(data['description'], {\n 'en': 'Renamed English Description',\n 'zu': 'Zulu Description'\n })", "def taco_test_post_param_update(self):\n body = '{ \"id\": 400, \"name\": \"item4\", \"content\": \"after test update\" }'\n env = self.get_env('POST', '/item/4', body=body)\n result = webapi_start(env, lambda status, response_headers: self.assertEqual(status, '204'))\n # webapi_start(env, lambda status, response_headers: self.assertEqual(status, '204'))\n debug.log('result', result)", "def put(self):\n return", "def test_single_bucketlist_item_delete_with_invalid_token(self):\n self.register_user()\n result = self.login_user()\n access_token = json.loads(result.data.decode())['access_token']\n\n res = self.client().post(\n '/api/v1/bucketlists/',\n headers=dict(Authorization=\"Bearer \" + access_token),\n data=self.bucketlist)\n\n # assert that the bucketlist is created\n self.assertEqual(res.status_code, 201)\n # get the response data in json format\n results = json.loads(res.data.decode())\n\n # create a bucketlist item by making a POST request and add it to the created bucketlist\n res = self.client().post(\n '/api/v1/bucketlists/{}/items/'.format(results['id']),\n headers=dict(Authorization=\"Bearer \" + access_token),\n data={\n \"name\": \"Eat fried crabs\"\n 
})\n self.assertEqual(res.status_code, 201)\n # get the json containing the created bucketlist item\n res_item = json.loads(res.data.decode())\n\n # delete the bucketlist item we just created\n res = self.client().delete(\n '/api/v1/bucketlists/{}/items/{}'.format(results['id'], res_item['id']),\n headers=dict(Authorization=access_token), )\n self.assertEqual(res.status_code, 401)\n self.assertIn('Invalid token format.', str(res.data))", "def put_req(self, item):\n self.req_q.put(item)", "def put(self,item):\n\t\ttry:\n\t\t\tself.logger.debug('Im trying to put new item to queue %s'%(item))\n\t\t\tself.queue.put(item)\n\t\t\tself.logger.debug('Successfull put new item to queue')\n\t\t\treturn True\n\t\texcept Exception, e:\n\t\t\tself.logger.error('Error method put, item: %s, error: %s'%(item,e),exc_info=True)\n\t\t\treturn False", "def updateItem(self, object):\n pass", "def test_single_bucketlist_item_delete_with_no_auth_header(self):\n self.register_user()\n result = self.login_user()\n access_token = json.loads(result.data.decode())['access_token']\n\n res = self.client().post(\n '/api/v1/bucketlists/',\n headers=dict(Authorization=\"Bearer \" + access_token),\n data={'name': 'Visit the Grand Canyon!'})\n self.assertEqual(res.status_code, 201)\n # get the bucketlist in json\n results = json.loads(res.data.decode())\n\n # create a bucketlist item by making a POST request and add it to the created bucketlist\n res = self.client().post(\n '/api/v1/bucketlists/{}/items/'.format(results['id']),\n headers=dict(Authorization=\"Bearer \" + access_token),\n data={\n \"name\": \"Eat fried crabs\"\n })\n self.assertEqual(res.status_code, 201)\n # get the json containing the created bucketlist item\n res_item = json.loads(res.data.decode())\n\n # delete the bucketlist item we just created\n res = self.client().delete(\n '/api/v1/bucketlists/{}/items/{}'.format(results['id'], res_item['id']),\n headers=dict(), )\n self.assertEqual(res.status_code, 401)\n self.assertIn('Header with key Authorization missing.', str(res.data))", "def _put(self, path=\"\", **kwargs):\n uri = force_json(self.uri + path)\n return self.client.request(uri, method=\"PUT\", **kwargs)", "def post_bucketlist():\n pass", "def put(self, path: str) -> Response:\n endpoint_ = checkEndpoint(\"PUT\", path)\n if not endpoint_[\"method\"]:\n # If endpoint and PUT method is not supported in the API\n abort(endpoint_[\"status\"])\n # If 'instances' is available in request\n params = request.args.to_dict()\n object_ = json.loads(request.data.decode(\"utf-8\"))\n if params.get(\"instances\") or object_.get(\"data\"):\n int_list = params.get(\"instances\")\n return items_put_response(path, int_list)\n return item_collection_put_response(path)" ]
[ "0.8058875", "0.80090237", "0.7962996", "0.7944334", "0.7826577", "0.7747739", "0.7705827", "0.74234086", "0.7353868", "0.72529393", "0.7214844", "0.72071826", "0.7201491", "0.7160625", "0.71453685", "0.7111688", "0.71060103", "0.70663935", "0.69522905", "0.69248813", "0.6919276", "0.69109577", "0.69079494", "0.6896508", "0.68850464", "0.68756735", "0.6859558", "0.6834577", "0.68279994", "0.6807831", "0.679875", "0.67807937", "0.677639", "0.67744964", "0.67727655", "0.6748231", "0.6744687", "0.67370856", "0.6732408", "0.6726408", "0.6660416", "0.6627137", "0.6597066", "0.65967625", "0.65808046", "0.656551", "0.6526262", "0.65217024", "0.64995265", "0.6495017", "0.64743155", "0.6473055", "0.6470004", "0.6463738", "0.6459074", "0.6423129", "0.6385486", "0.6371943", "0.6364231", "0.6358235", "0.63517106", "0.63207835", "0.6320321", "0.6320321", "0.6307493", "0.63070303", "0.63070035", "0.6295929", "0.62924683", "0.6291436", "0.62905526", "0.6285734", "0.62657213", "0.62644464", "0.6261184", "0.6252559", "0.6249848", "0.62427956", "0.62398124", "0.623918", "0.62149805", "0.62137866", "0.6201951", "0.61982197", "0.6193987", "0.6181598", "0.6164612", "0.6161436", "0.6157844", "0.6153361", "0.6149743", "0.61467546", "0.61380196", "0.6134935", "0.6132468", "0.61318254", "0.61190665", "0.61139977", "0.6106711", "0.6105889" ]
0.8477841
0
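These three fields close a retrieval-training record: the per-negative similarity scores, the positive document's score, and its rank among the candidates. As a rough illustration only, a minimal sketch of how such scores could be computed, assuming a sentence-transformers encoder and cosine similarity (the scoring model actually used is not recorded in this dump), with query, document, and negatives holding the record's fields:

    from sentence_transformers import SentenceTransformer, util

    model = SentenceTransformer('all-MiniLM-L6-v2')  # assumed encoder, not recorded here
    q_emb = model.encode(query)
    # one cosine score per candidate, mirroring the negative_scores field above
    negative_scores = [float(util.cos_sim(q_emb, model.encode(n))) for n in negatives]
    document_score = float(util.cos_sim(q_emb, model.encode(document)))
    # document_rank: number of candidates that outscore the positive document
    document_rank = sum(s > document_score for s in negative_scores)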
Method tests the error raised when the endpoint for updating a bucket list item using PUT contains the wrong id
def test_put_item_wrong_id(self): data = {"name": "bucketlist item name", "completed": "true"} email = "test@test.com" _pword = "test" user = User.query.filter_by(email=email).first() bucketlist = BucketList.query.filter_by(user_id=user.id, name="test bucketlist").first() item = BucketListItem.query.filter_by(bucketlist_id=bucketlist.id, id=0).first() self.assertFalse(item) response = self.put_bucketlist_item(email, _pword, bucketlist.id, 0, data) result = json.loads(response.data.decode('utf-8')) self.assertEqual(response.status, '404 NOT FOUND') self.assertEqual( result['message'], 'Bucketlist Item with ID {} not found in the database. You have requested this URI '\ '[/api/v1/bucketlist/1/items/0] but did you mean /api/v1/bucketlist/<int:bucketlist_id>/items/'\ ' or /api/v1/bucketlist/<int:bucketlist_id> or /api/v1/bucketlist ?'.format(0) )
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_id_of_bucket_to_be_edited_is_invalid(self):\n with self.client:\n # Get an auth token\n token = self.get_user_token()\n # Update the bucket name\n res = self.client.put(\n '/bucketlists/bucketid',\n headers=dict(Authorization='Bearer ' + token),\n data=json.dumps(dict(name='Adventure')),\n content_type='application/json'\n )\n data = json.loads(res.data.decode())\n self.assertEqual(res.status_code, 400)\n self.assertTrue(res.content_type == 'application/json')\n self.assertTrue(data['status'] == 'failed')\n self.assertTrue(data['message'] == 'Please provide a valid Bucket Id')", "def test_update_item_incorrect_id(test_client, item):\n\n response = test_client.put(BAD_ITEM_URL,\n data=json.dumps(item),\n content_type='application/json')\n\n data = json.loads(response.get_data())\n\n assert response.status_code == 404\n assert data['error'] == app.NOT_FOUND", "def test_id_of_bucket_to_be_edited_does_not_exist(self):\n with self.client:\n # Get an auth token\n token = self.get_user_token()\n # Update the bucket name\n res = self.client.put(\n '/bucketlists/1',\n headers=dict(Authorization='Bearer ' + token),\n data=json.dumps(dict(name='Adventure')),\n content_type='application/json'\n )\n data = json.loads(res.data.decode())\n self.assertEqual(res.status_code, 404)\n self.assertTrue(res.content_type == 'application/json')\n self.assertTrue(data['status'] == 'failed')\n self.assertTrue(data['message'] == 'The Bucket with Id 1 does not exist')", "def taco_test_put_error_requires_id(self):\n body = '{ \"id\": 400, \"name\": \"item_new\", \"content\": \"after test update\" }'\n env = self.get_env('PUT', '/item', body=body)\n webapi_start(env, lambda status, response_headers: self.assertEqual(status, '501'))", "def test_update_busketlistitem_by_id(self):\n resp = self.client.post('/bucketlists',\n data=json.dumps(self.bucketlist),\n content_type=\"application/json\", headers={\n \"Authorization\": self.token\n })\n self.assertEqual(resp.status_code, 201)\n resp_item = self.client.post('/bucketlistitems/1/items',\n data=json.dumps(\n {\"name\": \"visit the busy surburbs.\"}),\n content_type=\"application/json\", headers={\n \"Authorization\": self.token\n })\n self.assertEqual(resp.status_code, 201)\n update_item = self.client.put('/bucketlistitems/1/items/1',\n data=json.dumps(\n {\"name\": \"visit the busy surburbs and museums too.\"}),\n content_type=\"application/json\", headers={\n \"Authorization\": self.token\n })\n self.assertEqual(update_item.status_code, 201)", "def test_api_edit_bucketlist(self):\n res_post = self.client().post('/bucketlist', data={'name': 'Wake up, Eat, Code, Sleep & Repeat'})\n self.assertEqual(res_post.status_code, 201)\n res_post_in_json = json.loads(res_post.data.decode('UTF-8').replace(\"'\", \"\\\"\"))\n id = res_post_in_json['id']\n res_put = self.client().put(\n f'bucketlist/{id}',\n data={\n 'name': \"Don't forget to exercise\"\n }\n )\n self.assertEqual(res_put.status_code, 200)\n res = self.client().get(f'/bucketlist/{id}')\n self.assertIn(\"exercise\", str(res.data))", "def test_delete_item_wrong_id(self):\r\n email = \"test@test.com\"\r\n _pword = \"test\"\r\n user = User.query.filter_by(email=email).first()\r\n bucketlist = BucketList.query.filter_by(user_id=user.id, name=\"test bucketlist\").first()\r\n item = BucketListItem.query.filter_by(bucketlist_id=bucketlist.id, id=0).first()\r\n self.assertFalse(item)\r\n\r\n response = self.delete_bucketlist_item(email, _pword, bucketlist.id, 0)\r\n result = 
json.loads(response.data.decode('utf-8'))\r\n self.assertEqual(response.status, '404 NOT FOUND')\r\n self.assertEqual(\r\n result['message'],\r\n 'Bucketlist Item with ID {} not found in the database. You have requested this URI '\\\r\n '[/api/v1/bucketlist/1/items/0] but did you mean /api/v1/bucketlist/<int:bucketlist_id>/items/'\\\r\n ' or /api/v1/bucketlist/<int:bucketlist_id> or /api/v1/bucketlist ?'.format(0)\r\n )", "def test_put_bucketlist_item(self):\r\n data = {\"name\": \"bucketlist item name\", \"completed\": \"true\"}\r\n email = \"test@test.com\"\r\n _pword = \"test\"\r\n user = User.query.filter_by(email=email).first()\r\n bucketlist = BucketList.query.filter_by(user_id=user.id, name=\"test bucketlist\").first()\r\n item = BucketListItem.query.filter_by(bucketlist_id=bucketlist.id, id=1).first()\r\n self.assertNotEqual(item.name, \"bucketlist item name\")\r\n self.assertFalse(item.completed)\r\n\r\n response = self.put_bucketlist_item(email, _pword, bucketlist.id, 1, data)\r\n result = json.loads(response.data.decode('utf-8'))\r\n item2 = BucketListItem.query.filter_by(bucketlist_id=bucketlist.id, id=1).first()\r\n self.assertEqual(response.status, '201 CREATED')\r\n self.assertEqual(item2.name, \"bucketlist item name\")\r\n self.assertTrue(item2.completed)", "def test_modify_item_successfully(self):\n self.client.post('/buckets',\n content_type='application/json',\n data=self.bucket, headers=self.header)\n self.client.post('/buckets/1/items',\n content_type='application/json',\n data=self.item,\n headers=self.header)\n response = self.client.put('/buckets/1/items/1',\n content_type='application/json',\n data=self.item_edit,\n headers=self.header)\n self.assertEquals(response.status_code, 200)\n self.assertIn('Item successfully updated',\n response.data.decode())", "def taco_test_put_update(self):\n body = '{ \"id\": 400, \"name\": \"item4\", \"content\": \"after test update\" }'\n env = self.get_env('PUT', '/item/4', body=body)\n webapi_start(env, lambda status, response_headers: self.assertEqual(status, '204'))", "def test_update_bucket(self):\n pass", "def test_bucket_is_updated(self):\n with self.client:\n # Get an auth token\n token = self.get_user_token()\n # Create a Bucket\n response = self.client.post(\n '/bucketlists',\n data=json.dumps(dict(name='Travel')),\n headers=dict(Authorization='Bearer ' + token),\n content_type='application/json'\n )\n # Test Bucket creation\n data = json.loads(response.data.decode())\n self.assertEqual(response.status_code, 201)\n self.assertTrue(data['status'], 'success')\n self.assertTrue(data['name'], 'Travel')\n # Update the bucket name\n res = self.client.put(\n '/bucketlists/1',\n headers=dict(Authorization='Bearer ' + token),\n data=json.dumps(dict(name='Adventure')),\n content_type='application/json'\n )\n data = json.loads(res.data.decode())\n self.assertEqual(res.status_code, 201)\n self.assertTrue(res.content_type == 'application/json')\n self.assertTrue(data['status'] == 'success')\n self.assertTrue(data['name'] == 'Adventure')\n self.assertEqual(data['id'], 1)", "def test_fail_repeated_buckelist_item(self):\r\n user = User.query.filter_by(email=\"test@test.com\").first()\r\n bucketlist = BucketList.query.filter_by(user_id=user.id, name=\"test bucketlist\").first()\r\n item_no = BucketListItem.query.filter_by(bucketlist_id=bucketlist.id).count()\r\n response = self.add_bucketlist_item(\"test@test.com\", \"test\", bucketlist.id, \"test item\")\r\n result = json.loads(response.data.decode('utf-8'))\r\n 
self.assertEqual(response.status, '409 CONFLICT')\r\n self.assertEqual(result['message'], 'Bucketlist Item Exists')\r\n new_item_no = BucketListItem.query.filter_by(bucketlist_id=bucketlist.id).count()\r\n self.assertEqual(item_no, new_item_no)", "def test_edit_bucketlist(self):\n post_data = self.post_a_bucket()\n self.assertEqual(post_data.status_code, 201)\n result_of_put_method = self.client().put(\n '/bucketlists/1',\n headers=dict(Authorization='Bearer '\n + self.token()\n ),\n data={\n \"name\": \"The seasons will be, summer winter and autumn\"\n })\n self.assertEqual(result_of_put_method.status_code, 201)\n result_of_get_method = self.client().get('/bucketlists/1',\n headers=dict(Authorization='Bearer '\n + self.token())\n )\n self.assertIn('The seasons will b', str(result_of_get_method.data))", "def test_update_telegram_id_fail(self):\n url = reverse('telegram_id')\n response = self.second_client.put(url, {}, content_type='application/json')\n self.assertEqual(400, response.status_code)", "def test_app_can_update_a_list(self):\n self.ne=json.dumps({\"newName\":\"pants\"})\n list_update=self.client.put('/shoppinglists/trou',\n data=self.ne,\n headers={\n 'Content-Type':'application/json',\n 'x-access-token':self.tok})\n self.assertIn(\"list doesnt exist\",str(list_update.data)) \n self.assertEqual(list_update.status_code,200)", "def test_update_item_incorrect_value_type(test_client, item_with_bad_value):\n\n response = test_client.put(GOOD_ITEM_URL,\n data=json.dumps(item_with_bad_value),\n content_type='application/json')\n\n data = json.loads(response.get_data())\n\n assert response.status_code == 400\n assert data['error'] == app.BAD_REQUEST", "def test_wrong_admin_put(self):\n\n with self.client:\n token = self.get_token()\n id = 4\n response = self.client.put('api/v1/meals/{}'.format(id),\n data=json.dumps(dict(\n meal_name=\"chips\",\n price=15000\n )),\n content_type='application/json',\n headers=({\"token\": token}))\n data = json.loads(response.data.decode())\n self.assertEqual(data.get('message'), \"Meal not found\")\n self.assertEqual(response.status_code, 400)", "def test_update_car_invalid_id():\n car_data = {\n \"id\": 1,\n \"make\": \"BMW\",\n \"model\": \"3 Series\",\n \"year\": 1998,\n \"vin\": \"JH4CU2F60AC794232\",\n }\n response = client.put(\"/11111\", data=car_data)\n assert response.status_code == STATUS_NOT_FOUND", "def taco_test_put_new(self):\n body = '{ \"id\": 400, \"name\": \"item_new\", \"content\": \"after test update\" }'\n env = self.get_env('PUT', '/item/4', body=body)\n webapi_start(env, lambda status, response_headers: self.assertEqual(status, '204'))", "def test_update_item_good(test_client, item):\n\n response = test_client.put(GOOD_ITEM_URL,\n data=json.dumps(item),\n content_type='application/json')\n\n data = json.loads(response.get_data())\n\n assert response.status_code == 200\n assert data['item']['name'] == item['name']\n assert data['item']['value'] == item['value']", "def test_security_on_put(self):\n # test the update url\n product = Product.objects.all()[0]\n url = '/product/xml/%s/' % product.item_number\n response = self.client.put(url,{'description':'my new description'})\n self.failUnlessEqual(response.status_code, 401)", "def test_add_item_at_using_put(self):\n pass", "def test_update_item_using_post(self):\n pass", "def test_update_item_incorrect_content_type(test_client, item):\n\n response = test_client.put(GOOD_ITEM_URL,\n data=json.dumps(item))\n\n data = json.loads(response.get_data())\n\n assert response.status_code == 
400\n assert data['error'] == app.BAD_REQUEST", "def test_shelflistitem_putpatch_requires_auth(api_settings,\n assemble_custom_shelflist,\n get_shelflist_urls, api_client):\n test_lcode, test_id = '1test', 99999999\n _, _, trecs = assemble_custom_shelflist(test_lcode, [(test_id, {})])\n url = '{}{}'.format(get_shelflist_urls(trecs)[test_lcode], test_id)\n before = api_client.get(url)\n put_resp = api_client.put(url, {})\n patch_resp = api_client.patch(url, {})\n after = api_client.get(url) \n assert put_resp.status_code == 403\n assert patch_resp.status_code == 403\n assert before.data == after.data", "def test_vault_update_vault_item(self):\n pass", "def test_edit_non_existing_item(self):\n response = self.client.put('/api/v1/category/200',\n data=json.dumps(category[3]),\n content_type='application/json',\n headers=self.admin_headers)\n self.assertEqual(response.status_code, 404)\n self.assertIn('category with id 200 does not exist',\n str(response.data))", "def test_put_wrong_data(self):\n new_data = {\"fromMonth\": \"another\"}\n response = self.client.put(self.url + str(self.current_data[-1]['id']) + '/', data=json.dumps(new_data),\n content_type='application/json')\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST, msg=response.content.decode())", "def test_put_db_fail(self):\n test_data = {\n 'first_name': 'new_first_name',\n 'last_name': 'new_last_name'\n }\n with mock.patch('user_profile.models.UserProfile.update') as update:\n update.return_value = False\n response = self.client.put(self.url, json.dumps(test_data), content_type='application/json')\n self.assertEquals(response.status_code, 400)", "def test_put_one(self):\n response = self.client.put('/api/v1/parcels/100')\n self.assertEqual(response.status_code, 200)", "def test_delete_bucketlistitem_by_id(self):\n resp = self.client.post('/bucketlists',\n data=json.dumps(self.bucketlist),\n content_type=\"application/json\", headers={\n \"Authorization\": self.token\n })\n self.assertEqual(resp.status_code, 201)\n resp_item = self.client.post('/bucketlistitems/1/items',\n data=json.dumps(\n {\"name\": \"visit the busy surburbs.\"}),\n content_type=\"application/json\", headers={\n \"Authorization\": self.token\n })\n self.assertEqual(resp.status_code, 201)\n delete_item = self.client.delete('/bucketlistitems/1/items/1',\n headers={\n \"Authorization\": self.token\n })\n self.assertEqual(delete_item.status_code, 204)", "def test_api_update_book_with_id_does_not_exist(self):\n\n\t\tlogin_data = self.login_test_user()\n\t\ttoken = login_data['auth_token']\n\n\t\tbook = self.client.put(\n\t\t\tf'{URL_BOOKS}/1',\n\t\t\theaders=dict(Authorization=f'Bearer {token}'),\n\t\t\tcontent_type='application/json',\n\t\t\tdata=json.dumps(\n\t\t\t\tdict(\n\t\t\t\t\ttitle='updated book'\n\t\t\t\t)\n\t\t\t)\n\t\t)\n\n\t\tbook_res = json.loads(book.data.decode())\n\t\tself.assertTrue(book_res['message'] == 'book with id 1 does not exist')\n\t\tself.assertTrue(book_res['status'] == 'error')", "def test_attempt_to_add_uid_key_causes_error():\n starting_db = create_db(STARTING_DB_INPUT)\n starting_db.put_item(\n Item={\n \"uid\": \"I can TOTALLY update someone else's object\"\n }\n )\n with pytest.raises(ValueError):\n o_obj.update_object_in_db(\n starting_db,\n \"some_uid\",\n json.dumps({\n \"uid\": \"I can TOTALLY update someone else's object\"\n })\n )", "def test_400_bad_requests(self):\n with self.client:\n token = self.get_user_token()\n res = self.client.put(\n '/bucketlists/1',\n headers=dict(Authorization='Bearer ' + 
token),\n content_type='application/json'\n )\n data = json.loads(res.data.decode())\n self.assertEqual(res.status_code, 400)\n self.assertTrue(data['status'] == 'failed')\n self.assertTrue(data['message'] == 'Bad Request')", "def test_duplicate_bucketlist_item(self):\n resp = self.client.post('/bucketlists',\n data=json.dumps(self.bucketlist),\n content_type=\"application/json\", headers={\n \"Authorization\": self.token\n })\n self.assertEqual(resp.status_code, 201)\n resp_item = self.client.post('/bucketlistitems/1/items',\n data=json.dumps(\n {\"name\": \"visit the busy surburbs.\"}),\n content_type=\"application/json\", headers={\n \"Authorization\": self.token\n })\n self.assertEqual(resp.status_code, 201)\n resp_item2 = self.client.post('/bucketlistitems/1/items',\n data=json.dumps(\n {\"name\": \"visit the busy surburbs.\"}),\n content_type=\"application/json\", headers={\n \"Authorization\": self.token\n })\n result = json.loads(resp_item2.data)\n self.assertEqual(result[\"message\"], \"Item with the given name exists.\")\n self.assertEqual(resp_item2.status_code, 409)", "def update_item(id: str, obj: endpoint_model):\n # should this error if exists?\n if obj.id:\n if obj.id != id:\n raise HTTPException(status_code=400, detail=\"id in body does not match id in path\")\n else:\n obj.id = id\n new_obj = db.save(obj)\n return new_obj", "def put_item(key, value):\n try:\n response = table.put_item( Item={ 'my-key': key, 'some-other-key': value )\n print(f\"Successfully added new item\")\n print(f\"Response : {response}\")\n except ClientError as ce:\n print(f\"Failed to creat new item - key : {key}, value : {value}\")\n print(ce)\n\ndef update_nested_item(key, value):\n \"\"\"\n Update a nested item. create \n \"\"\"\n try:\n response = table.update_item( Key={ 'my-key': key },\n UpdateExpression='SET #other-key = :new_value',\n ExpressionAttributeNames={\n '#other-key': 'New-Key'\n },\n ExpressionAttributeValues={ ':new_value': True },\n ReturnValues='ALL_NEW'\n )\n print(\"Successfully created/updated item.\")\n print(f\"Response : {response}\")\n except ClientError as ce:\n print(f\"Failed to update item : {ce}\")", "def test_patch_bucket(self):\n pass", "def test_delete_item_with_non_existing_bucket(self):\n self.client.post('/buckets',\n content_type='application/json',\n data=self.bucket, headers=self.header)\n self.client.post('/buckets/1/items',\n content_type='application/json',\n data=self.item, headers=self.header)\n response = self.client.delete('/buckets/2/items/1'\n , headers=self.header)\n self.assertEquals(response.status_code, 400)\n self.assertIn('Attempting to delete item on non existing bucket',\n response.data.decode())", "def test_update_failure_http_error(self, acme_id, new_name):\n\n api_url = self.get_acme_account_url(acme_id)\n\n # Setup the mocked response\n responses.add(responses.PUT, api_url, status=400)\n\n acme = ACMEAccount(client=self.client)\n\n self.assertRaises(HTTPError, acme.update, acme_id, new_name)", "def test_update_with_invalid_data(self):\n saved_article = self.create_article()\n url = saved_article[0]\n token = saved_article[2]\n response = self.test_client.put(url, self.article_invalid_data2, format='json', HTTP_AUTHORIZATION=token)\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)", "def test_put_with_not_existing_shard_id(self):\n\n url = reverse('file')\n\n data = {\n 'shard_id': '829b2d27-0039-4e9e-a12c-b9799119bfe7',\n 'link_id': \"b8866161-0b1f-4a8e-acde-07047313ec8f\",\n 'parent_datastore_id': 
str(self.test_datastore_obj.id),\n 'chunk_count': 1,\n 'size': 512,\n }\n\n self.client.force_authenticate(user=self.test_user_obj)\n response = self.client.put(url, data)\n\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)", "def test_add_bucketlist_items(self):\r\n email = \"test@test.com\"\r\n _pword = \"test\"\r\n user = User.query.filter_by(email=email).first()\r\n bucketlist = BucketList.query.filter_by(user_id=user.id, name=\"test bucketlist\").first()\r\n item_no = BucketListItem.query.filter_by(bucketlist_id=bucketlist.id).count()\r\n response = self.add_bucketlist_item(email, _pword, bucketlist.id, \"bucketlist item name\")\r\n result = json.loads(response.data.decode('utf-8'))\r\n self.assertEqual(response.status, '201 CREATED')\r\n self.assertEqual(result['message'], 'Bucket list item added')\r\n new_item_no = BucketListItem.query.filter_by(bucketlist_id=bucketlist.id).count()\r\n self.assertLess(item_no, new_item_no)", "def test_PUT4(self):\n payload = {\n \"make\": \"Nissan\",\n \"model\": \"Skyline\",\n \"year\": 1999,\n \"price\": 2200\n }\n r = requests.put(self.address + \"/loremipsum/42\", json=payload)\n self.assertEqual(r.status_code, 400)", "def test_put(self):\n url, port = self.server.address\n\n #couple of basic POSTs\n r = self.client.get(\"http://{0}:{1}/\".format(url, port))\n self.assertEqual(200, r.status_code)\n r = self.client.get(\"http://{0}:{1}\".format(url, port))\n self.assertEqual(200, r.status_code)\n r = self.client.get(\"http://{0}:{1}/200\".format(url, port))\n self.assertEqual(200, r.status_code)\n r = self.client.get(\"http://{0}:{1}/400\".format(url, port))\n self.assertEqual(400, r.status_code)\n\n r = self.client.put(\"http://{0}:{1}/400?foo=bar\".format(url, port))\n self.assertEqual(400, r.status_code)", "def test_beneficiaries_update_withoutID_that_will_fail(self):\n print('the test function name: {}'.format(sys._getframe().f_code.co_name))\n try:\n url = reverse('beneficiary:beneficiary-entity-by-id-update')\n response = self.client.get(url, content_type='application/json')\n return self.assertTrue(response.status_code, 200)\n except Exception as e:\n print(\"reason: \", e)", "def test_wrong_id(self):\n self.request.matchdict = {'user_id': int(self.request.user.id)+4}\n self.request.json_body = {}\n result = user_id_put_view(self.request)['d']\n self.assertEqual(result, error_dict('api_errors', 'not authenticated for this request'))", "def test_update(self):\n\n # test exceptions\n with self.assertRaises(TypeError) as error:\n self.client.data_object.reference.update(1, \"prop\", [self.uuid_1])\n check_error_message(self, error, self.uuid_error_message)\n\n with self.assertRaises(TypeError) as error:\n self.client.data_object.reference.update(self.uuid_1, 1, [self.uuid_2])\n check_error_message(self, error, self.name_error_message(int))\n\n with self.assertRaises(TypeError) as error:\n self.client.data_object.reference.update(self.uuid_1, \"prop\", 1)\n check_error_message(self, error, self.uuid_error_message)\n\n with self.assertRaises(TypeError) as error:\n self.client.data_object.reference.update(self.uuid_1, \"prop\", [1])\n check_error_message(self, error, self.uuid_error_message)\n\n with self.assertRaises(ValueError) as error:\n self.client.data_object.reference.update(\"my UUID\", \"prop\", self.uuid_2)\n check_error_message(self, error, self.valid_uuid_error_message)\n\n with self.assertRaises(ValueError) as error:\n self.client.data_object.reference.update(self.uuid_1, \"prop\", \"my uuid\")\n 
check_error_message(self, error, self.valid_uuid_error_message)\n\n with self.assertRaises(ValueError) as error:\n self.client.data_object.reference.update(self.uuid_1, \"prop\", [\"my uuid\"])\n check_error_message(self, error, self.valid_uuid_error_message)\n\n with self.assertRaises(ValueError) as error:\n self.client.data_object.reference.update(f\"http://localhost:8080/v1/objects/{self.uuid_1}\", \"prop\",\n \"http://localhost:8080/v1/objects/MY_UUID\")\n check_error_message(self, error, self.valid_uuid_error_message)\n\n with self.assertRaises(ValueError) as error:\n self.client.data_object.reference.update(\"http://localhost:8080/v1/objects/My-UUID\", \"prop\",\n f\"http://localhost:8080/v1/objects/{self.uuid_2}\")\n check_error_message(self, error, self.valid_uuid_error_message)\n \n mock_obj = mock_run_rest(status_code=204)\n replace_connection(self.client, mock_obj)\n with self.assertRaises(UnexpectedStatusCodeException) as error:\n self.client.data_object.reference.update(self.uuid_1, \"myProperty\", self.uuid_2)\n check_startswith_error_message(self, error, self.status_code_error_message('update'))\n\n \n mock_obj = mock_run_rest(side_effect=RequestsConnectionError(\"Test!\"))\n replace_connection(self.client, mock_obj)\n with self.assertRaises(weaviate.RequestsConnectionError) as error:\n self.client.data_object.reference.update(self.uuid_1, \"myProperty\", self.uuid_2)\n check_error_message(self, error, self.requests_error_message('update'))\n\n # test valid calls\n connection_mock = mock_run_rest()\n replace_connection(self.client, connection_mock)\n\n self.client.data_object.reference.update(\n \"de998e81-fa66-440e-a1de-2a2013667e77\",\n \"hasAwards\",\n \"fc041624-4ddf-4b76-8e09-a5b0b9f9f832\"\n )\n\n self.client.data_object.reference.update(\n \"4e44db9b-7f9c-4cf4-a3a0-b57024eefed0\",\n \"hasAwards\",\n [\n \"17ee17bd-a09a-49ff-adeb-d242f25f390d\",\n \"f8c25386-707c-40c0-b7b9-26cc0e9b2bd1\",\n \"d671dc52-dce4-46e7-8731-b722f19420c8\"\n ]\n )\n\n connection_mock.run_rest.assert_called()\n\n call_args_list = connection_mock.run_rest.call_args_list\n call_kwargs = call_args_list[0][1]\n\n self.assertEqual(\"/objects/de998e81-fa66-440e-a1de-2a2013667e77/references/hasAwards\", call_kwargs[\"path\"])\n self.assertEqual(REST_METHOD_PUT, call_kwargs[\"rest_method\"])\n self.assertEqual([{'beacon': 'weaviate://localhost/fc041624-4ddf-4b76-8e09-a5b0b9f9f832'}], call_kwargs[\"weaviate_object\"])\n\n call_kwargs = call_args_list[1][1]\n\n self.assertEqual(\"/objects/4e44db9b-7f9c-4cf4-a3a0-b57024eefed0/references/hasAwards\", call_kwargs[\"path\"])\n self.assertEqual(REST_METHOD_PUT, call_kwargs[\"rest_method\"])\n self.assertEqual([{'beacon': 'weaviate://localhost/17ee17bd-a09a-49ff-adeb-d242f25f390d'},\n {'beacon': 'weaviate://localhost/f8c25386-707c-40c0-b7b9-26cc0e9b2bd1'},\n {'beacon': 'weaviate://localhost/d671dc52-dce4-46e7-8731-b722f19420c8'}], call_kwargs[\"weaviate_object\"])", "def test_put_without_shard_id(self):\n\n url = reverse('file')\n\n data = {\n #'shard_id': self.shard1.id,\n 'link_id': \"b8866161-0b1f-4a8e-acde-07047313ec8f\",\n 'parent_datastore_id': str(self.test_datastore_obj.id),\n 'chunk_count': 1,\n 'size': 512,\n }\n\n self.client.force_authenticate(user=self.test_user_obj)\n response = self.client.put(url, data)\n\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)", "def test_kyc_put_request_legal(self):\n pass", "def test_request_for_a_bucket_has_integer_id(self):\n with self.client:\n response = self.client.get(\n 
'/bucketlists/dsfgsdsg',\n headers=dict(Authorization='Bearer ' + self.get_user_token())\n )\n data = json.loads(response.data.decode())\n self.assertEqual(response.status_code, 400)\n self.assertTrue(data['status'] == 'failed')\n self.assertTrue(data['message'] == 'Please provide a valid Bucket Id')", "def test_kyc_put_request(self):\n pass", "def test_validate_put_existing(client):\n response = client.put(\n '/user/1',\n data=json.dumps({\n 'name': 'Jeff Knupp',\n 'email': 'jeff@jeffknupp.com',\n }),\n headers={'Content-Type': 'application/json'}\n )\n assert response.status_code == 400\n assert response.json['message'] == INVALID_ACTION_MESSAGE", "def update_existing_key_fail(self, data, new_data):\n message = 'exists'\n rv = self.add_success(data)\n assert not in_response(rv, message)\n rv = self.add_success(new_data)\n assert not in_response(rv, message)\n rv = self.update_fail(data, message)\n assert self.verify_object(new_data)\n return rv", "def test_get_bucketlist_item_id(self):\n resp = self.client.post('/bucketlists',\n data=json.dumps(self.bucketlist),\n content_type=\"application/json\", headers={\n \"Authorization\": self.token\n })\n self.assertEqual(resp.status_code, 201)\n resp_item = self.client.post('/bucketlistitems/1/items',\n data=json.dumps(\n {\"name\": \"visit the busy surburbs.\"}),\n content_type=\"application/json\", headers={\n \"Authorization\": self.token\n })\n self.assertEqual(resp.status_code, 201)\n get_item = self.client.get('/bucketlistitems/1/items/1', headers={\n \"Authorization\": self.token\n })\n self.assertEqual(resp.status_code, 201)", "def test_update_inventory_not_found(self):\n new_inventory = {'name': 'conditioner', 'quantity': 1, 'status': 'new'}\n data = json.dumps(new_inventory)\n resp = self.app.put('/inventories/0', data=data, content_type='application/json')\n self.assertEquals(resp.status_code, status.HTTP_404_NOT_FOUND)", "def test_shelflistitem_put_data_missing_fields(fname_solr, fname_api,\n start_val, expect_error,\n api_settings,\n assemble_custom_shelflist,\n shelflist_solr_env,\n filter_serializer_fields_by_opt,\n derive_updated_resource,\n send_api_data,\n get_shelflist_urls, api_client):\n test_lcode, test_id = '1test', 99999999\n test_data = [(test_id, {fname_solr: start_val})]\n _, _, trecs = assemble_custom_shelflist(test_lcode, test_data)\n url = '{}{}'.format(get_shelflist_urls(trecs)[test_lcode], test_id)\n before = api_client.get(url)\n serializer = before.renderer_context['view'].get_serializer()\n profile = shelflist_solr_env.profiles['shelflistitem']\n writable = filter_serializer_fields_by_opt(serializer, 'writable', True)\n try_item = derive_updated_resource(before.data, serializer, profile,\n which_fields=writable)\n del(try_item[fname_api])\n req_body = ujson.dumps(try_item)\n resp = send_api_data(api_client, url, req_body, 'put')\n after = api_client.get(url)\n\n if expect_error:\n assert resp.status_code == 400\n assert before.data == after.data\n msg = '{} is not a writable field'.format(fname_api)\n assert msg in resp.data['detail']\n else:\n assert resp.status_code == 200\n assert before.data[fname_api] == start_val\n assert after.data[fname_api] is None", "def test_putorganizations_item(self):\n pass", "def test_deletion_handles_no_bucket_found_by_id(self):\n with self.client:\n response = self.client.delete(\n '/bucketlists/1',\n headers=dict(Authorization='Bearer ' + self.get_user_token())\n )\n data = json.loads(response.data.decode())\n self.assertEqual(response.status_code, 404)\n 
self.assertTrue(data['status'] == 'failed')\n self.assertTrue(data['message'] == 'Bucket resource cannot be found')\n self.assertTrue(response.content_type == 'application/json')", "def test_update_should_not_be_allowed(self):\n response = self.client.put(self.get_url(), {})\n self.assertEqual(response.status_code, status.HTTP_405_METHOD_NOT_ALLOWED)", "def test_request_for_deleting_bucket_has_integer_id(self):\n with self.client:\n response = self.client.delete(\n '/bucketlists/dsfgsdsg',\n headers=dict(Authorization='Bearer ' + self.get_user_token())\n )\n data = json.loads(response.data.decode())\n self.assertEqual(response.status_code, 400)\n self.assertTrue(data['status'] == 'failed')\n self.assertTrue(data['message'] == 'Please provide a valid Bucket Id')", "def test_update_order_failure(self):\n # create a order to update\n test_order = OrderFactory()\n resp = self.app.post('/orders',\n json=test_order.serialize(),\n content_type='application/json')\n self.assertEqual(resp.status_code, status.HTTP_201_CREATED)\n\n # update the order\n new_order = resp.get_json()\n new_order['product_id'] = 2\n resp = self.app.put('/orders/{}'.format(5),\n json=new_order,\n content_type='application/json')\n self.assertEqual(resp.status_code, status.HTTP_404_NOT_FOUND)", "def test_delete_bucketlist_item(self):\n self.register_user()\n result = self.login_user()\n access_token = json.loads(result.data.decode())['access_token']\n\n # create a bucketlist by making a POST request\n res = self.client().post(\n '/api/v1/bucketlists/',\n headers=dict(Authorization=\"Bearer \" + access_token),\n data=self.bucketlist)\n self.assertEqual(res.status_code, 201)\n # get the json with the bucketlist\n results = json.loads(res.data.decode())\n\n # create a bucketlist item by making a POST request and add it to the created bucketlist\n res = self.client().post(\n '/api/v1/bucketlists/{}/items/'.format(results['id']),\n headers=dict(Authorization=\"Bearer \" + access_token),\n data={\n \"name\": \"Eat fried crabs\"\n })\n self.assertEqual(res.status_code, 201)\n # get the json containing the created bucketlist item\n res_item = json.loads(res.data.decode())\n\n # delete the bucketlist item we just created\n res = self.client().delete(\n '/api/v1/bucketlists/{}/items/{}'.format(results['id'], res_item['id']),\n headers=dict(Authorization=\"Bearer \" + access_token), )\n self.assertEqual(res.status_code, 200)\n\n # Test to see if it exists, should return a 404\n result = self.client().get(\n '/api/v1/bucketlists/{}/items/1'.format(results['id']),\n headers=dict(Authorization=\"Bearer \" + access_token))\n self.assertEqual(result.status_code, 404)", "def test_put_unauthorized(self):\n\n url = reverse('file')\n\n data = {\n 'shard_id': self.shard1.id,\n 'link_id': \"b8866161-0b1f-4a8e-acde-07047313ec8f\",\n 'parent_datastore_id': str(self.test_datastore_obj.id),\n 'chunk_count': 1,\n 'size': 512,\n }\n\n self.client.force_authenticate(user=self.test_user2_obj)\n response = self.client.put(url, data)\n\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)", "def test_delete_bucketlist_item(self):\r\n email = \"test@test.com\"\r\n _pword = \"test\"\r\n user = User.query.filter_by(email=email).first()\r\n bucketlist = BucketList.query.filter_by(user_id=user.id, name=\"test bucketlist\").first()\r\n item = BucketListItem.query.filter_by(bucketlist_id=bucketlist.id, id=1).first()\r\n self.assertTrue(item)\r\n\r\n response = self.delete_bucketlist_item(email, _pword, bucketlist.id, item.id)\r\n result = 
json.loads(response.data.decode('utf-8'))\r\n self.assertEqual(response.status, '200 OK')\r\n self.assertEqual(\r\n result['message'],\r\n 'Bucketlist Item with ID {} deleted'.format(item.id)\r\n )\r\n item = BucketListItem.query.filter_by(bucketlist_id=bucketlist.id, id=1).first()\r\n self.assertFalse(item)", "def test_api_update_book_validation_error(self):\n\n\t\t# create book\n\t\tadd_book = {\n\t\t\t'title': 'Hello Books',\n\t\t\t'isbn': '5698745124'\n\t\t}\n\t\tlogin_data = self.login_test_user()\n\t\ttoken = login_data['auth_token']\n\t\tres = self.client.post(\n\t\t\tf'{URL_BOOKS}',\n\t\t\theaders=dict(Authorization=f'Bearer {token}'),\n\t\t\tcontent_type='application/json',\n\t\t\tdata=json.dumps(add_book)\n\t\t)\n\t\tempty_book = {}\n\t\t# update book\n\t\tbook = self.client.put(\n\t\t\tf'{URL_BOOKS}/1',\n\t\t\theaders=dict(Authorization=f'Bearer {token}'),\n\t\t\tcontent_type='application/json',\n\t\t\tdata=json.dumps(\n\t\t\t\tempty_book\n\t\t\t)\n\t\t)\n\n\t\tbook_res = json.loads(book.data.decode())\n\t\tself.assertIn('Nothing was changed', str(book_res))", "def test_put_no_data(self):\n test_data = {}\n response = self.client.put(self.url, json.dumps(test_data), content_type='application/json')\n self.assertEquals(response.status_code, 400)", "def test_update_car_valid_id():\n car_data = {\n \"id\": 1,\n \"make\": \"BMW\",\n \"model\": \"3 Series New\",\n \"year\": 2019,\n \"vin\": \"JH4CU2F60AC794232\",\n }\n response = client.put(\"/1\", data=car_data)\n assert response.status_code == STATUS_OK\n assert response.json() == car_data\n\n # Checking data persistence with get\n response = client.get(\"/1\")\n assert response.json() == car_data", "def test_put_wrong_way_id(self):\n data = {\n 'start_time': '2019-10-29',\n 'end_time': '2019-12-29',\n 'week_day': 6,\n 'time': '23:58:59'\n }\n url = reverse('notification', kwargs={'way_id': 543, 'notification_id': self.notification.id})\n response = self.client.put(url, json.dumps(data, cls=DjangoJSONEncoder), content_type='application/json')\n self.assertEqual(response.status_code, 400)", "def test_update_a_todo(self):\n # hit the API endpoint\n response = self.make_a_request(\n kind=\"put\",\n version=\"v1\",\n id=2,\n data=self.valid_data\n )\n self.assertEqual(response.data, self.valid_data)\n self.assertEqual(response.status_code, status.HTTP_200_OK)\n # test with invalid data\n response = self.make_a_request(\n kind=\"put\",\n version=\"v1\",\n id=3,\n data=self.invalid_data\n )\n self.assertEqual(\n response.data[\"message\"],\n \"TODO item requires state, due_date and text\"\n )\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)", "def test_shelflistitem_update_err_nonwritable(method, api_settings,\n assemble_custom_shelflist,\n shelflist_solr_env,\n filter_serializer_fields_by_opt,\n derive_updated_resource,\n send_api_data,\n get_shelflist_urls, api_client):\n test_lcode, test_id = '1test', 99999999\n _, _, trecs = assemble_custom_shelflist(test_lcode, [(test_id, {})])\n url = '{}{}'.format(get_shelflist_urls(trecs)[test_lcode], test_id)\n before = api_client.get(url)\n serializer = before.renderer_context['view'].get_serializer()\n profile = shelflist_solr_env.profiles['shelflistitem']\n try_item = derive_updated_resource(before.data, serializer, profile)\n\n if method == 'put':\n req_body = ujson.dumps(try_item)\n elif method == 'patch':\n req_body = jsonpatch.make_patch(before.data, try_item)\n\n unwritable = filter_serializer_fields_by_opt(serializer, 'writable', False)\n resp = 
send_api_data(api_client, url, req_body, method)\n after = api_client.get(url)\n\n assert resp.status_code == 400\n assert before.data == after.data\n for fname in unwritable:\n msg = '{} is not a writable field'.format(fname)\n assert msg in resp.data['detail']", "def put(self, user, id):\n # parse request data\n if 'name' not in self.request.form:\n return \"Bucketlist not Update\", 202\n\n bucketlist_name = self.request.form['name']\n\n # validate bucketlist\n if not bucketlist_name:\n return \"Name cannot be empty\", 401\n\n # search for the bucketlist_id\n bucketlist = Bucketlist.query.filter_by(\n id=id, created_by=user.email).first()\n\n # return 400 if bucketlist non exixtant or not belongs to this user\n if bucketlist is None:\n return 'Bucketlist not found', 202\n\n # Update bucketlist and save changes\n bucketlist.name = bucketlist_name\n bucketlist.save()\n\n return \"Successfully updated bucketlist\", 201", "def test_api_delete_bucketlist(self):\n\n res_post = self.client().post('/bucketlist', data={'name': \"Don't forget to exercise\"})\n self.assertEqual(res_post.status_code, 201)\n res_post_in_json = json.loads(res_post.data.decode('UTF-8'))\n id = res_post_in_json['id']\n res_delete = self.client().delete(f\"/bucketlist/{id}\")\n self.assertEqual(res_delete.status_code, 200)\n\n # should return 404 after delete the data\n res = self.client().get(f'/bucketlist/{id}')\n self.assertEqual(res.status_code, 404)", "def test_put(self):\n client = RestClient(host=self.host, username='')\n rest_url = 'some/url/'\n \n # Mock good response\n with responses.RequestsMock() as rsps:\n rsps.add(responses.PUT, f'{self.host}/{rest_url}', status=200,\n json={'value':\"good!\"})\n r = client.put(rest_url)", "def test_invalid_token_put(self):\n with self.client:\n id = self.get_id()\n response = self.client.put('api/v1/meals/{}'.format(id),\n data=json.dumps(dict(\n meal_name=\"chips\",\n price=15000\n )),\n content_type='application/json',\n headers=({\"token\": \"12345\"}))\n data = json.loads(response.data.decode())\n self.assertEqual(response.status_code, 401)\n self.assertEqual(data.get('message'), \"Invalid token.Please login\")", "def test_put(self):\n\n url = reverse('file')\n\n data = {\n 'shard_id': self.shard1.id,\n 'link_id': \"b8866161-0b1f-4a8e-acde-07047313ec8f\",\n 'parent_datastore_id': str(self.test_datastore_obj.id),\n 'chunk_count': 1,\n 'size': 512,\n }\n\n self.client.force_authenticate(user=self.test_user_obj)\n response = self.client.put(url, data)\n\n self.assertEqual(response.status_code, status.HTTP_201_CREATED)\n self.assertIn('file_id', response.data)\n self.assertIn('file_transfer_id', response.data)\n self.assertIn('file_transfer_secret_key', response.data)", "def test_update_http_error(self, data_update, requests_mock, capsys):\n requests_mock.put(data_url, status_code=300)\n with pytest.raises(RuntimeError):\n r = operations.update(data_url, data=data_update)\n assert 'HTTP error: 300' in capsys.readouterr().out", "def test_delete_item_incorrect_id(test_client):\n\n response = test_client.delete(GOOD_ITEM_URL)\n\n data = json.loads(response.get_data())\n\n assert response.status_code == 404\n assert data['error'] == app.NOT_FOUND", "def test_put_non_id(self):\n\n data = {\n 'time': '23:38:54'\n }\n\n url = reverse('notification',\n kwargs={'way_id': self.notification.way_id})\n response = self.client.put(url, json.dumps(data, cls=DjangoJSONEncoder), content_type='application/json')\n self.assertEqual(response.status_code, 400)", "def 
test_shelflistitem_update_items(method, api_settings,\n assemble_custom_shelflist,\n shelflist_solr_env,\n filter_serializer_fields_by_opt,\n derive_updated_resource, send_api_data,\n get_shelflist_urls, api_client):\n test_lcode, test_id = '1test', 99999999\n _, _, trecs = assemble_custom_shelflist(test_lcode, [(test_id, {})])\n url = '{}{}'.format(get_shelflist_urls(trecs)[test_lcode], test_id)\n before = api_client.get(url)\n serializer = before.renderer_context['view'].get_serializer()\n writable = filter_serializer_fields_by_opt(serializer, 'writable', True)\n unwritable = filter_serializer_fields_by_opt(serializer, 'writable', False)\n profile = shelflist_solr_env.profiles['shelflistitem']\n try_item = derive_updated_resource(before.data, serializer, profile,\n which_fields=writable)\n\n if method == 'put':\n req_body = ujson.dumps(try_item)\n elif method == 'patch':\n req_body = jsonpatch.make_patch(before.data, try_item)\n \n resp = send_api_data(api_client, url, req_body, method)\n after = api_client.get(url)\n\n assert resp.status_code == 200\n assert resp.data['links']['self']['href'].endswith(url)\n assert resp.data['links']['self']['id'] == test_id\n\n print(before.data)\n print(try_item)\n print(after.data)\n\n for fname in writable:\n assert after.data[fname] == try_item[fname]\n assert after.data[fname] != before.data[fname]\n\n for fname in unwritable:\n assert after.data[fname] == try_item[fname]\n assert after.data[fname] == before.data[fname]", "def test_user_update_procedure_failure(self):\n p1 = models.Procedure.objects.create(\n name='temp',\n overview='bla bla bla'\n )\n p1.speciality.set([self.speciality.pk])\n p1.save()\n\n res = self.client.get(PROCEDURE_URL)\n\n url = get_item_url(res.data[0]['id'])\n new_payload = {\n 'other_details': 'new details'\n }\n\n response = self.client.patch(url, new_payload, format='json')\n\n self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)", "def test_update_nonexist(self):\n promotion = PromotionFactory()\n promotion.id = '1cak41-nonexist'\n try:\n promotion.update()\n except KeyError:\n self.assertRaises(KeyError)", "def test_invalid_collection_put(testapp):\n missing_required = {\n 'title': \"Testing\",\n 'type': \"object\"\n }\n testapp.post_json('/embedding-tests', missing_required, status=422)\n\n nonexistent_field = {\n 'title': \"Testing\",\n 'type': \"string\",\n 'descriptionn': \"This is a descriptionn\", # typo\n }\n testapp.post_json('/embedding-tests', nonexistent_field, status=422)\n\n valid = {\n 'title': \"Testing\",\n 'type': \"object\",\n 'description': \"This is a valid object\",\n }\n invalid_update = {\n 'descriptionn': \"This is an invalid update\",\n }\n item_url = testapp.post_json('/embedding-tests', valid, status=201).location\n testapp.put_json(item_url, invalid_update, status=422)", "def test_put_wrong_notification_id(self):\n\n data = {\n 'time': '23:38:54'\n }\n\n url = reverse('notification',\n kwargs={'way_id': self.notification.way_id, 'notification_id': 6778})\n response = self.client.put(url, json.dumps(data, cls=DjangoJSONEncoder), content_type='application/json')\n self.assertEqual(response.status_code, 400)", "def test_update_task(self):\n rv = TEST_CLIENT.patch(\n \"/tasks/foo\",\n json={\n \"name\": \"foo 2\",\n },\n )\n result = rv.json()\n expected = {\n \"message\": \"The specified task does not exist\",\n \"code\": \"TaskNotFound\",\n }\n self.assertDictEqual(expected, result)\n self.assertEqual(rv.status_code, 404)", "def taco_test_post_param_update(self):\n body = '{ 
\"id\": 400, \"name\": \"item4\", \"content\": \"after test update\" }'\n env = self.get_env('POST', '/item/4', body=body)\n result = webapi_start(env, lambda status, response_headers: self.assertEqual(status, '204'))\n # webapi_start(env, lambda status, response_headers: self.assertEqual(status, '204'))\n debug.log('result', result)", "def test_update_risk_profile_using_put(self):\n pass", "def test_update_not_my_product(self):\n post_data = {\n \"category\": {\n \"name\": \"general\",\n \"index\": 0\n },\n \"name\": \"Producto 2 modified\",\n \"description\": \"Descripcion de producto 2 modified\",\n \"selling\": True,\n \"price\": 20,\n }\n\n response = self.client.put('/api/1.0/products/2/', data=post_data, format='json')\n self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)", "def test_put_review_detail_fail(self):\n client = Client()\n review1_id = Review.objects.get(content='TEST_CONTENT').id\n review2_id = Review.objects.get(content='TEST_CONTENT2').id\n review3_id = Review.objects.get(content='TEST_CONTENT3').id\n review4_id = Review.objects.get(content='TEST_CONTENT4').id\n no_review_id = review1_id + review2_id + review3_id + review4_id\n response = client.put('/api/review/'+str(review1_id)+'/', {\n 'content': 'TEST_PUT_CONTENT',\n 'restaurant_name': 'TEST_REST',\n 'menu_name': 'TEST_MENU',\n 'rating': 3\n }, 'application/json')\n self.assertEqual(response.status_code, 401)\n client.login(username='TEST_USER_2',\n email='TEST_EMAIL_2', password='TEST_PW_2')\n response = client.put('/api/review/'+str(review1_id)+'/', {\n 'content': 'TEST_PUT_CONTENT',\n 'restaurant_name': 'TEST_REST',\n 'menu_name': 'TEST_MENU',\n 'rating': 3\n }, 'application/json')\n self.assertEqual(response.status_code, 403)\n client.login(username='TEST_USER_1',\n email='TEST_EMAIL_1', password='TEST_PW_1')\n response = client.put('/api/review/'+str(review1_id)+'/', {\n 'content': 'TEST_PUT_CONTENT',\n 'restaurant_name': 'TEST_REST',\n 'menu_name': 'TEST_MENU',\n 'rating': 3\n })\n self.assertEqual(response.status_code, 400)\n response = client.put('/api/review/'+str(no_review_id)+'/', {\n 'content': 'TEST_PUT_CONTENT',\n 'restaurant_name': 'TEST_REST',\n 'menu_name': 'TEST_MENU',\n 'rating': 3\n }, 'application/json')\n self.assertEqual(response.status_code, 404)\n response = client.put('/api/review/'+str(review1_id)+'/', json.dumps({\n 'content': 'TEST_PUT_CONTENT',\n 'restaurant_name': 'TEST_REST_N',\n 'menu_name': 'TEST_MENU_N',\n 'rating': 3,\n 'category': 'NEW_TEST_CATEGORY'\n }), 'application/json')\n self.assertEqual(response.status_code, 400)", "def test_put_book(session, client, book1_dict):\n json_data = json.dumps(book1_dict)\n post_response = client.post(\"/books\", data=json_data, headers={\"Content-Type\": \"application/json\"})\n assert 201 == post_response.status_code\n\n change_dict = {\n \"title\": \"Another Title2\",\n \"publish_date\": \"2010-01-10\",\n \"subject\": \"Subject\",\n \"genre\": \"Scifi Horror\"\n }\n\n json_data = json.dumps(change_dict)\n\n expected_payload = {\n \"book_id\": 1,\n \"title\": \"DifferentTitle2\",\n \"publish_date\": \"1900-01-01\",\n \"subject\": \"Reference\",\n \"genre\": \"Scifi Horror\",\n \"notes\": [],\n \"authors\": [\n {\n \"author_id\": 1,\n \"first_name\": \"Herman\",\n \"last_name\": \"Melville\",\n \"middle_name\": \"M\"\n }\n ]\n }\n\n \"\"\"put with non-existent resource\"\"\"\n get_response = client.put(\"/books/8\", data=json_data, headers={\"Content-Type\": \"application/json\"})\n assert 404 == get_response.status_code\n\n 
\"\"\"put with invalid id\"\"\"\n get_response = client.put(\"/books/L\", data=json_data, headers={\"Content-Type\": \"application/json\"})\n assert 400 == get_response.status_code\n\n \"\"\"put with no content\"\"\"\n get_response = client.put(\"books/1\", headers={\"Content-Type\": \"application/json\"})\n assert 400 == get_response.status_code\n # This works with postman but not here.\n # \"\"\"put with correct info.\"\"\"\n # json_data = json.dumps(change_dict)\n # get_response = client.put(\"/books/1\", data=json_data, headers={\"Content-Type\": \"application/json\"})\n # print('here!!!!!!')\n # print(get_response)\n # print(get_response.status_code)\n # print(get_response.get_json)\n # assert 200 == get_response.status_code", "def test_update_book(self):\n\n delete_books()\n\n book = create_book(\"title one\")[\"book\"]\n\n with test_client.put(\n \"/book/{}/\".format(book[\"id\"]),\n data={\n \"title\": \"title one updated\"\n }\n ) as response:\n\n self.assertEqual(\n json.loads(\n response.get_data(as_text=True)\n ),\n {\n \"status\": \"success\",\n \"book\": {\n **book,\n \"title\": \"title one updated\"\n }\n }\n )\n\n self.assertEqual(\n read_book(book[\"id\"]),\n {\n \"status\": \"success\",\n \"book\": {\n **book,\n \"title\": \"title one updated\"\n }\n }\n )\n\n \"\"\"\n clear the table, create several books, update them and read them\n \"\"\"\n\n delete_books()\n\n book_one = create_book(\"title one\")[\"book\"]\n book_two = create_book(\"title two\")[\"book\"]\n\n with test_client.put(\n \"/book/{}/\".format(book_one[\"id\"]),\n data={\n \"title\": \"title one updated\"\n }\n ) as response:\n\n self.assertEqual(\n json.loads(\n response.get_data(as_text=True)\n ),\n {\n \"status\": \"success\",\n \"book\": {\n **book_one,\n \"title\": \"title one updated\"\n }\n }\n )\n\n self.assertEqual(\n read_book(book_one[\"id\"]),\n {\n \"status\": \"success\",\n \"book\": {\n **book_one,\n \"title\": \"title one updated\"\n }\n }\n )\n\n with test_client.put(\n \"/book/{}/\".format(book_two[\"id\"]),\n data={\n \"title\": \"title two updated\"\n }\n ) as response:\n\n self.assertEqual(\n json.loads(\n response.get_data(as_text=True)\n ),\n {\n \"status\": \"success\",\n \"book\": {\n **book_two,\n \"title\": \"title two updated\"\n }\n }\n )\n\n self.assertEqual(\n read_book(book_two[\"id\"]),\n {\n \"status\": \"success\",\n \"book\": {\n **book_two,\n \"title\": \"title two updated\"\n }\n }\n )", "def test_update_telegram_id_if_validation_fail(self):\n test_data = {'telegram_id': 'test_token'}\n url = reverse('telegram_id')\n response = self.client.put(url, json.dumps(test_data), content_type='application/json')\n self.assertEqual(response.status_code, 400)", "def test_primary_key_update_failure(self):\r\n with self.assertRaises(ValidationError):\r\n TestQueryUpdateModel.objects(partition=uuid4(), cluster=3).update(cluster=5000)", "def test_task_update(self):\r\n admin = UserFactory.create()\r\n user = UserFactory.create()\r\n non_owner = UserFactory.create()\r\n app = AppFactory.create(owner=user)\r\n task = TaskFactory.create(app=app)\r\n root_task = TaskFactory.create(app=app)\r\n data = {'state': '1'}\r\n datajson = json.dumps(data)\r\n root_data = {'state': '4'}\r\n root_datajson = json.dumps(root_data)\r\n\r\n ## anonymous\r\n res = self.app.put('/api/task/%s' % task.id, data=data)\r\n assert_equal(res.status, '401 UNAUTHORIZED', res.status)\r\n ### real user but not allowed as not owner!\r\n url = '/api/task/%s?api_key=%s' % (task.id, non_owner.api_key)\r\n res = 
self.app.put(url, data=datajson)\r\n assert_equal(res.status, '403 FORBIDDEN', res.status)\r\n\r\n ### real user\r\n url = '/api/task/%s?api_key=%s' % (task.id, user.api_key)\r\n res = self.app.put(url, data=datajson)\r\n out = json.loads(res.data)\r\n assert_equal(res.status, '200 OK', res.data)\r\n assert_equal(task.state, data['state'])\r\n assert task.id == out['id'], out\r\n\r\n ### root\r\n res = self.app.put('/api/task/%s?api_key=%s' % (root_task.id, admin.api_key),\r\n data=root_datajson)\r\n assert_equal(res.status, '200 OK', res.data)\r\n assert_equal(root_task.state, root_data['state'])\r\n\r\n # PUT with not JSON data\r\n res = self.app.put(url, data=data)\r\n err = json.loads(res.data)\r\n assert res.status_code == 415, err\r\n assert err['status'] == 'failed', err\r\n assert err['target'] == 'task', err\r\n assert err['action'] == 'PUT', err\r\n assert err['exception_cls'] == 'ValueError', err\r\n\r\n # PUT with not allowed args\r\n res = self.app.put(url + \"&foo=bar\", data=json.dumps(data))\r\n err = json.loads(res.data)\r\n assert res.status_code == 415, err\r\n assert err['status'] == 'failed', err\r\n assert err['target'] == 'task', err\r\n assert err['action'] == 'PUT', err\r\n assert err['exception_cls'] == 'AttributeError', err\r\n\r\n # PUT with fake data\r\n data['wrongfield'] = 13\r\n res = self.app.put(url, data=json.dumps(data))\r\n err = json.loads(res.data)\r\n assert res.status_code == 415, err\r\n assert err['status'] == 'failed', err\r\n assert err['target'] == 'task', err\r\n assert err['action'] == 'PUT', err\r\n assert err['exception_cls'] == 'TypeError', err", "def test_modify_userid_404(self):\n resp = self.app.put('/users/thisuserdoesntexist',\n data=json.dumps(self.test_user1_data))\n assert resp.status_code == 404", "def test_update(self, mock_put):\n self.policies.update(id=333114, policy_update=self.policy_show_response)\n\n mock_put.assert_called_once_with(\n url='https://api.newrelic.com/v2/alert_policies/333114.json',\n headers=self.policies.headers,\n data=json.dumps(self.policy_show_response)\n )", "def test_update_success(self, mock_put):\n self.policies.update(\n id=self.policy_single_response['policy']['id'],\n name=self.policy_single_response['policy']['name'],\n incident_preference=self.policy_single_response['policy']['incident_preference']\n )\n\n mock_put.assert_called_once_with(\n url='https://api.newrelic.com/v2/alerts_policies/{0}.json'.format(\n self.policy_single_response['policy']['id']\n ),\n headers=self.policies.headers,\n data=json.dumps({\n \"policy\": {\n \"name\": self.policy_single_response['policy']['name'],\n \"incident_preference\": self.policy_single_response['policy']['incident_preference']\n }\n })\n )", "def test_update_nonexistant_product(self):\n resp = self.admin_register()\n reply = self.admin_login()\n token = reply['token']\n product_update = dict(\n prod_name='NY_jeans',\n category='denims',\n stock=50,\n price=180\n )\n resp = self.client.put(\n '/api/v1/products/1',\n content_type='application/json',\n data=json.dumps(product_update),\n headers={'Authorization': 'Bearer {}'.format(token)}\n )\n reply = json.loads(resp.data.decode())\n \n self.assertEqual(reply['message'], \"This product doesn't exists in the Inventory!\")\n self.assertEqual(resp.status_code, 400)", "def test_primary_key_update_failure(self):\n with self.assertRaises(ValidationError):\n TestQueryUpdateModel.objects(partition=uuid4(), cluster=3).update(cluster=5000)" ]
[ "0.8020481", "0.7893578", "0.78514856", "0.7821233", "0.769216", "0.74274623", "0.74184465", "0.74172276", "0.7320262", "0.7253771", "0.7236808", "0.7163079", "0.7149734", "0.7134282", "0.7058692", "0.701769", "0.69915277", "0.6915926", "0.691373", "0.6895343", "0.68850976", "0.6878846", "0.6847633", "0.68400025", "0.679686", "0.6796606", "0.6767939", "0.6750086", "0.67410725", "0.6735798", "0.6726284", "0.67159224", "0.6712318", "0.6665834", "0.66397315", "0.66375506", "0.66220015", "0.6621002", "0.6619569", "0.6618104", "0.6614102", "0.6574128", "0.6573067", "0.6572155", "0.6554987", "0.6553855", "0.65369177", "0.65291697", "0.6500255", "0.64988995", "0.6497469", "0.6493029", "0.6482661", "0.64593667", "0.64476997", "0.6444319", "0.6438113", "0.64214844", "0.6407597", "0.64058477", "0.6405789", "0.6401992", "0.6391012", "0.6387442", "0.6385948", "0.6384876", "0.63832784", "0.6381246", "0.6377051", "0.6366788", "0.63634485", "0.6359857", "0.6356137", "0.6355098", "0.63533336", "0.6352457", "0.6346372", "0.63428724", "0.63329244", "0.6332286", "0.6323161", "0.631804", "0.63130987", "0.6307447", "0.63053447", "0.6291537", "0.6286806", "0.6283423", "0.6281969", "0.62804896", "0.62740576", "0.62737274", "0.627189", "0.62694716", "0.62661725", "0.6254534", "0.6247767", "0.6246787", "0.62434995", "0.6242375" ]
0.85060626
0