Dataset columns:
query: string (lengths 7 to 9.5k)
document: string (lengths 10 to 1.07M)
negatives: list (19 items per row)
metadata: dict
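Each row below pairs a natural-language query with one positive Ruby document and a list of hard negatives; the metadata field declares a triplet objective over (query, document, negatives). The following is a minimal sketch, assuming each record has already been parsed into a Ruby hash with those keys; the helper name `triplets_for` is illustrative and not part of the dataset.

# Expand one parsed record into (anchor, positive, negative) triplets,
# as implied by metadata["objective"]["triplet"].
# Assumption: `record` is a Ruby hash with "query", "document" and "negatives" keys.
def triplets_for(record)
  record["negatives"].map do |negative|
    [record["query"], record["document"], negative]
  end
end
# Usage: triplets_for(row).first # => [query, document, first_negative]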
A hash that maps from the original card id to the duplicated card id
def duplicated_cards
  Cache.hash_get_all("#{@batch_id}_duplicated_cards").presence || {}
end
[ "def register_duplicated_card(original_card_id:, to_card_id:)\n Cache.hash_set(\"#{@batch_id}_duplicated_cards\", original_card_id, to_card_id)\n\n remapper = CardDuplicatorMapper::RemapLinkedCards.new(\n batch_id: @batch_id,\n )\n # remap the card that was just duplicated\n remapper.remap_cards(original_card_id, to_card_id)\n\n # if all have completed, then we run through them all, basically a last pass\n # to ensure we match any links that were created later than the duplicates, etc.\n # TODO: How to ensure all cards were mapped or if duplication failed?\n return unless all_cards_mapped?\n\n remapper.call\n end", "def hash_card_number\n if card_number.present?\n self.card_hash = (Digest::SHA2.new(512) << card_number.to_s).to_s\n end\n end", "def card_hash\n card_hash_indv_keys = [:question, :answer, :category]\n card_hash_array = self.string_split_card.map do |card_array|\n card_hash_indv_keys.zip(card_array).to_h\n end\n card_hash_array\n end", "def hash\n # TODO: Produce a hash (using default hash method) of the credit card's\n # serialized contents.\n # Credit cards with identical information should produce the same hash.\n self.to_s.hash\nend", "def hash\n # TODO: Produce a hash (using default hash method) of the credit card's\n # serialized contents.\n # Credit cards with identical information should produce the same hash.\n to_s.hash\n end", "def hash\n # Memoizing such a simple hash value seems silly, however the\n # profiler showed the Card#hash method as having 22% of the runtime. My\n # memoizing the hash value that was reduced to 12%.\n return @hash unless @hash.nil?\n @hash = @value.hash ^ @suit.hash\n end", "def hash\n # TODO: implement this method\n # - Produce a hash (using default hash method) of the credit card's\n # serialized contents.\n # - Credit cards with identical information should produce the same hash\n to_s.hash\n end", "def hash\n [rank, suit].hash\n end", "def hash\n @rank.hash ^ @suit.hash\n end", "def hash_dup\n save\n end", "def card_id\n card.id\n end", "def rehash() end", "def hash_key\n frame_id\n end", "def hash\n\t\t[@id].hash\n\tend", "def dynamic_card_map\n map = {}\n Dir.glob(\"data/hsreplay-cards/*.html\").each do |file|\n card_data = HsreplayCardHtmlParser.new(open(file, 'r')).card_data\n map[card_data[\"card_id\"]] = card_data[\"dbf_id\"]\n end\n map\n end", "def catelog obj\n @idnum_hash[obj.id] = obj\n end", "def compare(card)\n same = false\n if @number != card.number && @color != card.color && @shape != card.shape && @opacity != card.opacity\n same = true\n puts 'Unique'\n else\n puts 'Not Unique'\n end\n same\n end", "def set_id_to_cache_key_map\n @ids.each do |id|\n @id_to_cache_key_map[id] = {\n kit: get_kit_cache_key(id),\n saas: get_saas_cache_key(id)\n }\n end\n end", "def hash_id\n Hasher.encode(self.id)\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
converts hand from an array of cards to an array of sorted scores of said cards
def cards_by_score
  scores = {}
  @cards.each do |card|
    scores[card] = card.score
  end
  Hash[scores.sort_by { |card, score| -score }]
end
[ "def hand_score\n cards.map {|a| a.value}.sort {|a,b| a <=> b}.last\n end", "def sort_by_suit\n \tnew_hand = []\n while @cards.size > 0\n \tpos = 0 # position of minimal card\n \tc = @cards[0] # minimal card\n \t@cards.each_with_index do |card, index|\n \t\tc1 = card\n # puts \"c: #{c.inspect} and c1: #{c1.inspect}\"\n # puts \" and c1.suit: #{c1.suit}\"\n \t\tif (c1.suit < c.suit || (c1.suit == c.suit && c1.value < c.value) )\n \t\t\tpos = index\n \t\t\tc = c1\n \t end\n \tend\n\n \tremove_card_at(pos)\n \tnew_hand << c\n end\n\n @cards = new_hand\n end", "def normalize\n # eliminate irrelevant suits\n size = @cards.size\n minsize = size - 2\n @cards.collect! do |card|\n if @suits[card.suit] >= minsize\n card\n else\n @suits[card.suit]-=1\n @suits[4]+=1\n card.to_ex\n end\n end\n # sort the hand\n @cards.sort!{|a, b| a.hash <=> b.hash}\n # c = Array.new(7){|i| @cards[i] || 0}\n # if c[0].hash<c[4].hash then t=c[0]; c[0]=c[4];c[4]=t end\n # if c[1].hash<c[5].hash then t=c[1]; c[1]=c[5];c[5]=t end\n # if c[2].hash<c[6].hash then t=c[2]; c[2]=c[6];c[6]=t end\n # if c[0].hash<c[2].hash then t=c[0]; c[0]=c[2];c[2]=t end\n # if c[1].hash<c[3].hash then t=c[1]; c[1]=c[3];c[3]=t end\n # if c[4].hash<c[6].hash then t=c[4]; c[4]=c[6];c[6]=t end\n # if c[2].hash<c[4].hash then t=c[2]; c[2]=c[4];c[4]=t end\n # if c[3].hash<c[5].hash then t=c[3]; c[3]=c[5];c[5]=t end\n # if c[0].hash<c[1].hash then t=c[0]; c[0]=c[1];c[1]=t end\n # if c[2].hash<c[3].hash then t=c[2]; c[2]=c[3];c[3]=t end\n # if c[4].hash<c[5].hash then t=c[4]; c[4]=c[5];c[5]=t end\n # if c[1].hash<c[4].hash then t=c[1]; c[1]=c[4];c[4]=t end\n # if c[3].hash<c[6].hash then t=c[3]; c[3]=c[6];c[6]=t end\n # if c[1].hash<c[2].hash then t=c[1]; c[1]=c[2];c[2]=t end\n # if c[3].hash<c[4].hash then t=c[3]; c[3]=c[4];c[4]=t end\n # if c[5].hash<c[6].hash then t=c[5]; c[5]=c[6];c[6]=t end\n end", "def sort_by_value\n \tnew_hand = []\n \twhile @cards.size > 0\n \t\tpos = 0\n \t\tc = @cards[0]\n \t\t@cards.each_with_index do |card, index|\n \t\t\tc1 = card\n \t\t\tif (c1.value < c.value || (c1.suit == c.suit && c1.suit < c.suit) )\n pos = index\n c = c1\n end\n end\n\n remove_card_at(pos)\n new_hand << c\n end\n @cards = new_hand\n\n end", "def rank_cards_played cards\n return cards unless cards.is_a?(Array)\n cards = cards.sort{|a,b| CARD_VALUES[b.try(:face_value)] <=> CARD_VALUES[a.try(:face_value)]}\n end", "def sort_by_suit(cards)\n @cards = cards\n @multi_suit_array = Array.new # An Array that will contains all suits arrays\n @hearts_suit = Array.new\n @clubs_suit = Array.new\n @spikes_suit = Array.new\n @diamonds_suit = Array.new\n @cards.each do |i| # Sort cards by suit\n if i[0].chr == 'C'\n @clubs_suit.push(i)\n elsif i[0].chr == 'D'\n @diamonds_suit.push(i)\n elsif i[0].chr == 'H'\n @hearts_suit.push(i)\n elsif i[0].chr == 'S'\n @spikes_suit.push(i)\n end\n end\n @multi_suit_array.push(@clubs_suit)\n @multi_suit_array.push(@diamonds_suit)\n @multi_suit_array.push(@hearts_suit)\n @multi_suit_array.push(@spikes_suit)\n @multi_suit_array.each do |i|\n if i.size >= 5\n return i\n end\n return []\n end\nend", "def sort_cards_by_most(array_of_ranks)\n array_of_ranks.sort_by {|rank| -@hand[rank].length}\n end", "def sorted_cards\n sorted_cards = @cards.sort_by { |card| card.rank }\n sorted_cards.group_by { |card| card.suit }\n end", "def sort!\n @cards.sort_by! 
{ |card| [-card_rank_to_count[card.rank], -card.to_i] }\n end", "def sort\n @cards.sort\n end", "def calculate_score\n score = 0\n \n # @hand = [\"2H\", \"10C\", \"AS\"]\n @hand.each do |card|\n value = card.chop\n\n if value == 'J' || value == 'Q' || value == 'K'\n score += 10\n elsif value == 'A'\n score += 1\n else\n score += value.to_i\n end\n end\n\n score\n # card_value = @hand[suit].chop\n # convert_cards(card_value)\n end", "def order_by_value\n @hand = @hand.sort_by{|card| card.point }\n end", "def sort_scores(unsorted_scores, highest_possible_score)\n array = []\n array << highest_possible_score\n unsorted_scores.sort.reverse.each { |score| array << score }\nend", "def sorted_card_ranks\n sorted_ranks = cards.collect(&:rank).sort.reverse\n\n case type\n when FLUSH, HIGH_CARD\n sorted_ranks\n when THREE_OF_A_KIND\n ranks_with_triple_at_front(sorted_ranks)\n when ONE_PAIR\n ranks_with_pair_at_front(sorted_ranks)\n end\n end", "def compare_hands(score,curr_hand,new_hand)\r\n if curr_hand.first > score then return [score, new_hand] # new_hand has a better base score\r\n elsif curr_hand.first == score # curr_hand and new_hand have same base score\r\n # Compare the overall \"highness\" of the cards of two hands\r\n # [2♥,2♦,4♦,4♠,13♣] : [8♣,2♥,2♦,4♦,4♠] => [4,4,16,16,169] : [64,4,4,16,16] => 209 : 104 => 1\r\n kick_comp = new_hand.collect { |card| card.rank**2 }.sum <=> curr_hand.last.collect { |card| card.rank**2 }.sum\r\n return [score, new_hand] if kick_comp == 1 # new_hand has better kickers\r\n end\r\n curr_hand # If new_hand score wasn't better, return curr_hand\r\n end", "def get_suit_and_ranks(hand)\n\t\t# change 10 to T\n\t\thand1 = hand.gsub('10', 'T')\n\t\thand2 = hand1.gsub(' ','')\n\t\tsuits = []\n\t\thand2.each_char.with_index { |e, idx| suits << e unless idx.even? }\t\t#=> [\"S\", \"H\", \"S\", \"D\", \"H\"]\n\t\t\n\t\tranks = []\n\t\thand2.each_char.with_index { |e, idx| ranks << e unless idx.odd? }\t\t#=> [\"2\", \"4\", \"6\", \"4\", \"J\"]\n\t\tranks2 = []\n\t\tranks.each { |e|\tranks2 << '--23456789TJQKA'.index(e) }\t\t# => [2, 4, 6, 4, 11]\n\t\t\n\t\trsorted_ranks = ranks2.sort.reverse\t\t# => [11,6,4,2,2]\n\t\t\n\t\tranks_hsh = {}\t\t# => {11=>1, 6=>1, 4=>1, 2=>2}\n\t\trsorted_ranks.each {|e| ranks_hsh.key?(e) ? ranks_hsh[e] += 1 : ranks_hsh[e] = 1 }\n\t\treturn suits, ranks_hsh\n\tend", "def tie_breaker_cards\n matched = self.matched_cards\n sorted = @cards.sort.reverse\n # sorted so the card with highest value is first \n if five_of_a_kind? 
\n # All cards break the tie\n return sorted \n elsif flush?\n # All cards break the tie\n return sorted \n elsif four_of_a_kind?\n four = matched.find{ |rank, cards| cards.size == 4}\n # quads break the tie first, then the other card \n return four + [(sorted - four).first] \n elsif full_house?\n three = matched.find{ |rank, cards| cards.size == 3}\n two = matched.find{ |rank, cards| cards.size == 2}\n return three + two\n elsif straight?\n # Special case for ace, 2, 3, 4 ,5 straight, which sorts as\n # 2,3,4,5,A \n if sorted.first.rank == Rank::Ace && sorted.last.rank == Rank::Two \n ace = sorted.pop \n sorted.unshift(ace) # put ace at the back\n return [ sorted.first ] # should be the 5 now \n else\n return [ sorted.first ] \n end\n elsif three_of_a_kind?\n three = matched.find{ |rank, cards| cards.size == 3} \n return three + (sorted - three).first(2)\n elsif two_pair?\n pairs = [] \n matched.each{ |rank, cards| pairs << cards if cards.size == 2 } \n two_pair = pairs[0] + pairs[1]\n two_pair + [(sorted - two_pair).first]\n elsif pair?\n two = matched.find{ |rank, cards| cards.size == 2} \n two + (sorted - two).first(3)\n else\n sorted.first(5)\n end\n end", "def map_cards(cards)\n cards.map(&:chars).map do |(rank, suit)|\n [rank.to_i.zero? ? TEXT_VALUES[rank] : rank.to_i, suit]\n end.sort.reverse!\n end", "def best_hand\n @hands.sort_by! { |hand| hand.type }\n # p \"@hands ==\", @hands\n best = @hands.select { |hand| hand.type == @hands[-1].type }\n # p \"best ==\", best\n if best.size > 1 # must chk keycards\n best.sort_by! { |hand| hand.keycards.reverse }\n best.select! { |hand| hand.keycards == best[-1].keycards }\n # p \"best ==\", best\n if best.size > 1 # must chk sidecards\n best.sort_by! { |hand| hand.sidecards }\n best.select! { |hand| hand.sidecards == best[-1].sidecards }\n # if best.size > 1 # a tie\n # do nothing\n # end\n end\n end\n best.map { |hand| to_rs(hand.cards) }\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
check if the hand already contains exactly 5 cards
def hand?
  @cards_by_score.length == 5
end
[ "def high_card?\n cards.uniq(&:value).size == 5 && !flush? && !straight?\n end", "def flush()\n suits = []\n self.hand.each {|card| suits << card.suit }\n \n return suits.any? { |ele| suits.count(ele) == 5}\n end", "def complete?\n @cards.length == 5\n end", "def six_cards?(player_check)\n if (player_check.hand.length == 6) && (player_check.hand_total < 21)\n return true\n else\n return false\n end\n end", "def has_lost?\n @deck.cards.length < 4\n end", "def flush?\n per_suit = matched_suits\n for suit, count in per_suit do\n return true if count >= 5\n end \n return false \n end", "def deal5cards (n)", "def checkSize()\n\t\t#Returns true if your hand has at least 4 cards.\n\t\tif @hand.length >= 4\n\t\t\treturn true\n\t\telse\n\t\t\treturn false\n\t\tend\n\tend", "def multiple_cards(x, value=true)\n queue = (value ? @hand_values.deep_dup : @hand_suits.deep_dup)\n queue.each do |card|\n return card if queue.count(card)>=x\n end\n false\n end", "def out_of_cards?\n @pile.empty?\n end", "def run_of_five?\n index_adjusted = []\n card_number_values.each_with_index do |card_value, index|\n index_adjusted << (card_value - index)\n end\n\n value_counts = array_counts(index_adjusted).values\n value_counts.include?(5)\n end", "def is_blackjack()\n hand_value() == 21 && @cards.length == 2\n end", "def one_pair?\n cards.uniq(&:value).size == 4\n end", "def blackjack(hand)\n value(hand) == 21 && hand.length == 2\n end", "def check_one_suit(cards)\n cards.map(&:suit).uniq.size == 1\n end", "def flush?(cards)\r\n # Create a copy of the cards sorted by suit to optimize the flush check\r\n cards_copy = cards.sort_by {|card| card.suit}\r\n \r\n # If we don't even have five cards, we can prematurely return false\r\n return false if cards.size < 5\r\n \r\n # We need to take consecutive \"slices\" of the array to see if a set of 5 cards all with the same suit exists\r\n cards_copy.each_cons(5) do |slice|\r\n # Map the card slice to a new array with a list of the slice's suits\r\n suits = slice.map {|card| card.suit}\r\n \r\n # If only one suit was found, a flush was found -- return the slice containing the flush\r\n return slice if suits.uniq.length == 1\r\n end\r\n \r\n # No flush was found. Return false\r\n return false\r\n end", "def shouldHit\n # If 17 or above, lock the hand so it cannot receive any more cards\n \tif @hands[0].checkHand > 16 or @hands[0].checkHand < 0\n \t @hands[0].lock\n \tend\n \t@hands[0].canGetCards\n end", "def full_house(hand)\n if same_card(hand, 2) > 0 and same_card(hand, 3) > 0\n return 6\n end\n return 0\n end", "def total_cards_ok\n \ttotal = (@player_a.count + @player_b.count + @limbo_stack.count)\n \treturn total == 52\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
returns an array of cards with the given number of multiples
def get_multiples(number)
  cards = []
  @cards_by_score.each_key do |card|
    if @card_scores.count(card.score) == number
      cards << card
    end
  end
  cards
end
[ "def get_cards\n a = Array.new\n (0..31).each do |i|\n a << Card.new(@cards.slice(i*2,2))\n end\n a\n end", "def initialize_cards\n cards = []\n 4.times { cards += (2..14).to_a }\n cards.sample(52)\n end", "def repetitions(n)\n @hand.group_by{ |card| card.point }.select { |k, v| v.count == n }\n end", "def populate\n ((1..8).to_a * 2).shuffle\n .map {|num| Card.new(num)}\n end", "def build_deck\n 0.upto(51).map{ |i| Card.new(i)}\n end", "def set(n)\n card_values = cards.map(&:value)\n grouped = card_values.group_by(&:itself).values\n grouped.select { |pair| pair.size == n }\n end", "def create_multipliers(isbn)\n multipliers = [] # initialize an empty array to hold multipliers\n # if the number is isbn10 create an array of integers (1 - 9) to multiply each isbn digit\n # otherwise it's isbn13 so create a 12-element array of alternating 1s and 3s\n output_raw_number(isbn).length == 10 ? (multipliers = (1..9).to_a) : 6.times { multipliers.push(1); multipliers.push(3) }\n return multipliers\nend", "def combos(cards)\n cards.to_a.combination(3).to_a\n end", "def deal5cards (n)", "def create_multipliers(isbn)\n multipliers = [] # initialize an empty array to hold multipliers\n # if the number is isbn10 create an array of integers (1 - 9) to multiply each isbn digit\n # otherwise it's isbn13 so create a 12-element array of alternating 1s and 3s\n output_raw_number(isbn).length == 10 ? (multipliers = (1..9).to_a) : 6.times { multipliers.push(1, 3) }\n return multipliers\nend", "def split_cards_by_n_kind(hand, n)\n hand.partition { |card| hand.count(card) == n }\n end", "def three_five_multiples(num)\n multiples = []\n 3.upto(num - 1) do |n|\n multiples << n if n % 3 == 0 || n % 5 == 0\n end\n multiples.reduce(0, :+)\nend", "def select_every_n arr, n=1\n ans = []\n arr.each_with_index do |item, i|\n ans.push item if i%n == 0\n end\n ans\nend", "def multiples_of_3_and_5\n numbers = 1...1000\n divisible = []\n numbers.each do |num|\n if num % 3 == 0 || num % 5 == 0\n divisible << num\n end\n end\n divisible.inject { |sum, num| sum + num }\nend", "def multiples_of_five(number_array)\n array1 = []\n array2 = []\n final_array = [array1, array2]\n number_array.each do |number|\n if divide_by_five(number) == true\n array1 << number\n else\n array2 << number\n end\n end\n return final_array\n end", "def multiple_cards(x, value=true)\n queue = (value ? @hand_values.deep_dup : @hand_suits.deep_dup)\n queue.each do |card|\n return card if queue.count(card)>=x\n end\n false\n end", "def multisum(num)\n array_of_5s = []\n (1..num).each { |n| array_of_5s.push(n) if n % 5 == 0 || n % 3 == 0 }\n array_of_5s.inject(:+)\n\nend", "def multiples num1, num2, divisor\n x = []\n (num1...num2).each do |i|\n if is_divisible i, divisor\n x << i\n end\n end\n x\n end", "def generate_deck(cards_available)\r\n\tfor color in 0...3\r\n\t\tfor symbol in 0...3\r\n\t\t\tfor shading in 0...3\r\n\t\t\t\tfor amount in 1...4\r\n\t\t\t\t\tcards_available.push(Card.new color, symbol, shading, amount)\r\n\t\t\t\tend\r\n\t\t\tend\r\n\t\tend\r\n\tend\r\nend" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Used in development mode for on-the-fly generation of public/netzke/ext.[js|css]
def ext
  respond_to do |format|
    format.js { render :text => Netzke::Core::DynamicAssets.ext_js }
    format.css { render :text => Netzke::Core::DynamicAssets.ext_css }
  end
end
[ "def initial_dynamic_javascript\n res = []\n # res << %(Ext.Ajax.extraParams = {authenticity_token: '#{form_authenticity_token}'}; // Rails' forgery protection)\n res << %{Ext.ns('Netzke');}\n res << %{Ext.ns('Netzke.core');}\n res << %{Netzke.RelativeUrlRoot = '#{ActionController::Base.config.relative_url_root}';}\n res << %{Netzke.RelativeExtUrl = '#{ActionController::Base.config.relative_url_root}/extjs';}\n\n res << %{Netzke.core.directMaxRetries = '#{Netzke::Core.js_direct_max_retries}';}\n\n res.join(\"\\n\")\n end", "def netzke_init(params = {})\n Netzke::Core.platform = params[:platform] || :ext\n theme = params[:theme] || params[:ext_theme] || :default\n raw([netzke_css_include(theme), netzke_css, netzke_js_include, netzke_js].join(\"\\n\"))\n end", "def frontend_scripts\n\n @full_frontend_scripts ||= get_full_frontend_scripts\n\n end", "def initial_dynamic_javascript\n res = []\n res << %(Ext.Ajax.extraParams = {authenticity_token: '#{form_authenticity_token}'}; // Rails' forgery protection)\n res << %{Ext.ns('Netzke');}\n res << %{Netzke.RelativeUrlRoot = '#{ActionController::Base.config.relative_url_root}';}\n res << %{Netzke.RelativeExtUrl = '#{ActionController::Base.config.relative_url_root}/extjs';}\n res.join(\"\\n\")\n end", "def build_dev\n timer_block(\n 'Start [development] build for *.js files',\n 'JS time: ') do\n all_js_into_one_file\n end\n end", "def vite_client_src\n prefix_asset_with_host('@vite/client') if dev_server_running?\n end", "def netzke_js\n res = []\n if Netzke::Core.javascript_on_main_page\n res << content_for(:netzke_js_classes)\n res << \"\\n\"\n end\n res << \"Ext.onReady(function(){\"\n res << content_for(:netzke_on_ready)\n res << \"});\"\n\n javascript_tag res.join(\"\\n\")\n end", "def netzke_js(_params = {})\n res = []\n res << content_for(:netzke_js_classes)\n\n res << 'Ext.onReady(function(){'\n res << content_for(:netzke_on_ready)\n res << '});'\n\n javascript_tag(res.join(\"\\n\"))\n end", "def ext(name, config = {}) #:doc:\n comp = Netzke::ExtComponent.new(name, config)\n content_for :netzke_on_ready, raw(\"#{comp.js_component_render}\")\n raw(comp.js_component_html)\n end", "def vite_client_src\n prefix_vite_asset('@vite/client') if dev_server_running?\n end", "def install_sample_files\n super\n inject_line_before root.join('apps/web/templates/application.html.erb'), '</head>', <<-HTML\n <%= vite_client %>\n <%= vite_javascript 'application' %>\n HTML\n end", "def js\n puts 'Compressing JS files...'\n `java -jar ./_scripts/yuicompressor-2.4.2.jar ../www/_res/js/uncompressed/main.js -o ../www/_res/js/main.js`\n # --nomunge\nend", "def netzke_init(params = {})\n Netzke::Core.platform = params[:platform] || :ext\n theme = params[:theme] || params[:ext_theme]\n\n # Rails' forgery protection\n content_for :netzke_js_classes, %Q(\\n\\nExt.Ajax.extraParams = {authenticity_token: '#{form_authenticity_token}'};)\n\n raw([netzke_css_include(params), netzke_css(params), netzke_js_include(params), netzke_js(params)].join(\"\\n\"))\n end", "def request_debug_assets?; end", "def add_vendor_files\n say_quietly \"Copying files...\"\n\n %w[auth backend frontend].each do |section|\n template \"vendor/assets/javascripts/archangel/#{section}.js\"\n template \"vendor/assets/stylesheets/archangel/#{section}.css\"\n end\n end", "def copy_js_files\n available_js.each do |name|\n filename = [name, :js].compact.join(\".\")\n case name\n when 'Controller'\n template \"js/#{filename}\", File.join(\"app/assets/javascripts/controller\", 
\"#{plural_table_name.capitalize}.js\") \n when 'Model'\n template \"js/#{filename}\", File.join(\"app/assets/javascripts/model\", \"#{singular_table_name.capitalize}.js\")\n when 'Store'\n template \"js/#{filename}\", File.join(\"app/assets/javascripts/store\", \"#{plural_table_name.capitalize}.js\")\n else\n template \"js/#{filename}\", File.join(\"app/assets/javascripts/view\", singular_table_name, filename)\n end\n end\n end", "def required_js_files(context)\n scripts = ['right']\n config = RightRails::Config\n\n\n if config.include_scripts_automatically?\n # hooking up the 'rails' javascript module if required\n scripts << 'right/rails' if config.include_rails_module?\n\n # adding the modules if needed\n scripts += modules_registry_for(context).collect do |package|\n \"right/#{package}\"\n end\n\n # swapping to the sources in the development mode\n if config.swap_builds_and_sources? && config.dev_env?\n scripts = scripts.collect do |package|\n \"#{package}-src\"\n end\n end\n\n # loading up the locales if available\n if defined?(I18n)\n locale_file = \"#{config.locales_path}/#{I18n.locale.to_s.downcase}\"\n\n if File.exists? \"#{locale_file}.js\"\n scripts << locale_file.slice(config.public_path.size + \"/javascripts/\".size, locale_file.size)\n end\n end\n end\n\n # switching to CDN server if asked\n if !config.dev_env? && config.use_cdn_in_production?\n scripts.map! do |script|\n header = File.read(\"#{config.public_path}/javascripts/#{script}.js\", 100)\n\n if version = header[/\\d+\\.\\d+\\.\\d+/]\n script += \"-#{version}\"\n end\n\n if script.slice(0, 6) == 'right/' # plugins and modules\n script.gsub! 'right/', (\n header.include?('/ui/') ? 'ui/' : 'plugins/'\n )\n script.gsub! 'plugins/', '' if script.include?('/i18n/')\n end\n\n \"#{config.cdn_url}/#{script}.js\"\n end\n end\n\n scripts\n end", "def static_files; end", "def render_deferred_javascript_files\n #write any deffered javascript files\n return '' if @content_for_javascript_files.blank?\n js_code = \"\\n<!-- DEFFERRED Javascripts -->\\n#{@content_for_javascript_files}\"\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Used in development mode for on-the-fly generation of public/netzke/touch.[js|css]
def touch
  respond_to do |format|
    format.js { render :text => Netzke::Core::DynamicAssets.touch_js }
    format.css { render :text => Netzke::Core::DynamicAssets.touch_css }
  end
end
[ "def build_dev\n timer_block(\n 'Start [development] build for *.js files',\n 'JS time: ') do\n all_js_into_one_file\n end\n end", "def request_debug_assets?; end", "def js\n puts 'Compressing JS files...'\n `java -jar ./_scripts/yuicompressor-2.4.2.jar ../www/_res/js/uncompressed/main.js -o ../www/_res/js/main.js`\n # --nomunge\nend", "def add_vendor_files\n say_quietly \"Copying files...\"\n\n %w[auth backend frontend].each do |section|\n template \"vendor/assets/javascripts/archangel/#{section}.js\"\n template \"vendor/assets/stylesheets/archangel/#{section}.css\"\n end\n end", "def vite_client_src\n prefix_asset_with_host('@vite/client') if dev_server_running?\n end", "def install_sample_files\n super\n inject_line_before root.join('apps/web/templates/application.html.erb'), '</head>', <<-HTML\n <%= vite_client %>\n <%= vite_javascript 'application' %>\n HTML\n end", "def copy_js_files\n available_js.each do |name|\n filename = [name, :js].compact.join(\".\")\n case name\n when 'Controller'\n template \"js/#{filename}\", File.join(\"app/assets/javascripts/controller\", \"#{plural_table_name.capitalize}.js\") \n when 'Model'\n template \"js/#{filename}\", File.join(\"app/assets/javascripts/model\", \"#{singular_table_name.capitalize}.js\")\n when 'Store'\n template \"js/#{filename}\", File.join(\"app/assets/javascripts/store\", \"#{plural_table_name.capitalize}.js\")\n else\n template \"js/#{filename}\", File.join(\"app/assets/javascripts/view\", singular_table_name, filename)\n end\n end\n end", "def recompile_coffeescript\n return unless Rails.env.development?\n \n Dir.glob File.join(Rails.root, \"app\", \"javascript\", \"**\", \"*.coffee\") do |file|\n base = File.dirname(file)\n package = File.basename base\n out_package = File.join(Rails.root, \"public\", \"javascripts\", \"#{package}.js\")\n if !File.exists?(out_package) or File.mtime(out_package) < File.mtime(file)\n manifest = open(File.join(base, \"MANIFEST\")).read.split(\"\\n\").compact\n source = manifest.map do |line_item|\n line_item.strip!\n if line_item.blank?\n nil\n else\n open(File.join(base, \"#{line_item}.coffee\")).read\n end\n end.compact.join(\"\\n\")\n output = CoffeeScript.compile source\n File.open(out_package, \"w\") {|f| f.write output }\n break \n end\n end\n end", "def timeline_min\n @js = \"\"\n @css = Kompress::CSS.new(File.open(\"#{TimelineSetter::ROOT}/public/stylesheets/timeline-setter.css\").read).css\n libs = Dir.glob(\"#{TimelineSetter::ROOT}/public/javascripts/vendor/**\")\n libs.each { |lib| @js << File.open(lib,'r').read }\n @min_html = Kompress::HTML.new(timeline_markup).html\n @js << File.open(\"#{TimelineSetter::ROOT}/public/javascripts/timeline-setter.min.js\", 'r').read\n @timeline = tmpl(\"timeline-min.erb\")\n end", "def generate_public\n copy_file \"swfobject/swfobject.js\", \"public/javascripts/swfobject.js\"\n copy_file \"swfobject/expressInstall.swf\", \"public/expressInstall.swf\"\n template \"swfobject/index_dynamic.html.erb\", \"public/#{file_name}.html\"\n end", "def vite_client_src\n prefix_vite_asset('@vite/client') if dev_server_running?\n end", "def ext\n respond_to do |format|\n format.js {\n render :text => Netzke::Core::DynamicAssets.ext_js\n }\n\n format.css {\n render :text => Netzke::Core::DynamicAssets.ext_css\n }\n end\n end", "def compile_js_files(file, prod=true)\n\t# use development style options for now, even in production\n\tif prod\n\t\toptions = {:output => {:comments => :none }}\n\t\tFile.open(file, \"w\") { |f| f.write(Uglifier.compile(concat_js_files, 
options)) }\n\telse\n\t\t#options = {:output => {:comments => :all, :beautify => true, :preserve_line => true}}\n\t\tFile.open(file, \"w\") { |f| f.write(concat_js_files) }\n\tend\n\n\tputs \" \\e[32mwrite #{file}\\e[0m\"\nend", "def frontend_scripts\n\n @full_frontend_scripts ||= get_full_frontend_scripts\n\n end", "def copy_javascripts\n if ::Rails.version[0..2].to_f >= 3.1\n #puts \"The javascripts do not need to be installed since Rails 3.1\"\n else\n copy_file \"../../../../../app/assets/javascripts/on_the_spot_code.js\", \"public/javascripts/on_the_spot.js\"\n copy_file \"../../../../../app/assets/javascripts/jquery.jeditable.js\", \"public/javascripts/jquery.jeditable.js\"\n copy_file \"../../../../../app/assets/javascripts/jquery.jeditable.checkbox.js\", \"public/javascripts/jquery.jeditable.checkbox.js\"\n copy_file \"../../../../../app/assets/stylesheets/on_the_spot.css\", \"public/stylesheets/on_the_spot.css\"\n end\n end", "def min_js_file\n build_path.join('scripts.min.js')\n end", "def min\n puts 'minifying js'\n min_file = \"#{ LIBPATH }lib#{ File::SEPARATOR }xui-min.js\"\n doc_file = \"#{ LIBPATH }lib#{ File::SEPARATOR }xui.js\"\n sh \"java -jar #{LIBPATH}/util/yuicompressor-2.3.6.jar --charset UTF-8 -o #{min_file} #{doc_file}\"\nend", "def asset_handler\n tags = String.new\n tags << tag(:link, rel: \"stylesheet\", href: \"/stylesheets/icons.css\")\n if controller_name == \"rich_subscriptions\"\n tags << content_tag(:script, \"\", src: \"/javascripts/#{fingerprinted_asset('map')}.js\")\n tags << tag(:link, rel: \"stylesheet\", href: \"/stylesheets/leaflet/leaflet.css\")\n tags << tag(:link, rel: \"stylesheet\", href: \"/stylesheets/leaflet/leaflet-slider.css\")\n tags << tag(:link, rel: \"stylesheet\", href: \"/stylesheets/leaflet/L.Control.Locate.min.css\")\n tags << tag(:link, rel: \"stylesheet\", href: \"/stylesheets/#{fingerprinted_asset('map')}.css\")\n else\n tags << content_tag(:script, \"\", src: \"/javascripts/#{fingerprinted_asset('application')}.js\")\n tags << tag(:link, rel: \"stylesheet\", href: \"/stylesheets/#{fingerprinted_asset('application')}.css\")\n end\n tags.html_safe\n end", "def all_js_into_one_file\n File.open(base_js_file, 'w') do |file|\n js_files.each do |js_file|\n file.write('/* file: ' + js_file + \" */\\n\") if development?\n file.write(File.read(js_file))\n end\n end\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Main dispatcher of old-style (Sencha Touch) HTTP requests. The URL contains the name of the component, as well as the method of this component to be called, according to the double-underscore notation. E.g.: some_grid__post_grid_data.
def endpoint_dispatch(endpoint_path)
  component_name, *sub_components = endpoint_path.split('__')
  component_instance = Netzke::Base.instance_by_config(Netzke::Core.session[:netzke_components][component_name.to_sym])

  # We render text/plain, so that the browser never modifies our response
  response.headers["Content-Type"] = "text/plain; charset=utf-8"
  render :text => component_instance.invoke_endpoint(sub_components.join("__"), params), :layout => false
end
[ "def render_component(url_options, general_options)\n filtered_params = [\"action\", \"controller\", \"content_item_url\"] \n params.each_pair do |k, v|\n if k.match(\"(.+)_#{url_options[:id]}_(.+)\")\n url_options[$2] = v\n elsif k.match(\"(.+[a-zA-Z]+)\")\n url_options[\"_page_#{$1}\"] = v unless filtered_params.include?($1) or general_options[:no_page_params]\n end\n end\n url = url_for(url_options)\n querystring = URI.parse(url).query\n\n env = {\n \"rack.version\" => [0, 1],\n \"rack.input\" => StringIO.new(\"\"),\n \"rack.errors\" => $stderr,\n \"rack.url_scheme\" => \"http\",\n \"rack.run_once\" => false,\n \"rack.multithread\" => false,\n \"rack.multiprocess\" => false,\n \"QUERY_STRING\" => querystring,\n \"REQUEST_METHOD\" => \"GET\",\n \"PATH_INFO\" => url,\n \"REQUEST_PATH\" => url,\n \"REQUEST_URI\" => url\n }\n\n %w(rack.session rack.session.options rack.session.record rack.request.cookie_hash rack.request.cookie_string\n SERVER_SOFTWARE HTTP_USER_AGENT HTTP_ACCEPT_ENCODING HTTP_ACCEPT_CHARSET\n HTTP_ACCEPT_LANGUAGE HTTP_KEEP_ALIVE HTTP_COOKIE HTTP_VERSION SERVER_PROTOCOL HTTP_HOST\n SERVER_NAME SERVER_PORT REMOTE_ADDR SCRIPT_NAME).each { |key| env[key] = request.env[key] }\n\n resp = ActionController::Routing::Routes.call(env)\n if resp[0] == 200\n after_render_component(resp)[2].body\n else\n raise_component_error(resp[2].body)\n end\n end", "def dynamic_dispatch(*args, **kwargs)\n method = caller[0][/`.*'/][1..-2]\n return @client.send(*(Dispatch.dynamic(method, *args, **kwargs)), @creds, @transaction, @environment)\n end", "def dispatch(uri, method = :get)\n\n debug \"Dispatching #{uri}\" if $DBG\n\n # Extract the query string.\n\n path, query = uri.split(\"?\", 2)\n path ||= \"\"\n\n # Remove trailing '/' that fucks up the dispatching\n # algorithm.\n\n path.gsub!(%r{/$}, \"\")\n\n # Try to route the path.\n\n path = @router.route(path) if @router\n\n # The characters after the last '.' in the path form the\n # extension that itself represents the expected response\n # content type.\n\n ext = File.extname(path)[1..-1] || \"html\"\n\n # The resource representation format for this request.\n\n if format = @formats.by_extension[ext]\n # Remove the extension from the path.\n path = path.gsub(/\\.(.*)$/, \"\")\n else\n # gmosx: Don't raise exception, just pass the latest part\n # as a parameter.\n # raise ActionError.new(\"Cannot respond to '#{path}' using the '#{ext}' format representation.\")\n format = @formats.by_extension[\"html\"]\n end\n\n # Try to extract the controller from the path (that may also\n # include 'nice' parameters). This algorithm tries to find\n # the bigest substring of the path that represents a mount\n # path for a controller.\n\n key = path.dup\n\n while (controller = @controllers[key]).nil?\n key = key[%r{^(/.+)/.+$}, 1] || \"\"\n end\n\n # Try to extract the action from the path. This\n # algorithm tries to find the bigest substring of the path\n # that represents an action of this controller.\n #\n # The algorithm respects action name conventions, ie\n # simple/sub/action maps to simple__sub__action.\n\n action = key = path.sub(%r{^#{key}}, '').gsub(%r{^/}, '').gsub(%r{/}, '__')\n\n while (!action.blank?) 
and !controller.action_or_template?(action, format)\n # gmosx: the final '_' fixes a bug user/view/_xxx_\n action = action[/^(.+)__.+$/, 1]\n action.gsub!(/_$/, \"\") if action\n end\n \n # Extract the 'nice' parameters.\n\n params = key.sub(%r{^#{action}}, '').gsub(/^__/, '').split('__')\n\n # Do we have an action?\n\n if action.blank?\n # Try to use a standard action for this http method.\n #--\n # FIXME: this is dangerous if we want to handle a post\n # method from an index (blank) action. Only perform this\n # on 'API' calls.\n #++\n=begin\n case method\n when :get\n action = \"index\"\n\n when :post\n action = \"create\"\n\n when :delete\n action = \"delete\"\n\n when :put\n action = \"update\"\n end\n=end\n action = \"index\"\n\n unless controller.action_or_template?(action, format)\n # raise ActionError.new(\"Cannot respond to '#{path}' (action: #{action}) using '#{controller}'\")\n end\n end\n\n # Pad the 'nice' parameters with nil values.\n\n if (ar = controller.instance_method(action).arity) > 0\n params.concat(Array.new(ar - params.size, nil))\n end rescue nil\n\n # Return the data.\n\n return controller, \"#{action}___super\", query, params, format\n end", "def run url\n # We need to tell Micon about the `:request` scope, so the `:request` component will be\n # destroyed automatically.\n micon.activate :request, {} do\n self.request = Request.new url\n\n # Using router to get controller class and method from the url.\n # Note that You using the `:router` component as if it's just an attribute.\n controller_class, method = router.decode url\n\n # Creating controller and calling it to generate output.\n controller = controller_class.new\n\n # Note we don't need to explicitly pass the `request` to controller, it will be automatically injected.\n controller.send method\n end\n end", "def dispatch_request(request)\n dispatch(request.uri, request.method)\n end", "def run url\n # Creating new request object from url.\n request = Request.new url\n\n # Using router to get controller class and method from the url.\n controller_class, method = router.decode url\n\n # Creating controller and calling it to generate output.\n controller = controller_class.new\n\n # Passing request to controller.\n controller.request = request\n controller.send method\n end", "def dispatch(request)\n # {{{\n benchmark_start_time = Time.now \n\n params = Aurita::Attributes.new(request)\n params[:_request] = request\n params[:_session] = Aurita::Session.new(request)\n params[:_logger] = @logger\n params[:_application] = @application\n status = 200\n response_body = ''\n response_header = {}\n\n controller = params[:controller]\n action = params[:action]\n mode = params[:mode]\n controller ||= 'App_Main'\n action ||= 'start'\n mode ||= 'default'\n\n Thread.current['request'] = params\n\n Lore::Connection.reset_query_count()\n Lore::Connection.reset_result_row_count()\n\n begin\n raise ::Exception.new('No controller given') if(controller.nil? 
|| controller == '') \n\n model_klass = @application.get_model_klass(controller)\n controller_klass = @application.get_controller_klass(controller)\n\n raise ::Exception.new('Unknown controller: ' << controller.inspect) unless controller_klass\n \n controller_instance = controller_klass.new(params, model_klass)\n\n response = false\n @logger.debug(\"Calling model interface method #{controller}.#{action}\")\n\n element = controller_instance.call_guarded(action)\n response = controller_instance.response\n if response[:html] == '' then\n if element.is_a?(Aurita::GUI::XML::Document) then\n # Don't use default decorator for XML documents: \n response[:mode] = :none if (!response[:mode] || response[:mode] == :default)\n response[:html] = element.string \n elsif element.respond_to?(:string) then\n # Response is an instance of Aurita::GUI::Element\n response[:html] = element.string \n response[:script] << element.script if element.respond_to?(:script) && element.script\n elsif element.is_a?(Array) then\n element.each { |e|\n response[:html] << e.to_s\n }\n end\n end\n\n response_header.update(response[:http_header]) if response[:http_header]\n if response_header['status_code'] then\n status = response_header['status_code'].to_i \n response_header.delete('status_code')\n end\n\n mode = response[:mode].to_sym if response && response[:mode]\n mode ||= :default \n response[:mode] = mode\n params[:_controller] = controller_instance\n\n response_body = @decorator.render(model_klass, response, params)\n\n @num_dispatches += 1\n\n @benchmark_time = Time.now-benchmark_start_time\n @num_queries = Lore::Connection.query_count\n @num_tuples = Lore::Connection.result_row_count\n Aurita::Plugin_Register.call(Hook.dispatcher.request_finished, \n controller_instance, \n :dispatcher => self, \n :controller => controller_instance, \n :action => action, \n :time => @benchmark_time, \n :num_queries => @num_queries, \n :num_tuples => @num_tuples)\n @logger.debug(\"Num queries: #{@num_queries}\")\n rescue Exception => excep\n @logger.error(excep.message)\n @logger.error(excep.backtrace.join(\"\\n\"))\n response_body = GUI::Error_Page.new(excep).string\n response_body = ''\n end\n\n return [ status, response_header, response_body ]\n end", "def deliver_component_endpoint(params)\n cmp_name = params[:name]\n cmp_index = cmp_name.sub(\"cmp\", \"\").to_i\n\n if params[:component].present?\n current_tabs = stored_tabs\n\n # we need to instantiate the newly added child to get access to its title\n cmp_class = constantize_class_name(params[:component].camelcase)\n raise RuntimeError, \"Could not find class #{params[:component]}\" if cmp_class.nil?\n\n cmp_config = {:name => params[:name], :class_name => cmp_class.name}.merge(params[:config] || {}).symbolize_keys\n cmp_instance = cmp_class.new(cmp_config, self)\n new_tab_short_config = cmp_config.merge(:title => cmp_instance.js_config[:title] || cmp_instance.class.js_properties[:title]) # here we set the title\n\n if stored_tabs.empty? 
|| cmp_index > stored_tabs.last[:name].sub(\"cmp\", \"\").to_i\n # add new tab to persistent storage\n current_tabs << new_tab_short_config\n else\n # replace existing tab in the storage\n current_tabs[current_tabs.index(current_tabs.detect{ |tab| tab[:name] == cmp_name })] = new_tab_short_config\n end\n\n component_session[:items] = current_tabs\n @stored_tabs = nil # reset cache\n end\n\n super(params)\n end", "def dispatch\n @source = TaliaCore::ActiveSource.find(params[:dispatch_uri], :prefetch_relations => true)\n @types = @source.types\n @types.each do |type|\n caller = type.to_name_s('_')\n self.send(caller) if(self.respond_to?(caller))\n end\n respond_to do |format|\n format.html { render :action => template_for(@source) }\n format.xml { render :text => @source.to_xml }\n format.rdf { render :text => @source.to_rdf }\n end\n end", "def url_action\n parts = []\n parts << SendgridWeb.api_namespace\n parts << [resource, verb, format].join('.')\n parts.join('/')\n end", "def on_request_uri(cli, request)\r\n if @cmdstager\r\n print_good(\"Sending #{@cmdstager}\")\r\n send_response(cli, @cmdstager)\r\n @cmdstager = nil\r\n else\r\n print_good(\"Sending payload #{datastore['PAYLOAD']}\")\r\n super\r\n end\r\n end", "def collection_url\n send route_prefix_to_method_name(\"#{class_name.model_name.route_key}_url\")\n end", "def url_proc; end", "def _dispatch(action=:to_s)\n self.action_name = action\n super(action)\n @body\n end", "def request_components; end", "def url\n base_url = \"#{endpoint(@action.service)}/#{@action.operation}\"\n base_url << \"?#{query(@action.operation)}\" if @action.method == :get\n base_url\n end", "def method_missing(method, *args)\n @uri.path = \"/#{args[0]}.#{@format.extension}\" # Should be the form of /resources\n if verb = method.to_s.match(/^http_(get|post|put|delete|head)$/)\n run_verb(verb.to_s.split(\"_\").last, args[1])\n end\n end", "def dispatch\n error_code = NO_ERRORS\n\n case params[:method]\n when 'getIntro'\n html = render_to_string :intro\n data = {'box' => \"welcome\"}\n when 'getMenu'\n render_menu and return\n when 'getSource'\n render_source and return\n when 'filter'\n render_filter and return\n else\n error_code = ERROR_UNKNOWN_METHOD\n html = \"Unable to fulfil the request! (Unknown method: #{params[:method]}\"\n end\n\n render_json(error_code, html, data)\n end", "def method_missing(method_name, params = {})\n widget, *action = method_name.to_s.split('__')\n widget = widget.to_sym\n action = !action.empty? && action.join(\"__\").to_sym\n \n if action\n if aggregatees[widget]\n # only actions starting with \"api_\" are accessible\n api_action = action.to_s.index('__') ? action : \"api_#{action}\"\n aggregatee_instance(widget).send(api_action, params)\n else\n aggregatee_missing(widget)\n end\n else\n super\n end\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
GET /imagems/1 GET /imagems/1.xml
def show
  @imagem = @evento.imagems.find(params[:id])

  respond_to do |format|
    format.html # show.html.erb
    format.xml { render :xml => @imagem }
  end
end
[ "def index\n @imagems = Imagem.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @imagems }\n end\n end", "def get_image(image_id)\n request(\n :expects => 200,\n :method => 'GET',\n :parser => Fog::Parsers::Compute::Slicehost::GetImage.new,\n :path => \"images/#{image_id}.xml\"\n )\n end", "def show\n @imagem = Imagem.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @imagem }\n end\n end", "def show\n respond_to do |format|\n format.html { render :action => :index }\n format.xml { @image_set = ImageSet.find(params[:id]); render :xml => @image_set }\n end\n end", "def index\n @images = Image.find_images\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @images }\n end\n end", "def index\n @images = @site.images.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @images }\n end\n end", "def show\n @counter_images_set = CounterImagesSet.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @counter_images_set }\n end\n end", "def index\n @images = Image.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @images }\n end\n end", "def index\n @imagens = Imagen.find(:all)\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @imagens }\n end\n end", "def show\n @image_set = ImageSet.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @image_set }\n end\n end", "def index\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @flat_images }\n end\n end", "def index\n @image_sets = ImageSet.find(:all)\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @image_sets }\n end\n end", "def index\n @images = Image.all(:conditions => { :schedule_id => params[:schedule_id] })\n\n respond_to do |format|\n format.html # index.html.erb\n format.mobile\n format.xml { render :xml => @images }\n end\n end", "def index\n @remote_image_contents = RemoteImageContent.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @remote_image_contents }\n end\n end", "def show\n @masterimage = Masterimage.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @masterimage }\n end\n end", "def show\n @especie_imagem = EspecieImagem.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @especie_imagem }\n end\n end", "def show\n @moto_cover_image_name = MotoCoverImageName.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @moto_cover_image_name }\n end\n end", "def show\n @img_info = ImgInfo.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @img_info }\n end\n end", "def show\n @images_index = ImagesIndex.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @images_index }\n end\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Public interface: BlockCommand.new. Arguments: `name` (the String name of the command), `args` (the NodeList of arguments given to the command), and `raw_tokens` (a TokenList that represents the entire BlockCommand, generated by the Tokenizer). You must override this method in your subclass of BlockCommand.
def initialize(*args)
  if self.class == BlockCommand
    raise TypeError, 'BlockCommand.new should not be called directly'
  end
  super
end
[ "def initialize(*args)\n raise TypeError, 'BlockCommand.new should not be called directly' if self.class == BlockCommand\n super\n end", "def define(*args, &block)\n @commands << new(*args, &block)\n end", "def initialize *args, &block\n @pipes = {}\n @command = EM::SystemCommand::Builder.new *args\n\n @execution_proc = block\n end", "def initialize\n @commands_without_block = []\n @commands_with_block = []\n end", "def new(*args)\n parsed_node = Treetop::Runtime::SyntaxNode.new(*args)\n\n node_name, node_class = @node_name, @node_class # local scope for the block below\n\n # so the node knows how to build itself:\n parsed_node.meta_def :build do\n node_class.new(self)\n end\n\n # so the node can be filtered based on what kind of AST node it will build\n parsed_node.meta_def :node_to_build do\n node_name\n end\n\n parsed_node\n end", "def initialize(name, args, new_op = false, inline = false)\n @name = name\n @arguments = args\n @string = \"#{@name}(#{@arguments.map(&:string).join(', ')})\"\n @string = \"new #{@string}\" if new_op\n @variables = args.inject(Set.new) { |vars, expr| vars + expr.variables }\n @variables << name\n @inline = inline\n @is_instantiation = new_op\n @functioncalls = args.map { |arg| arg.functioncalls }.flatten\n end", "def BlockArg(name)\n BlockArg.new(name: name, location: Location.default)\n end", "def define_block_argument(name)\n create_argument(:blockarg, name)\n end", "def initialize(options)\n super(options[:names],options[:description],options[:long_desc])\n @arguments_description = options[:arguments_name] || ''\n @arguments_options = Array(options[:arguments_options]).flatten\n @arguments = options[:arguments] || []\n @skips_pre = options[:skips_pre]\n @skips_post = options[:skips_post]\n @skips_around = options[:skips_around]\n @hide_commands_without_desc = options[:hide_commands_without_desc]\n @commands_declaration_order = []\n @flags_declaration_order = []\n @switches_declaration_order = []\n @examples = options[:examples] || []\n clear_nexts\n end", "def visit_arg_block(node); end", "def visit_blockarg(node); end", "def command(*names)\n command_options = {\n :description => @next_desc,\n :arguments_name => @next_arg_name,\n :arguments_options => @next_arg_options,\n :arguments => @next_arguments,\n :long_desc => @next_long_desc,\n :skips_pre => @skips_pre,\n :skips_post => @skips_post,\n :skips_around => @skips_around,\n :hide_commands_without_desc => @hide_commands_without_desc,\n }\n @commands_declaration_order ||= []\n if names.first.kind_of? 
Hash\n command = GLI::Commands::CompoundCommand.new(self,\n names.first,\n command_options)\n command.parent = self\n commands[command.name] = command\n @commands_declaration_order << command\n else\n new_command = Command.new(command_options.merge(:names => [names].flatten))\n command = commands[new_command.name]\n if command.nil?\n command = new_command\n command.parent = self\n commands[command.name] = command\n @commands_declaration_order << command\n end\n yield command\n end\n clear_nexts\n @next_arguments = []\n command\n end", "def build_commands\n eval_block(&block)\n end", "def define_commands(&block)\n instance_eval(&block)\n end", "def build(&block)\n #@orig_block = block\n EventCommands.new(@list, @indent).instance_eval(&block)\n end", "def new(name, *args, &blk)\n parser = grammar.const_get(name)\n grammar.parser(parser, *args, &blk)\n end", "def define(*args, &block)\n options = args.extract_options!\n\n name = if args.first\n args.shift\n else\n anonymous = true\n self.anonymous_block_number += 1\n \"anonymous_block_#{anonymous_block_number}\"\n end\n\n block_definitions[name].tap do |block_definition|\n block_definition.reverse_merge! options, &block\n block_definition.anonymous = !!anonymous\n end\n end", "def arg_block(name = '', &block)\n _arg_block(BLOCK_ARG, name, &block)\n end", "def build_block_container(*args, &block)\n options = args.extract_options!\n name = args.first ? args.shift : self.anonymous_block_name\n block_container = BuildingBlocks::Container.new\n block_container.name = name.to_sym\n block_container.options = options\n block_container.block = block\n block_container\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
BlockCommand#active_block: Since BlockCommands have the ability to contain multiple blocks, this method returns (at least in theory) the block within this BlockCommand that the Parser is currently looking at as it iterates over it. You must override this method in your subclass of BlockCommand. It must return a NodeList. TODO: At the moment, this method is not used.
def active_block
  raise NotImplementedError, 'BlockCommand#active_block should be overridden by a subclass'
end
[ "def active_block\n @nodes\n end", "def get_blocks\n @blocks\n end", "def block_node; end", "def process_block(node)\n node\n end", "def block\n (@blocks ||= Array.new) << Block.new\n @in_block = @blocks.last\n yield\n @in_block = nil\n end", "def nodelist\n @blocks\n end", "def netblock\n return @netblock\n end", "def get_blocks\n if (branches = @cur_state.branches[@tape.val])\n branches.map { |branch| branch.block } \n else\n [@cur_state.default]\n end\n end", "def is_block_element; false; end", "def BlockNode(opening, block_var, bodystmt); end", "def process_block_token(tk); end", "def block_class() Block; end", "def active_block\n current_case ? blocks[current_case] : default\n end", "def get_current_block(x, y)\n case @block_type\n when :ordinary\n Block.new(adjust_block_coordinates(x,y).to_vec2d)\n when :sliding_horizontally\n SlidingBlock.new(adjust_block_coordinates(x, y).to_vec2d,\n :horizontal) \n when :sliding_vertically\n SlidingBlock.new(adjust_block_coordinates(x, y).to_vec2d,\n :vertical) \n when :moving_enemy\n MovingEnemy.new(adjust_block_coordinates(x, y).to_vec2d)\n when :prize\n PrizeBottom.new(adjust_block_coordinates(x,y).to_vec2d)\n end\n end", "def get_current_block\n # /wow/?a=get_current_block\n Faraday.get(\"#{BASE_URL}#&a=get_current_block\").body\n end", "def _Block\n\n _save = self.pos\n begin # sequence\n while true # kleene\n _tmp = apply(:_BlankLine)\n break unless _tmp\n end\n _tmp = true # end kleene\n break unless _tmp\n\n begin # choice\n _tmp = apply(:_BlockQuote)\n break if _tmp\n _tmp = apply(:_Verbatim)\n break if _tmp\n _tmp = apply(:_HorizontalRule)\n break if _tmp\n _tmp = apply(:_Heading)\n break if _tmp\n _tmp = apply(:_BulletList)\n break if _tmp\n _tmp = apply(:_Para)\n break if _tmp\n _tmp = apply(:_Plain)\n end while false # end choice\n\n end while false\n unless _tmp\n self.pos = _save\n end # end sequence\n\n set_failed_rule :_Block unless _tmp\n return _tmp\n end", "def block_node\n case sexp_type\n when :method_add_block\n self[2]\n end\n end", "def visit_block(node); end", "def block\n\t\tdeclaration_nodes = declarations\n\t\tcompound_statement_node = compound_statement\n\t\tBlock.new(declaration_nodes, compound_statement_node)\n\tend" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Writes an object to the stream. Calls the object's write_to() method, if available. Uses to_s otherwise.
def write(object)
  if object.respond_to?(:write_to)
    object.write_to(@stream)
  else
    @stream << object.to_s
  end
end
[ "def _write(obj)\n obj.Write()\n end", "def _write(obj)\n obj.Write()\n end", "def puts( obj )\n return unless writeable?\n\n data = Marshal.dump(obj)\n @socket.write([data.size].pack('I')) + @socket.write(data)\n rescue SystemCallError\n return nil\n end", "def write_object(object)\n @encoder.encode(object, @sock)\n end", "def write_any( obj )\n write_any_log( obj , \"Write\")\n if stream_position != Position.get(obj).at\n raise \"Write #{obj.class}:0x#{obj.object_id.to_s(16)} at 0x#{stream_position.to_s(16)} not #{Position.get(obj)}\"\n end\n write_any_out(obj)\n write_any_log( obj , \"Wrote\")\n Position.get(obj)\n end", "def output_to_socket(socket, object)\n AllGems.logger.debug(\"Sending object: #{object} to socket: #{socket}\")\n socket.puts object.to_json\n end", "def io(data, io)\n io.write(obj)\n end", "def write_raw(obj,encoding = 3)\n\t @stream = \"\" #new output stream\n\t RequestStore.amf_encoding = (encoding == 3) ? 'amf3' : 'amf0'\n\t reset_referencables\n\t write(obj)\n\t @stream\n\tend", "def write *objects\n write_to_buffer(*objects)\n flush_buffer\n end", "def append obj\n @io.write obj\n end", "def send_object obj\n data = serializer.dump(obj)\n send_data [data.respond_to?(:bytesize) ? data.bytesize : data.size, data].pack('Na*')\n end", "def dump(obj, io)\n io.puts obj.send(inspect_method)\n end", "def serialize(object)\n @marshaller.dump(object)\n end", "def write_object( object )\n unless @machine.objects.has_key? object.object_id\n raise \"Object(#{object.object_id}) not linked #{object.inspect}\"\n end\n layout = object.get_layout\n type = type_word(layout)\n @stream.write_uint32( type )\n write_ref_for(layout )\n layout.each do |var|\n inst = object.instance_variable_get \"@#{var}\".to_sym\n #puts \"Nil for #{object.class}.#{var}\" unless inst\n write_ref_for(inst)\n end\n #puts \"layout length=#{layout.get_length.to_s(16)} mem_len=#{layout.word_length.to_s(16)}\"\n l = layout.get_length\n if( object.is_a? Parfait::List)\n object.each do |inst|\n write_ref_for(inst)\n end\n l += object.get_length\n end\n pad_after( l * 4)\n object.position\n end", "def << (obj)\n @buffer << obj\n flush if @buffer.size == @size\n end", "def send_object obj\n data = serializer.dump(obj)\n send_data [data.respond_to?(:bytesize) ? data.bytesize : data.size, data].pack('Na*')\n end", "def send_object(obj, options={})\n if obj.is_a? ServerException\n self.send_error(obj)\n return\n end #if\n json = case\n when obj.respond_to?(:to_json)\n obj.to_json\n when obj.respond_to?(:to_shared)\n obj.to_shared.to_json\n else\n nil\n end #case\n if json.nil?\n raise ArgumentError, \"The #{obj.class.name} object being sent \" +\n \"does not support the to_json or to_shared methods.\"\n return\n end #if\n self.send_header(options)\n self.send_bytes(json)\n end", "def store(object)\n @collector.sending_stream.puts pack(:store, object)\n end", "def write_marshal(object)\n write object.marshal\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Writes a string directly to the stream, without any filtering.
def write!( string ) @stream << string end
[ "def write(string)\n @output_stream.write(string)\n end", "def write(str)\n writing { write0 str }\n end", "def write(str); end", "def write(string)\n @buffer << string\n self\n end", "def write(str)\n str = str.to_s\n IOUtil.write(str, out_stream)\n str.bytesize\n end", "def write(str)\n end", "def write(string)\n @handler.add_to_write @socket\n @handler.set_buffer @socket, string\n end", "def write_string(string, data: nil, stream: nil)\n raise ArgumentError, \"String must be present\" if !string\n\n intbuf = write_varint(string.bytesize, data: data, stream: stream)\n\n stringbuf = BTC::Data.ensure_binary_encoding(string)\n\n data << stringbuf if data\n stream.write(stringbuf) if stream\n\n intbuf + stringbuf\n end", "def write_string(string)\n @payload << string.force_encoding(Encoding::BINARY)\n write_byte(10)\n end", "def write(string)\n length = string.length\n while 0 < length\n IO::select(nil, [@sock])\n @dumplog.log_dump('>', string[-length..-1]) if @options.has_key?(\"Dump_log\")\n length -= @sock.syswrite(string[-length..-1])\n end\n end", "def write(s)\n @socket.write(s)\n end", "def do_write(s)\n @write_buffer << s\n @write_buffer.force_encoding(Encoding::BINARY)\n\n if @sync or @write_buffer.size > BLOCK_SIZE or idx = @write_buffer.rindex($/)\n remain = idx ? idx + $/.size : @write_buffer.length\n nwritten = 0\n\n while remain > 0\n str = @write_buffer[nwritten,remain]\n begin\n nwrote = syswrite(str)\n rescue Errno::EAGAIN\n retry\n end\n remain -= nwrote\n nwritten += nwrote\n end\n\n @write_buffer[0,nwritten] = \"\"\n end\n end", "def write(str, len=nil)\n raise ArgumentError, \"str must respond to :to_s\" unless str.respond_to? :to_s\n @mem.write_string(str.to_s, len)\n end", "def write(s)\n @data += s\n end", "def write_nonblock(str)\n write_buf.append(str)\n write_buf.write_to(io)\n drain_write_buffer unless write_buf.empty?\n str.length\n end", "def output string = nil \n if string.is_a? IO\n to_io = true\n dest = string\n string = \"\"\n else\n string = \"\" unless string.respond_to? :<<\n end\n \n yield(string) if block_given? # modifiy str in place\n \n if to_io\n dest << string\n end\n string\n end", "def do_write(s)\n @wbuffer << s\n @wbuffer.force_encoding(Encoding::BINARY)\n @sync ||= false\n\n if @sync or @wbuffer.size > BLOCK_SIZE or idx = @wbuffer.rindex($/)\n remain = idx ? idx + $/.size : @wbuffer.length\n nwritten = 0\n\n while remain > 0\n str = @wbuffer[nwritten,remain]\n begin\n nwrote = syswrite(str)\n rescue Errno::EAGAIN\n retry\n end\n remain -= nwrote\n nwritten += nwrote\n end\n\n @wbuffer[0,nwritten] = \"\"\n end\n end", "def write(str)\n raise(IOError, \"not opened for writing\") unless @writable\n raise(IOError, \"not modifiable string\") if @string.frozen?\n\n str = str.to_s\n return 0 if str.empty?\n\n if @append || (@pos >= @string.length)\n # add padding in case it's needed\n str = str.rjust((@pos + str.length) - @string.length, \"\\000\") if (@pos > @string.length)\n enc1, enc2 = str.encoding, @string.encoding\n if enc1 != enc2\n str = str.dup.force_encoding(enc2)\n end\n @string << str\n @pos = @string.length\n else\n @string[@pos, str.length] = str\n @pos += str.length\n @string.taint if str.tainted?\n end\n\n str.length\n end", "def send(str)\n write(str + 0.chr)\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
USE ONLY if &f is associative, AT YOUR OWN RISK. s is used only for the last reduce.
def reduce (s, &f) self.parallelize{|slice| slice.reduce(&f)}.reduce(s, &f) end
[ "def reduce\n \n end", "def _reduce_236(val, _values, result)\n result = nil\n \n result\nend", "def reduce_function\n 'function (key, values) { return reduce(key, values);};'\n end", "def _reduce_239(val, _values, result)\n result = nil\n \n result\nend", "def _reduce_608(val, _values, result)\n result = nil\n \n result\nend", "def _reduce_235(val, _values, result)\n result = nil\n \n result\nend", "def greduce(key_f, reduce_f)\r\n grouped = group_by { |elem| key_f.call(elem) } #returns a hash\r\n reduced = grouped.map { |k, value_array| [k, reduce_f.call(value_array)] }\r\n end", "def _reduce_610(val, _values, result)\n result = nil\n \n result\nend", "def _reduce_594(val, _values, result)\n result = nil\n \n result\nend", "def _reduce_712(val, _values, result)\n result = nil\n \n result\nend", "def _reduce_222(val, _values, result)\n result = nil\n \n result\nend", "def iterate_reduce\n accu = 0\n self.list.each do |elem|\n accu = accu + elem\n end\n\n accu\n end", "def _reduce_496(val, _values, result)\n result = nil\n \n result\nend", "def reduce_sat (assump, fxn)\n assump.each do |key, value|\n fxn.length.times do |index|\n\n term = fxn[index].split('+')\n not_key = '~'.concat(key)\n\n if (term.include?(not_key))\n if (value == 1)\n if (term.length != 1)\n term.delete(not_key)\n else \n abort(\"These assmptions led to unSAT:\\n #{assump}\")\n end # if else\n else\n term = 1\n end # if else\n elsif (term.include?(key))\n if (value == 0)\n if (term.length != 1)\n term.delete(key)\n else\n abort(\"These assmptions led to unSAT:\\n #{assump}\")\n end # if else\n else\n term = 1 \n end # if else\n end # if elsif\n\n if (term.kind_of?(Array))\n term = term.join('+')\n end # if\n\n fxn[index] = term\n end # times do\n fxn.delete(1)\n end # each do \n\n return fxn\n \n end", "def test_reduce\n assert_equal('12Fizz4BuzzFizz78FizzBuzz11Fizz1314FizzBuzz',\n @fb.reduce { |acc, res| acc + res })\n end", "def reduce(str) \n if not @h[str].nil?\n return @h[str]\n else\n reductions = []\n versions(str).each do |v|\n val = reduce(v)\n reductions.push val\n if val == 1 # this quickens the speed\n break\n end\n end\n reductions.empty? ? (@h[str] = str.length; return str.length) : (@h[str] = reductions.min; return reductions.min) \n end\nend", "def get_reduce() \n reduce_js = <<-'END_OF_REDUCE' \n\nfunction(key, values) {\n \n\t\tvar total = {};\n var rtn = {};\n\n values.forEach(function(e) {\n for (command in e) {\n if (total[command] == undefined) {\n total[command] = e[command]\n } else {\n total[command] += e[command]\n }\n }\n })\n rtn[\"sum\"] = total\n return (total);\n}; \n END_OF_REDUCE\n \n return reduce_js\nend", "def _reduce_261(val, _values, result)\n result = args val\n\n result\nend", "def test_reduce\n assert_equal('12Assign4BuzzAssign78AssignBuzz11Assign1314AssignBuzz',\n @ab.reduce { |acc, res| acc + res })\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
i^n mod p, where p is prime and i is a primitive root of p. With this formula, generate a series through progressive squaring, from zero to (p-1). Collect the residue, mod p, and perform an x_swap with each object's index number. The result will be the corresponding MOF series.
def mof(i, p) arr = [] x_swap = [] (0..(p-2)).each do |x| arr.push((i**x) % p) end (1..(p-1)).each do |n| x_swap.push((arr.index {|item| n == item }) % 12 ) end return x_swap end
[ "def mod_sqr(p0) end", "def runterholen(p,q,dividend)\r\n\tp[q.deg-1] = dividend[q.deg-1]\r\n\tp[0..-2].mod\r\nend", "def permutation_equation p\n index_hash = {}\n y = []\n p.each_with_index do |x, i|\n index_hash[x] = i\n end\n (1..p.size).each do |n|\n y.push(index_hash[index_hash[n] + 1] + 1)\n end\n y\n end", "def each_permutation\n # This is pretty ugly..\n a, p, i = self.clone, (0..self.size).to_a, 0\n while i < self.size\n p[i] -= 1\n (i % 2) == 1 ? j = p[i] : j = 0\n a.swap! i, j\n yield a\n i = 1\n while p[i].zero?\n p[i] = i\n i += 1\n end\n end\n end", "def modulo(p0) end", "def mod_sqrt(n, p, exp = 1, return_list = false)\r\n\t\tif 1 < exp or return_list\r\n\t\t\tx = mod_sqrt(n, p)\r\n\t\t\treturn x unless x\r\n\t\t\treturn [x] if 1 == exp\r\n\t\t\traise ArgumentError, \"if 1 < exp then n must be relatively prime with p\" if 0 == x\r\n\r\n\t\t\trslt = [x] if return_list\r\n\t\t\tp_power = p\r\n\t\t\tz = extended_lehmer_gcd(x << 1, p)[0]\r\n\t\t\t(exp - 1).times do\r\n\t\t\t\tx += (n - x ** 2) / p_power * z % p * p_power\r\n\t\t\t\tp_power *= p\r\n\t\t\t\trslt.push(x) if return_list\r\n\t\t\tend\r\n\r\n\t\t\treturn return_list ? rslt : x\r\n\t\tend\r\n\r\n\t\tunless (k = kronecker_symbol(n, p)) == 1\r\n\t\t\treturn nil if -1 == k\r\n\t\t\treturn 0\r\n\t\tend\r\n\r\n\t\tif 0 < p & 6\r\n\t\t\treturn power(n, (p >> 2) + 1, p) if p[1] == 1\r\n\t\t\tn %= p\r\n\t\t\tx = power(n, (p >> 3) + 1, p)\r\n\t\t\treturn x if x ** 2 % p == n\r\n\t\t\treturn x * power(2, p >> 2, p) % p\r\n\t\tend\r\n\r\n\t\t# get q and e s.t. p - 1 == 2**e * q with q odd\r\n\t\te = 0\r\n\t\tq = p - 1\r\n\t\te += 1 while 0 == q[e]\r\n\t\tq >>= e\r\n\r\n\t\t# Find generator\r\n\t\tg = 2\r\n\t\tg += 1 until -1 == kronecker_symbol(g, p)\r\n\t\tz = power(g, q, p)\t# |<z>| == 2 ** e\r\n\r\n\t\t# Initialize\r\n\t\ttemp = power(n, q >> 1, p)\r\n\t\tx = n * temp % p\t# n ** ((q + 1) / 2) mod p\r\n\t\tb = x * temp % p\t# n ** q mod p\r\n\r\n\t\t# always\r\n\t\t# n * b == x ** 2\r\n\t\tuntil 1 == b\r\n\t\t\t# Find exponent f s.t. b ** (2 ** f) == 1 (mod p)\r\n\t\t\tf = 0\r\n\t\t\tb_ = b\r\n\t\t\tuntil 1 == b_\r\n\t\t\t\tb_ = b_ ** 2 % p\r\n\t\t\t\tf += 1\r\n\t\t\tend\r\n\r\n\t\t\t# Reduce exponent\r\n\t\t\t(e - f - 1).times { z = z ** 2 % p }\r\n\t\t\te = f\r\n\t\t\tx = x * z % p\r\n\t\t\tz = z ** 2 % p\r\n\t\t\tb = b * z % p\r\n\t\tend\r\n\r\n\t\treturn x\r\n\tend", "def p(n)\r\n (return @ps[n]) if @ps[n]\r\n\r\n sum = 0\r\n @pk.each_with_index do |k, i|\r\n if k > n\r\n break\r\n end\r\n sign = ( i % 4 <= 1 ? 
1 : -1)\r\n sum += sign * p( n - k )\r\n sum %= 1_000_000 #keep it under control\r\n end\r\n @ps[n] = sum\r\n return sum\r\n end", "def sieve\n @ws.fill(0)\n @ps.zip(@log_ps, @roots) do |p, lp, (s1, s2)|\n ((@m + s1) % p).step(2 * @m, p) {|i| @ws[i] += lp }\n ((@m + s2) % p).step(2 * @m, p) {|j| @ws[j] += lp }\n end\n end", "def split_into(n, p)\n [n/p + 1] * (n%p) + [n/p] * (p - n%p)\n end", "def permutationEquation(p)\n n = p.size\n (1..n).map do |x|\n res = 0\n p.each do |y|\n res = y if p[p[y - 1] - 1] == x\n end\n res\n end\nend", "def sump(p,n)\n if n >= 1\n if multiple(p,n)\n sump(p,n-1)+n\n else\n sump(p,n-1)\n end\n else\n 0\n end\nend", "def power_modulo(b, p, m)\n z=GMP::Z.new(b)\n z.powmod(p,m).to_i\n end", "def eratosthen_primes_to(n)\n raise ArgumentError unless valid_n?(n)\n\n array = (2..n).to_a\n array.each do |prime|\n square = prime**2\n break if square > n\n\n array -= square.step(n, prime).to_a\n end\n array\n end", "def spiral_memory(n)\n root = Math.sqrt(n).ceil\n root += 1 if root.even?\n root - 1 - (root**2 - n) % (root - 1)\nend", "def power_modulo(b, p, m)\n z = GMP::Z.new(b)\n z.powmod(p, m).to_i\n end", "def divmod(p0) end", "def quadratic_residues(modulus)\n (1..(modulus - 1)).map { |n| n**2 % modulus }.uniq.sort\n end", "def semifact(x)\n (1..x).reduce(1) do |memo, acc|\n memo * (2 * acc - 1)\n end\n end", "def potenz(x, n)\n check_pre((n.nat?))\n if (n == 1) then\n x\n elsif (n == 0) then\n 1\n else \n x*=potenz((x), n-1)\n end\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Card Test: tests cards belonging to the Card class
def test_card_members card = Card.new("A", "Hearts") assert_equal(card.value, 1) assert_equal(card.face, "A") assert_equal(card.is_ace, true) end
[ "def initialize cards\n @deck = cards\n end", "def cards\n @cards\n end", "def build_deck\n 0.upto(51).map{ |i| Card.new(i)}\n end", "def test_user_start_plays_the_game\n card_1 = Card.new(:club, 'Two', 2)\n card_2 = Card.new(:club, 'Three', 3)\n card_3 = Card.new(:club, 'Four', 4)\n card_4 = Card.new(:club, 'Five', 5)\n card_5 = Card.new(:club, 'Six', 6)\n card_6 = Card.new(:club, 'Seven', 7)\n card_7 = Card.new(:club, 'Eight', 8)\n card_8 = Card.new(:club, 'Nine', 9)\n card_9 = Card.new(:club, 'Ten', 10)\n card_10 = Card.new(:club, 'Jack', 11)\n card_11 = Card.new(:club, 'Queen', 12)\n card_12 = Card.new(:club, 'King', 13)\n card_13 = Card.new(:club, 'Ace', 14)\n card_14 = Card.new(:diamond, 'Two', 2)\n card_15 = Card.new(:diamond, 'Three', 3)\n card_16 = Card.new(:diamond, 'Four', 4)\n card_17 = Card.new(:diamond, 'Five', 5)\n card_18 = Card.new(:diamond, 'Six', 6)\n card_19 = Card.new(:diamond, 'Seven', 7)\n card_20 = Card.new(:diamond, 'Eight', 8)\n card_21 = Card.new(:diamond, 'Nine', 9)\n card_22 = Card.new(:diamond, 'Ten', 10)\n card_23 = Card.new(:diamond, 'Jack', 11)\n card_24 = Card.new(:diamond, 'Queen', 12)\n card_25 = Card.new(:diamond, 'King', 13)\n card_26 = Card.new(:diamond, 'Ace', 14)\n card_27 = Card.new(:heart, 'Two', 2)\n card_28 = Card.new(:heart, 'Three', 3)\n card_29 = Card.new(:heart, 'Four', 4)\n card_30 = Card.new(:heart, 'Five', 5)\n card_31 = Card.new(:heart, 'Six', 6)\n card_32 = Card.new(:heart, 'Seven', 7)\n card_33 = Card.new(:heart, 'Eight', 8)\n card_34 = Card.new(:heart, 'Nine', 9)\n card_35 = Card.new(:heart, 'Ten', 10)\n card_36 = Card.new(:heart, 'Jack', 11)\n card_37 = Card.new(:heart, 'Queen', 12)\n card_38 = Card.new(:heart, 'King', 13)\n card_39 = Card.new(:heart, 'Ace', 14)\n card_40 = Card.new(:spade, 'Two', 2)\n card_41 = Card.new(:spade, 'Three', 3)\n card_42 = Card.new(:spade, 'Four', 4)\n card_43 = Card.new(:spade, 'Five', 5)\n card_44 = Card.new(:spade, 'Six', 6)\n card_45 = Card.new(:spade, 'Seven', 7)\n card_46 = Card.new(:spade, 'Eight', 8)\n card_47 = Card.new(:spade, 'Nine', 9)\n card_48 = Card.new(:spade, 'Ten', 10)\n card_49 = Card.new(:spade, 'Jack', 11)\n card_50 = Card.new(:spade, 'Queen', 12)\n card_51 = Card.new(:spade, 'King', 13)\n card_52 = Card.new(:spade, 'Ace', 14)\n standard_deck = [card_1, card_2, card_3, card_4, card_5, card_6, card_7,\n card_8, card_9, card_10, card_11, card_12, card_13, card_14, card_15,\n card_16, card_17, card_18, card_19, card_20, card_21, card_22, card_23,\n card_24, card_25, card_26, card_27, card_28, card_29, card_30, card_31,\n card_32, card_33, card_34, card_35, card_36, card_37, card_38, card_39,\n card_40, card_41, card_42, card_43, card_44, card_45, card_46, card_47,\n card_48, card_49, card_50, card_51, card_52]\n # shuffle_deck = standard_deck.shuffle\n # deck_1 = Deck.new(shuffle_deck[0..25])\n # deck_2 = Deck.new(shuffle_deck[26..51])\n deck_1 = Deck.new(standard_deck[0..25])\n deck_2 = Deck.new(standard_deck[26..51])\n player_1 = Player.new('Priya', deck_1)\n player_2 = Player.new('Ricky', deck_2)\n game = Game.new(player_1, player_2)\n require 'pry'; binding.pry\n # Hmmm... until method seems okay,\n # however, the if conditional isn't quite working\n # Figured it out, my if conditional is fine\n # tested with test file and pry, it should work\n # So, there must be something wrong with until\n # Which means something might be wrong with my player class\n # Realized w/ .has_lost? 
the return was simply a string\n # The string was fine for testing, but it doesn't help\n # the computer understand what i want\n # Which is a boolean!!!\n # Must go and update my player file.\n # FIXED! Looks like it is working.\n # Time to polish and test edge cases\n # Also - better go test Player class since I made edits\n\n # FOUND ISSUE: my user input for name isn't connecting to player class parameter\n # May need to update get chomp to runner instead\n\n # FOUND SECOND ISSUE: Though - this could be due to lack of a full deck...\n # You know what, let me add my full deck and go from there.\n\n # After updating (nuking) game class\n # Retesting!\n\n # Was working, found many errors when testing the game during run\n # Found edge case, what happens when deck empties\n\n # Looks like I may have finally gotten it working! Time to test again :)\n end", "def test_draw_card_draw_10_over_cards\n\t\t\t@card_manager.draw_card(15)\n\t\t\tassert_equal(10, @card_manager.handed_cards.length)\n\t\t\tassert_equal(0, @card_manager.pooled_cards.length)\n\t\t\tassert_equal(0, @card_manager.staged_cards.length)\n\t\t\tassert_equal(0, @card_manager.used_cards.length)\n\t\tend", "def display_deck\n @deck_array.each do |x|\n x.display_card\n end\n end", "def has_card? test_card\r\n @card_list.has_card? test_card\r\n end", "def test_current_card_returns_roundvalue\n card_1 = Card.new(\"3\", \"Hearts\")\n card_2 = Card.new(\"7\", \"Clubs\")\n card_3 = Card.new(\"Ace\", \"Spades\")\n cards = [card_1, card_2, card_3]\n deck = Deck.new(cards)\n round = Round.new(deck)\n\n assert_equal round.current_card, card_1\n\n end", "def has_card? test_card \n @card_list.has_card? test_card\n end", "def test_add_card\n # Build the Default Deck\n deck = self.build_basic_deck\n # Create the 6 of spades and add it to the deck\n card = Card.new(:spade, '6', 6)\n deck.add_card(card)\n # Check that we added the correct card\n assert_equal 5, deck.cards.length\n assert_equal card, deck.cards.last\n assert_equal 6, deck.cards.last.rank\n end", "def initialize(cards = [])\n @cards = cards\n end", "def setupCards\n\t\t#shuffle all of the individual decks of cards\n\t\t@suspects = @suspects.shuffle\n\t\t@locations = @locations.shuffle\n\t\t@weapons = @weapons.shuffle\n\n\t\t#choose the winning guess\n\t\t@suspectAnswer = @suspects.delete_at(0)\n\t\t@locationAnswer = @locations.delete_at(0)\n\t\t@weaponAnswer = @weapons.delete_at(0)\n\n\t\t#move all of the remaining cards together and shuffle them\n\t\t@suspects.concat(@locations.concat(@weapons))\n\t\t@suspects = @suspects.shuffle\n\n\t\t#distribute all of the remaining cards evenly between all of the players\n\t\t(@suspects.length).times{ |i| @players[i % @numPlayers].setCard(@suspects[i]) }\n\tend", "def getCardArray()\n\t\treturn @cards\n\tend", "def desk_cards\n desk = []\n self.public_deal.each do |i|\n desk.push(i)\n end\n self.hand_deal.each do |i|\n desk.push(i)\n end\n desk\n end", "def test_add_card\n @LiarHand.remove_card(Card.new(:Ace))\n\n assert_equal(1, @LiarHand.cards_in_hand, 'remove_card method not working for <LiarHand>')\n assert_equal(false, @LiarHand.contains?(Card.new(:Ace)), 'Ace card deleted from <LiarHand>, is it still there')\n @LiarHand.add_card(:Ace)\n assert_equal(2, @LiarHand.cards_in_hand, 'add_card method not working for <LiarHand>')\n\n @IdiotHand.remove_card(Card.new(:Two))\n\n assert_equal(1, @IdiotHand.cards_in_hand, 'remove_card method not working for <LiarHand>')\n assert_equal(false, @IdiotHand.contains?(Card.new(:Two)), 'Ace card deleted from 
<LiarHand>, is it still there')\n @IdiotHand.add_card(:Two)\n assert_equal(2, @IdiotHand.cards_in_hand, 'add_card method not working for <LiarHand>')\n\n @LightHand.remove_card(Card.new(:Queen))\n\n assert_equal(4, @LightHand.cards_in_hand, 'remove_card method not working for <LiarHand>')\n assert_equal(false, @LightHand.contains?(Card.new(:Queen)), 'Ace card deleted from <LiarHand>, is it still there')\n @LightHand.add_card(:Queen)\n assert_equal(5, @LightHand.cards_in_hand, 'add_card method not working for <LiarHand>')\n\n @SpiderHand.remove_card(Card.new(:King))\n\n assert_equal(4, @SpiderHand.cards_in_hand, 'remove_card method not working for <LiarHand>')\n assert_equal(false, @SpiderHand.contains?(Card.new(:King)), 'Ace card deleted from <LiarHand>, is it still there')\n @SpiderHand.add_card(:King)\n assert_equal(5, @SpiderHand.cards_in_hand, 'add_card method not working for <LiarHand>')\n\n\n end", "def initialize()\n # Cards table, containing cards hashes\n setAllCards()\n end", "def test_deck_count_cards\n card_1 = Card.new(\"What is the capital of Alaska?\", \"Juneau\")\n card_2 = Card.new(\"The Viking spacecraft sent back to Earth photographs and reports about the surface of which planet?\", \"Mars\")\n card_3 = Card.new(\"Describe in words the exact direction that is 697.5° clockwise from due north?\", \"North north west\")\n deck = Deck.new([card_1, card_2, card_3])\n assert_equal 3, deck.count\n end", "def test_it_can_hold_different_data\n card = Card.new(\"Who is better than Justin Bieber\", \"Literally Anyone\")\n\n assert_equal \"Who is better than Justin Bieber\", card.question\n assert_equal \"Literally Anyone\", card.answer\n end", "def test_deck_cards_in_category\n assert_equal [@card_1], @deck.cards_in_category(:Pop_Culture)\n assert_equal [@card_2, @card_3], @deck.cards_in_category(:Corporate_Knowledge)\n assert_equal [], @deck.cards_in_category(:Seattle_Street_Knowledge)\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Checks to see whether anything currently exists at the given location.
def exists?(location_uri) r = client[connection.api.object_url(location_uri_to_fedora3_pid(location_uri), format: 'xml')].head # without disabling redirection, we should not get 3xx here (200..299).cover? r.code rescue RestClient::ExceptionWithResponse => e return false if e.response.code.eql? 404 raise e end
[ "def contains?(loc)\n false\n end", "def has_locations?\n !@locations.empty?\n end", "def location_exists\n if self.location_changed?\n if GeneticBank.exists?(self.location)\n errors.add(:location, \"already exists\")\n end\n end\n end", "def location_exists\n\t \tredirect_to('/') if current_merchant.locations.present?\n\t end", "def test_defined?(query)\n locations = find_locations(query)\n not locations.empty?\n end", "def location_has?(holding)\n !holding['location_has'].nil?\n end", "def has_location?(repo_location)\n cache.has_key? repo_location\n end", "def empty_location?(location)\n return @state[location[0]][location[1]] == nil\n end", "def exist?\n workspace_info.exist?\n end", "def saves_location_exists?\n return false unless @game\n File.exist? @game.saves_location\n end", "def has_locations?\n if trial_summary\n trial_summary.has_locations\n else\n false\n end\n end", "def placed?\n !location.nil?\n end", "def require_locations?\n @require_locations && !@location\n end", "def exists?\n return !stats.nil?\n rescue Beaneater::NotFoundError\n return false\n end", "def unavailable?\n return if shadowed?\n unavailable = UNAVAILABLE_LOCATIONS\n unavailable += %w(IN-PROCESS) unless home_location.sc_ivy?\n match?(current_location.code, unavailable)\n end", "def been_to?( location )\r\n\t\t\t@rooms.include?( location )\r\n\t\tend", "def check_for_duplicate_location(name)\n check_for_duplicate(name, @locations, \"location\")\n end", "def event_location_exists(event)\n self.get_hotelid()\n if Location.where(:event_id => event.id, :hotelid => self.hotelid).count > 0\n return true\n else\n return false\n end\n end", "def check_location\n\t\tif locations.empty? or locations.all? {|location| location.marked_for_destruction? }\n\t errors.add(:base, 'Business must have at least one location')\n\t end\n\tend" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Closes the shared memory area and frees its content.
def close Shared::try("shmdt") {Shared::shmdt(@mem)} Shared::try("shmctl") {Shared::shmctl(@id, Shared::IPC_RMID, nil)} end
[ "def close\n @mmap.close\n @mmap = nil\n end", "def dispose\n call Memory.deAlloc(self)\n end", "def close() end", "def finalize\n if @handle\n @handle.release_interface(0)\n @handle.close\n end\n end", "def sync_close(*) end", "def close\n store.close\n end", "def dispose\n if (@handle > 0)\n @fmod.invoke('System_Close', @handle)\n @fmod.invoke('System_Release', @handle)\n @handle = 0\n end\n @fmod = nil\n end", "def close\n Wrapper.Quit(@native_manager) if @native_manager\n ensure\n @native_manager = nil\n end", "def close\n self.C_Finalize\n self.unload_library\n end", "def close\n return if @closed\n\n chunks_fiber.resume(:terminate) if chunks_fiber.alive?\n cache.close! if cache\n @buffer = nil\n @closed = true\n end", "def close\n @closed = true\n end", "def close\n @storage.close\n end", "def release\n _do_if_open { _handle_closed! ; ::Dnet.blob_free(self) }\n end", "def close\n @snmp.close\n end", "def close\n close_data_file\n end", "def close\n if Net::SNMP.thread_safe\n Session.lock.synchronize {\n Wrapper.snmp_sess_close(@struct)\n Session.sessions.delete(self.sessid)\n }\n else\n Wrapper.snmp_sess_close(@struct)\n Session.sessions.delete(self.sessid)\n end\n end", "def close\n @processor.finish\n end", "def close\n return if @ptr.nil?\n\n ObjectSpace.undefine_finalizer(self) if @finalize\n Native.sd_journal_close(@ptr)\n\n @ptr = nil\n end", "def close\n CloseHandle(@handle)\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Writes a string of len bytes.
def write(str, len=nil) raise ArgumentError, "str must respond to :to_s" unless str.respond_to? :to_s @mem.write_string(str.to_s, len) end
[ "def write_string_length(str, len)\n Rubinius.primitive :pointer_write_string\n raise PrimitiveFailure, \"Unable to write string\"\n end", "def write(str)\n raise(IOError, \"not opened for writing\") unless @writable\n raise(IOError, \"not modifiable string\") if @string.frozen?\n\n str = str.to_s\n return 0 if str.empty?\n\n if @append || (@pos >= @string.length)\n # add padding in case it's needed\n str = str.rjust((@pos + str.length) - @string.length, \"\\000\") if (@pos > @string.length)\n enc1, enc2 = str.encoding, @string.encoding\n if enc1 != enc2\n str = str.dup.force_encoding(enc2)\n end\n @string << str\n @pos = @string.length\n else\n @string[@pos, str.length] = str\n @pos += str.length\n @string.taint if str.tainted?\n end\n\n str.length\n end", "def write(str)\n s = str.to_s\n\n @length += s.bytesize\n @headers[CONTENT_LENGTH] = @length.to_s\n @body << s\n nil\n end", "def truncate(len)\n raise(IOError, \"closing non-duplex IO for writing\") unless @writable\n raise(TypeError) unless len.respond_to?(:to_int)\n length = len.to_int\n raise(Errno::EINVAL, \"negative length\") if (length < 0)\n if length < @string.size\n @string[length .. @string.size] = \"\"\n else\n @string = @string.ljust(length, \"\\000\")\n end\n # send back what was passed, not our :to_int version\n len\n end", "def write(str)\n s = str.to_s\n\n @length += s.bytesize\n @headers[Rack::CONTENT_LENGTH] = @length.to_s\n @body << s\n end", "def write(s)\n do_write(s)\n s.bytesize\n end", "def write_varstring(str)\n write_varsize str.length\n write str\n end", "def write(str)\n str = str.to_s\n IOUtil.write(str, out_stream)\n str.bytesize\n end", "def write(str)\n writing { write0 str }\n end", "def dump_str_with_len_and_zero_pad(str, len)\r\n if str.length > len\r\n str[0...len]\r\n else\r\n str + \"\\000\" * (len - str.length)\r\n end\r\nend", "def append_string(str)\n align(4)\n @packet += [str.length].pack(\"L\") + str + \"\\0\"\n end", "def length=(length_str)\n super length_str.to_i\n end", "def write(str); end", "def write(str)\n end", "def write_string(string)\n @payload << string.force_encoding(Encoding::BINARY)\n write_byte(10)\n end", "def bytesize(string)\n string.respond_to?(:bytesize) ? string.bytesize : string.length\n end", "def bytesize(string)\n string.respond_to?(:bytesize) ? string.bytesize : string.length\n end", "def byte_string(byte, length)\n\t([byte]*length).pack('C*')\nend", "def stringio_length(io)\n io.string.length\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
As a user, if 'modify' is selected, I am prompted to enter an id for the contact to be modified. As a user, when an id is entered, I am prompted to type 'yes' or 'no' to confirm my selection. As a user, if 'yes' is typed, I am prompted to change 'firstname', 'lastname', 'email' or 'notes' by number. You shouldn't be able to change the 'id'.
def modify_contact puts "Please enter the id of the contact to modify: " id = gets.chomp.to_i puts "Please confirm 'yes' or 'no' to modify this contact: " puts @rolodex.display_contact(id) answer = gets.chomp.downcase if answer == 'yes' puts "Please enter the number of the attribute you would like to edit: \n 1. First Name \n 2. Last Namme \n 3. Email \n 4. Note" option = gets.chomp.to_i puts "Please provide the edit:" new_attribute = gets.chomp @rolodex.modify_contact(id, option, new_attribute) puts "Edit complete: " puts @rolodex.display_contact(id) elsif answer == 'no' return else puts "That is not a valid answer, please try again." end end
[ "def modify_option(number)\n\t\t# if number == 1\n\t\t# \tputs \"Please enter new first name\"\n\t # \t\tfname = gets.chomp\n\t # \t\tputs @fname\n\t # \t\tconfirm_modify\n\n\t # \t\t@fname=fname\n\n\t # \telsif number == 2\n\t # \t\tputs \"Please enter new last name\"\n\t # \t\tlname = gets.chomp\n\t # \t\tconfirm_modify\n\t # \telsif number == 3\n\t # \t\tputs \"Please enter new email\"\n\t # \t\temail = gets.chomp\n\t # \t\tconfirm_modify\n\t # \telsif number == 4\n\t # \t\tputs \"Please enter new note\"\n\t # \t\tnote = gets.chomp\n\t # \t\tconfirm_modify\n\t # \telse\n\t # \t\tinput_validator\n\t # \t\tmodify_contact\n\t # \tend\n\tend", "def modify_contact\n contact = retrieve_contact_by_email\n if contact\n attr_code = attr_menu\n if attr_code\n print \"You have chosen to change the contact's #{@@attr_ops[attr_code]}. Is that correct? \"\n confirm = gets.chomp.downcase\n if confirm == 'yes'\n print \"Please provide the new value for #{@@attr_ops[attr_code]}: \"\n new_value = gets.chomp\n contact = @rolodex.modify(contact, attr_code, new_value)\n if contact\n puts \"Contact successfully updated:\\n#{contact}\\n\"\n end\n elsif confirm == 'no'\n print \"Update canceled. \"\n else\n puts \"Error: only 'yes' and 'no' are valid responses.\"\n end\n end\n end \n return contact\n end", "def modify_contact\n # prompt user for id\n puts \"What id to modify?\"\n id = gets.chomp.to_i\n # get contact from rolodex\n contact = @rolodex.get_contact_by_id(id)\n # call modify\n modify(contact)\n end", "def edit_client\n prompt = TTY::Prompt.new(symbols: {marker: \">\"})\n system('clear')\n puts \"Editing client #{@name} - ID #{@id}\\n\\n\\n\"\n @name = prompt.ask(\"Name:\", default: @name) do |q|\n q.validate(/^[\\w ]+$/)\n q.messages[:valid?] = \"Invalid Name, must be alphanumeric\"\n end\n @phone = prompt.ask(\"Phone number:\", default: @phone) do |q|\n q.validate(/^\\d+$/)\n q.messages[:valid?] = \"Invalid Phone number, must be numeric\"\n end\n @email = prompt.ask(\"Email address:\", default: @email) do |q|\n q.validate(/^[\\w\\-\\.]+@([\\w\\-]+\\.)+[\\w\\-]{2,4}$/)\n q.messages[:valid?] = \"Invalid Email address format (must be something@domain.tld)\"\n end\n save()\n end", "def edit_patron(selected)\n\tedit_patron_name(selected)\n\tedit_patron_email_address(selected)\nend", "def show_edit_menu\n puts \" You are now in edit mode\"\n puts \" edit name - edit the name of this contact\"\n puts \" edit email - edit the email of this contact\"\n puts \" add phone - add a phone number to this contact\"\n puts \" edit importance - edit the importance of this contact\"\n print \"> \"\n end", "def edit_data\n puts \"1-add student\"\n puts \"2-delete student\"\n puts \"3-edit student data\"\n user_selection = gets.chomp\n case user_selection\n when \"1\"\n add_student\n when \"2\"\n delete_student\n when \"3\"\n edit_student_data\n else\n puts @dash+\"error-invalid-selection\"+@dash\n user_menu\n end\n end", "def edit_patron_email_address(selected)\n\tp = Patron.find(selected)\n\tprint \"To edit the patron email address please enter here: \"\n\temail_address = gets.chomp\n\tp.update_attributes(email_address: email_address)\nend", "def prompt_contact_id\n print \"\\nEnter contact I.D.:\".green\n while true\n contact_id = gets.chomp.to_i\n if contact_id == 0\n puts \"You didn't enter an integer.\".red\n print \"Please enter I.D. number as an integer: \".green\n\n else\n puts \"You entered contact I.D. -\".dark_grey + \" #{contact_id}\"\n print \"Confirm I.D. 
entry ('yes' or 'no'): \".yellow\n user_contact_id_conformation = gets.chomp.downcase\n\n if user_contact_id_conformation == 'yes'\n return contact_id\n elsif user_contact_id_conformation == 'no'\n print \"Re-enter contact I.D.:\".green\n else\n print \"You have neither entered 'yes' or 'no'. Please re-enter contact's I.D. and try again:\".red\n end\n\n end\n end\n end", "def edit_patron_email(selected_patron, model)\n print \"New email: >>\"\n email = gets.chomp\n saved = selected_patron.update_attributes(email: email)\n record_save_result(saved, selected_patron, model)\nend", "def edit_patron_record(selected_patron, model)\n selection = \"\"\n while selection != \"back\"\n puts \"\\n\\n --- Edit #{selected_patron.name} ---\\n\\n\"\n print \"What would you like to edit?\\n\"\n print \"#{selected_patron.record_edit_display}\\nBack. Go back to selected patron\\n >>\"\n selection = gets.chomp.downcase\n selection = valid_selection(selection, [1,2,3])\n case selection\n when \"1\"\n edit_patron_name(selected_patron, model)\n when \"2\"\n edit_patron_email(selected_patron, model)\n when \"3\"\n check_in_or_out_patron(selected_patron, model)\n when \"back\"\n #go back to selected_patron_record\n else\n puts \"Something broke - patron edit record selection\"\n end\n end\nend", "def editRecord\r\n puts \"Enter the ID of the Record you wish to edit: \"\r\n $businessMethods.editToArray gets.chomp,createRecord\r\n menuOptions false\r\n end", "def pet_edit(pet)\n if @prompt.select(\"Edit pet name? \", @yes_or_no)\n pet[\"name\"] = @prompt.ask(\"Name: \")do |q|\n # error handling requiring input\n q.required true\n q.validate /[a-z]+/\n # error handling message\n q.messages[:valid?] = \"Name need to start with a letter.\"\n q.messages[:required?] = \"Required pet name\"\n q.modify :capitalize\n end\n end\n\n if @prompt.select(\"Edit pet age? \", @yes_or_no)\n pet[\"age\"] = @prompt.ask(\"Age: \", convert: :integer) do |q|\n # error handling requiring input\n q.required true\n # error handling message\n q.messages[:required?] = \"Required pet age\"\n q.messages[:convert?] = \"Age has to be a number\"\n end\n end\n\n if @prompt.select(\"Edit Observations? \", @yes_or_no)\n pet[\"observations\"] = @prompt.ask(\"Observations: \") do |q|\n q.modify :capitalize\n end\n end\n \n @db.edit(\"pets\", pet)\n return pet\n end", "def prompt_edit\n puts 'Enter \"Remove City\" to delete a city from the network'\n puts 'Enter \"Remove Route\" to delete a route from the network'\n puts 'Enter \"Add City\" to add a city to the network'\n puts 'Enter \"Add Route\" to add a route to the network'\n puts 'Enter \"Edit City\" to make changes to a city in the network'\n puts 'Enter \"B\" to go back'\n gets.chomp.downcase\n end", "def edit_book_author(selected)\n\tb = Book.find(selected)\n\tprint \"To edit the book author please enter here: \"\n\tauthor = gets.chomp\n\tb.update_attributes(author: author)\nend", "def edit_patron_name(selected_patron, model)\n print \"New name: >>\"\n name = gets.chomp\n saved = selected_patron.update_attributes(name: name)\n record_save_result(saved, selected_patron, model)\nend", "def ask_questions\r\n\tuser_info = {}\r\n\tputs \"What is the name of the client? (Enter a name)\"\r\n\tuser_info[:name] = gets.chomp\r\n\r\n\tputs \"What is the client's gender? (Enter M or F)\"\r\n\tuser_info[:gender] = gets.chomp.downcase\r\n\r\n\tputs \"What is the age of client #{user_info[:name]}? 
(Enter a number)\"\r\n\tuser_info[:age] = gets.chomp\r\n\r\n\tputs \"How many children does #{user_info[:name]} have? (Enter a number)\"\r\n\tuser_info[:children_count] = gets.chomp\r\n\r\n\tif user_info[:gender] == \"m\"\r\n\t\tputs \"What decor theme does Prince #{user_info[:name]} wish to encorporate?\"\r\n\t\tuser_info[:decor_theme] = gets.chomp\r\n\telse\r\n\t\tputs \"What decor theme does Princess #{user_info[:name]} wish to encorporate?\"\r\n\t\tuser_info[:decor_theme] = gets.chomp\r\n\tend\r\n\r\n\tputs user_info\r\n\r\n\tputs \"Yea wanna edit any of the above information? If not, just type 'none' (Enter name of the Key you wish to edit Ex.'decor_theme')\"\r\n\tedit_want = gets.chomp.downcase.to_sym\r\n\t\r\n\tif edit_want == :none\r\n\t\tputs \"if statement\"\r\n\telse\r\n\t\tputs \"Type your new change for the #{edit_want} field\"\r\n\t\tuser_info[edit_want] = gets.chomp \r\n\tend\r\n\tuser_info[:age] = user_info[:age].to_i\r\n\tuser_info[:children_count] = user_info[:children_count].to_i\r\n\treturn user_info\r\nend", "def edit_ingredients\n @prompt.select(\"Do you want to add or delete ingredients?\") do |menu|\n menu.choice \"Add ingredient(s)\", -> {add_ingredients}\n menu.choice \"Delete ingredient(s)\", -> {delete_ingredients}\n menu.choice \"Back to menu\", -> {nav_menu}\n end\n end", "def edit_user\n return unless has_permission :admin\n @page_title = 'Change Name/Email Address'\n @user = User.find(params[:id])\n case request.method\n when :post\n @user.first_names = params[:user][:first_names]\n @user.last_name = params[:user][:last_name]\n @user.email = params[:user][:email]\n if @user.save\n redirect_to :action => 'edit_user', :id => params[:id] \n flash[:notice] = \"User's info updated\"\n return\n end\n end\nend" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Returns the type of this comment. Inline comments (e.g. `# whatever`) correspond to `:inline`; block comments (e.g. `=begin hi i am a document =end`) correspond to `:document`.
def type case text when /^#/ :inline when /^=begin/ :document end end
[ "def type\n if text.start_with?(\"#\".freeze)\n :inline\n elsif text.start_with?(\"=begin\".freeze)\n :document\n end\n end", "def nodeType\n COMMENT_NODE\n end", "def commentable_type\n read_attribute(:commentable_type) == 'Topic' ? Topic.find(commentable_id).class.to_s : read_attribute(:commentable_type)\n end", "def type\n CMark.node_get_type(@pointer)\n end", "def type\n @type ||= Parser.statement_type(statement)\n end", "def type\n @type = begin\n if path_as_markdown.exist?\n :markdown\n elsif path_as_erb.exist?\n :erb\n else\n :unknown\n end\n end\n end", "def determine_paragraph_type\n @paragraph_type = \\\n case\n when blank?\n :blank\n when definition_list? # order is important! A definition_list is also an unordered_list!\n :definition_term\n when (ordered_list? or unordered_list?)\n :list_item\n when property_drawer_begin_block?\n :property_drawer_begin_block\n when property_drawer_end_block?\n :property_drawer_end_block\n when property_drawer_item?\n :property_drawer_item\n when metadata?\n :metadata\n when block_type\n if block_should_be_exported?\n case block_type.downcase.to_sym\n when :center, :comment, :example, :html, :quote, :src\n block_type.downcase.to_sym\n else\n :comment\n end\n else\n :comment\n end\n when title?\n :title\n when raw_text? # order is important! Raw text can be also a comment\n :raw_text\n when comment?\n :comment\n when table_separator?\n :table_separator\n when table_row?\n :table_row\n when table_header?\n :table_header\n when inline_example?\n :inline_example\n when horizontal_rule?\n :horizontal_rule\n else :paragraph\n end\n end", "def type\n silent = @value[1] == ::Sass::Engine::SASS_COMMENT_CHAR\n loud = !silent && @value[2] == ::Sass::Engine::SASS_LOUD_COMMENT_CHAR\n if silent\n :silent\n elsif loud\n :loud\n else\n :normal\n end\n end", "def type\n return :annotation unless self[:type]\n self[:type].downcase.to_sym\n end", "def type\n self[ :type ] || :text\n end", "def type\n document._type = document.class.name if typed?\n end", "def type\n\t\treturn WordNet::Synset.linktype_table[ self.linkid ][ :type ]\n\tend", "def document_type\n instance.class.tire.document_type\n end", "def is_type? type\n type == @document.type\n end", "def comment?\n type == COMMENT_NODE\n end", "def type\n # Default to 'text/plain'\n if @media_type_raw.nil? or @media_subtype_raw.nil?\n 'text/plain'\n else\n \"#{@media_type_raw}/#{@media_subtype_raw}\".downcase\n end\n end", "def type\n DocInfoClasses.key(self.class)\n end", "def content_type\n return @entry_data[:content_type] unless @entry_data == nil\n content = @entry.css('content')\n content.empty? ? \"\" : content[0]['type']\n end", "def document_type_class_name\n @document_type\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Compares comments. Two comments are identical if they correspond to the same source range.
def ==(other) other.is_a?(Source::Comment) && @location == other.location end
[ "def compareCommentNodes(n1, n2, opts, differences, status = EQUIVALENT)\n return true if opts[:ignore_comments]\n t1 = n1.content\n t2 = n2.content\n if opts[:collapse_whitespace]\n t1 = collapse(t1)\n t2 = collapse(t2)\n end\n unless t1 == t2\n status = UNEQUAL_COMMENTS\n addDifference(n1.parent, n2.parent, t1, t2, opts, differences)\n end\n status\n end", "def old_comments\n\t# copy comments\n\tloop {\n\t $old_line = r_old\n\t break if $old_line.eof\n\n\t\t# copy all comments, but skip labels\n\t if $old_line.not_dis\n\t\tif $old_line.raw !~ />:/\n\t\t puts $old_line.raw\n\t\tend\n\t\tnext\n\t end\n\n\t break if $old_line.addr >= $new_line.addr\n\t}\nend", "def ==( other )\n other.kind_of? Comment and\n (other <=> self) == 0\n end", "def old_comments_OLD\n\t# copy comments\n\tloop {\n\t $old_line = r_old\n\t break if $old_line.eof\n\n # copy all comments, but skip labels\n\t if $old_line.not_dis\n if $old_line.raw !~ />:/\n puts $old_line.raw\n end\n next\n\t end\n\n\t break if $old_line.addr >= $new_line.addr\n\t}\nend", "def adjacent_comments(comments, buffer)\n comments = comments.sort_by { |comment| comment.location.begin_pos }\n current_chunk = 0\n last_line_seen = -100\n chunks_to_comment = comments.chunk do |comment|\n line = comment.location.begin_pos.line\n if last_line_seen.next == line\n last_line_seen = line\n current_chunk\n else\n last_line_seen = line\n current_chunk += 1\n end\n end\n chunks_to_comment.map &:last\n end", "def check_conflict_and_comment()\n results = check_conflict()\n\n results.each do |result|\n next if result[:mergeable]\n message = \"<p>This PR conflicts with <a href=\\\"#{result[:pull_request][:html_url]}\\\">##{result[:pull_request][:number]}</a>.</p>\"\n table = '<table><thead><tr><th width=\"100%\">File</th><th>Line</th></tr></thead><tbody>' + result[:conflicts].map do |conflict|\n file = conflict[:file]\n line = conflict[:line]\n line_link = \"#{result[:pull_request][:head][:repo][:html_url]}/blob/#{result[:pull_request][:head][:ref]}/#{file}#L#{line}\"\n \"<tr><td>#{file}</td><td><a href=\\\"#{line_link}\\\">#L#{line}</a></td></tr>\"\n end.join('') + '</tbody></table>'\n puts (message + table)\n warn(\"<div>\" + message + table + \"</div>\")\n end\n\n results\n end", "def comments_range; end", "def parse_with_comments(source_buffer); end", "def adjust_comments\n scan_tokens do |prev, token, post, i|\n next unless token[0] == :COMMENT\n before, after = @tokens[i - 2], @tokens[i + 2]\n if before && after &&\n ((before[0] == :INDENT && after[0] == :OUTDENT) ||\n (before[0] == :OUTDENT && after[0] == :INDENT)) &&\n before[1] == after[1]\n @tokens.delete_at(i + 2)\n @tokens.delete_at(i - 2)\n elsif ![\"\\n\", :INDENT, :OUTDENT].include?(prev[0])\n @tokens.insert(i, [\"\\n\", Value.new(\"\\n\", token[1].line)])\n end\n end\n end", "def pair_comments(post)\n replacements = post.replacements.order(id: :asc).to_a\n comments = post.comments.where(creator: User.system).where_regex(:body, \"replaced this post\").order(id: :asc).to_a\n pairs = []\n\n while replacements.present? && comments.present?\n # Take the Cartesian product of all possible replacement-comment pairs, filter for matching pairs, and take the pair\n # with the closest matching created_at times.\n replacement, comment = replacements.product(comments).select do |replacement, comment|\n data = parse_comment(comment)\n\n if replacement.old_md5.present? 
&& replacement.md5.present?\n data.old_source == replacement.original_url && data.replacer == replacement.creator && data.old_md5 == replacement.old_md5 && data.new_md5 == replacement.md5\n elsif replacement.old_md5.present?\n data.old_source == replacement.original_url && data.replacer == replacement.creator && data.old_md5 == replacement.old_md5\n else\n data.old_source == replacement.original_url && data.replacer == replacement.creator\n end\n end.min_by do |replacement, comment|\n [(replacement.updated_at - comment.created_at).abs, replacement.id, comment.id]\n end\n\n pairs << [replacement, comment]\n replacements -= [replacement]\n comments -= [comment]\n end\n\n [pairs, replacements]\nend", "def test_html_check_comment_text\n ct = CodeTerminator::Html.new\n p \"3 test if text in comment is the same as the text of comment in code\"\n p errors = ct.match(\"exercises/html/check_comment_text.html\",\"<html><head></head><body><!-- This is a comment --></body></html>\")\n assert_equal errors.empty? , true\n end", "def diff_comment?(activity)\n commented?(activity) && \\\n activity.key?(\"commentAnchor\") && \\\n activity.key?(\"diff\")\n end", "def autocorrect_preceding_comments(corrector, comment); end", "def comments\n block.split(\"\\n\").select{|l| l =~ /^\\s*#/}.map{|l| l.gsub(/^\\s*#\\s*/, \"\").strip}\n end", "def formatComments(theLines)\n\n\ttheLines.each_with_index do |theLine, theIndex|\n\n\t\tif (theIndex >= 2)\n\t\t\n\t\t\tline0 = theLines[theIndex - 2];\n\t\t\tline1 = theLines[theIndex - 1];\n\t\t\tline2 = theLines[theIndex - 0];\n\t\t\t\n\t\t\tif (line0[:text].empty? &&\n\t\t\t\tline1[:text].empty? &&\n\t\t\t\tline2[:text].empty? &&\n\n\t\t\t\tline0[:comment] =~ /^\\/\\/==========+/ &&\n\t\t\t\tline2[:comment] =~ /^\\/\\/----------+/ &&\n\t\t\t\tline1[:comment] =~ /^\\/\\/\\s*(.*)/ )\n\n\n\t\t\t\t# Fix up standard blocks\n\t\t\t\ttheComment = $1;\n\t\t\t\tcommentKey = $1.chomp('.').downcase;\n\n\t\t\t\tif (COMMENT_STANDARD.has_value?(theComment))\n\t\t\t\t\t# Accept as-is\n\n\t\t\t\telsif (COMMENT_STANDARD.has_key?(commentKey))\n\t\t\t\t\ttheComment = COMMENT_STANDARD[commentKey];\n\n\n\n\t\t\t\t# Fix up function blocks\n\t\t\t\telsif (theComment =~ /^(.*)\\s:\\s(.*)$/)\n\t\t\t\t\ttheName = $1.strip;\n\t\t\t\t\ttheDesc = $2.strip.chomp(\".\");\n\n\n\t\t\t\t\t# Normalise the description\n\t\t\t\t\tif (theName =~ /^(\\w+)::\\1$/)\n\t\t\t\t\t\ttheDesc = \"Constructor.\";\n\t\t\t\t\t\n\t\t\t\t\telsif (theName =~ /^(\\w+)::~\\1$/)\n\t\t\t\t\t\ttheDesc = \"Destructor.\";\n\t\t\t\t\t\n\t\t\t\t\telsif (theDesc.empty?)\n\t\t\t\t\t\ttheDesc = \"MISSING DESCRIPTION.\";\n\t\t\t\t\tend\n\t\t\t\t\n\t\t\t\t\tif (!theDesc.end_with?(\".\", \"?\", \"!\"))\n\t\t\t\t\t\ttheDesc += \".\";\n\t\t\t\t\tend\n\t\t\t\t\t\n\t\t\t\t\ttheComment = theName + \" : \" + theDesc;\n\t\t\t\tend\n\n\n\n\t\t\t\t# Rewrite the the comment\n\t\t\t\ttheLines[theIndex - 2][:comment] = COMMENT_BAR_TOP;\n\t\t\t\ttheLines[theIndex - 1][:comment] = COMMENT_BAR_CONTENT + theComment;\n\t\t\t\ttheLines[theIndex - 0][:comment] = COMMENT_BAR_BOTTOM;\n\t\t\tend\n\t\t\n\t\tend\n\tend\n\n\treturn theLines;\n\nend", "def adjust_comments\n scan_tokens do |prev, token, post, i|\n next 1 unless token[0] == :COMMENT\n before, after = @tokens[i - 2], @tokens[i + 2]\n if before && after &&\n ((before[0] == :INDENT && after[0] == :OUTDENT) ||\n (before[0] == :OUTDENT && after[0] == :INDENT)) &&\n before[1] == after[1]\n @tokens.delete_at(i + 2)\n @tokens.delete_at(i - 2)\n next 0\n elsif prev[0] == \"\\n\" && 
[:INDENT].include?(after[0])\n @tokens.delete_at(i + 2)\n @tokens[i - 1] = after\n next 1\n elsif ![\"\\n\", :INDENT, :OUTDENT].include?(prev[0])\n @tokens.insert(i, [\"\\n\", Value.new(\"\\n\", token[1].line)])\n next 2\n else\n next 1\n end\n end\n end", "def stringify_comment_array comments\n ctxt = String.new('')\n started = false\n skip = nil\n comments.lines.each { |l|\n # Trim the comment and minimum leading whitespace\n p = l.force_encoding('UTF-8').encode('UTF-8', invalid: :replace, replace: '?').gsub(/^#+/, '')\n if p.strip.empty?\n next unless started\n ctxt.concat p\n else\n here = p.index(/[^ \\t]/)\n skip = here if skip.nil? || here < skip\n ctxt.concat p[skip..-1]\n end\n started = true\n }\n ctxt\n end", "def foldable_comment_block_ranges; end", "def compare_docstring_tags(d1, d2); end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
HTTParty raises errors after the time limit defined by ::default_timeout: if it cannot connect to the server, it raises Net::OpenTimeout; if it cannot read the response from the server, it raises Net::ReadTimeout.
def handle_timeouts yield rescue Net::OpenTimeout, Net::ReadTimeout {} end
[ "def http_read_timeout\n @http_read_timeout ||= 5\n end", "def http_open_timeout\n @http_open_timeout ||= 2\n end", "def check_for_timeouts!\n return if (!@timeout_at or Time.now < @timeout_at or @timed_out)\n\n @timed_out = true\n @timeout_at = nil\n\n if (@connected and @active_message)\n message_callback(:timeout, \"Response timed out before send could complete\")\n error_notification(:timeout, \"Response timed out\")\n debug_notification(:timeout, \"Response timed out\")\n send_callback(:on_error)\n elsif (!@connected)\n remote_options = @options\n interpreter = @interpreter\n\n if (self.proxy_connection_initiated?)\n remote_options = @options[:proxy]\n end\n\n message = \"Timed out before a connection could be established to #{remote_options[:host]}:#{remote_options[:port]}\"\n\n if (interpreter)\n message << \" using #{interpreter.label}\"\n end\n\n connect_notification(false, message)\n debug_notification(:timeout, message)\n error_notification(:timeout, message)\n\n send_callback(:on_error)\n else\n interpreter = @interpreter\n\n if (interpreter and interpreter.respond_to?(:close))\n interpreter.close\n else\n send_callback(:on_disconnect)\n end\n end\n\n self.close_connection\n end", "def check_for_timeouts!\n return if (!@timeout_at or Time.now < @timeout_at or @timed_out)\n\n @timed_out = true\n @timeout_at = nil\n\n if (@connected and @active_message)\n message_callback(:timeout, \"Response timed out before send could complete\")\n error_notification(:timeout, \"Response timed out\")\n debug_notification(:timeout, \"Response timed out\")\n send_callback(:on_error)\n elsif (!@connected)\n remote_options = @options\n interpreter = @interpreter\n \n if (@connecting_to_proxy)\n remote_options = @options[:proxy]\n end\n \n message = \"Timed out before a connection could be established to #{remote_options[:host]}:#{remote_options[:port]}\"\n \n if (interpreter)\n message << \" using #{interpreter.label}\"\n end\n \n connect_notification(false, message)\n debug_notification(:timeout, message)\n error_notification(:timeout, message)\n\n send_callback(:on_error)\n else\n interpreter = @interpreter\n\n if (interpreter and interpreter.respond_to?(:close))\n interpreter.close\n else\n send_callback(:on_disconnect)\n end\n end\n\n close_connection\n end", "def configure_timeout(options, env)\n req = request_options(env)\n options[:inactivity_timeout] = request_timeout(:read, req)\n options[:connect_timeout] = request_timeout(:open, req)\n end", "def read_timeout=(timeout); end", "def http_With_Timeout(url)\n begin\n Timeout::timeout(5) do\n return JSON.parse(Net::HTTP.get(URI(url)))\n end\n rescue Timeout::Error\n return false\n end\n end", "def handle_timed_out_requests\n while @timeout && !@waiting.empty? 
&& (Time.now - @waiting_since.first) >= @timeout\n waiting_since = @waiting_since.shift\n @waiting.shift.resolve(TimeoutError.new(\"Waited %<secs>f seconds\" % { secs: Time.now - waiting_since }))\n end\n end", "def set_timeout(http, opts)\n\n to = o(opts, :timeout) || o(opts, :to)\n to = to.to_i\n\n return if to == 0\n\n http.open_timeout = to\n http.read_timeout = to\n end", "def set_pending_connect_timeout(value); end", "def request_with_timeout(req)\n params = {\n timeout: opts[:connect_timeout],\n retry_exceptions: [Errno::ECONNREFUSED],\n log: self.log,\n }\n Ncio::Support::RetryAction.retry_action(params) do\n connection.request(req)\n end\n end", "def test_no_request_retry_when_timeout_between_varnish_and_nginx_timeout\n response = Typhoeus.get(\"http://127.0.0.1:9442/backend_call_count?id=post-between-varnish-timeout\")\n assert_response_code(200, response)\n assert_equal(\"0\", response.body)\n\n response = Typhoeus.get(\"http://127.0.0.1:9080/api/between-varnish-timeout\", http_options)\n assert_response_code(504, response)\n\n # Ensure that the backend has only been called once.\n response = Typhoeus.get(\"http://127.0.0.1:9442/backend_call_count?id=post-between-varnish-timeout\")\n assert_response_code(200, response)\n assert_equal(\"1\", response.body)\n\n # Wait 5 seconds for any possible retry attempts that might be pending, and\n # then ensure the backend has still only been called once.\n sleep 5\n response = Typhoeus.get(\"http://127.0.0.1:9442/backend_call_count?id=post-between-varnish-timeout\")\n assert_response_code(200, response)\n assert_equal(\"1\", response.body)\n end", "def check_timeout\n return if dead? or @socket.closed?\n\n error('connection-timeout') if ($time - @rtime) >= @auth.timeout\n\n self\n end", "def stub_timeout(url)\n stub_request(:get, url).to_timeout\nend", "def read_timeout; end", "def test_no_request_retry_get\n response = Typhoeus.get(\"http://127.0.0.1:9442/backend_call_count?id=get-timeout\")\n assert_response_code(200, response)\n assert_equal(\"0\", response.body)\n\n response = Typhoeus.get(\"http://127.0.0.1:9080/api/timeout\", http_options)\n assert_response_code(504, response)\n\n # Ensure that the backend has only been called once.\n response = Typhoeus.get(\"http://127.0.0.1:9442/backend_call_count?id=get-timeout\")\n assert_response_code(200, response)\n assert_equal(\"1\", response.body)\n\n # Wait 5 seconds for any possible retry attempts that might be pending, and\n # then ensure the backend has still only been called once.\n sleep 5\n response = Typhoeus.get(\"http://127.0.0.1:9442/backend_call_count?id=get-timeout\")\n assert_response_code(200, response)\n assert_equal(\"1\", response.body)\n end", "def max_network_retry_delay; end", "def timeout(seconds, &block)\n Timeout.timeout seconds, Pinglish::TooLong, &block\n end", "def read_timeout=(sec); end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
A single-use login link for Express accounts to access their Stripe dashboard
def login_link(**options) ::Stripe::Account.create_login_link(processor_id) rescue ::Stripe::StripeError => e raise Pay::Stripe::Error, e end
[ "def stripe_dashboard\n dashboard_link = Stripe::Account.create_login_link(@current_user.stripe_user_id)\n redirect_to dashboard_link.url\n end", "def dashboard\n account = Stripe::Account.retrieve(current_user.stripe_user_id)\n login_links = account.login_links.create\n redirect_to login_links.url\n end", "def connect\n connector = StripeConnect.new( current_user )\n connect_url, error = connector.connect_url( redirect_uri: confirm_users_url )\n\n if connect_url.nil?\n flash[:error] = error\n redirect_to user_path( current_user )\n else\n redirect_to connect_url\n end\n end", "def create\n if @current_user.stripe_user_id.nil?\n account = Stripe::Account.create({\n type: 'express',\n country: 'US',\n email: `#{@current_user.email}`,\n capabilities: {\n card_payments: {\n 'requested': true\n },\n transfers: { 'requested': true }\n }\n })\n # Store current users stripe id in user database\n @current_user.update_column(:stripe_user_id, account.id)\n\n # Create temporary account link and redirect to express onboarding\n account_link = Stripe::AccountLink.create({\n account: account.id,\n refresh_url: 'http://localhost:3000?/account/refresh/',\n return_url: 'http://localhost:3000?/account/return/',\n type: 'account_onboarding'\n })\n redirect_to account_link.url\n else\n # Create temporary account link and redirect to express onboarding\n account_link = Stripe::AccountLink.create({\n account: @current_user.stripe_user_id,\n refresh_url: 'http://localhost:3000?/account/refresh/',\n return_url: 'http://localhost:3000?/account/return/',\n type: 'account_onboarding'\n })\n redirect_to account_link.url\n end\n end", "def standalone\n connector = StripeStandalone.new(current_customer)\n account = connector.create_account!(params[:country])\n\n if account\n flash[:notice] = \"Standalone StripeAccount account created! <a target='_blank' rel='platform-account' href='https://dashboard.stripe.com/test/applications/users/#{account.id}'>View in dashboard &raquo;</a>\"\n else\n flash[:alert] = 'Unable to create StripeAccount account!'\n end\n redirect_to customer_path(current_customer)\n end", "def stripe_link\n @merchant = merchant_params\n authorize @merchant\n \tif (@merchant.stripe_id == nil)\n \t\trender json: {url: stripe_connect_url, type: \"connect\"}, status: :ok\n \telse\n \t begin \n \t\t render json: {url: stripe_dashboard_url(@merchant.stripe_id), type: \"dashboard\"}, status: :ok\n \t rescue Exception => e\n \t Log.create(context: \"stripe_link\", current_user: @merchant.id, message: e.message, log_type: Log::ERROR)\n \t render json: {error: \"Error retrieving stripe link\"}, status: :internal_server_error\n \t end\n \tend\n end", "def stripe_connect_button\n link_to stripe_url, class: \"btn btn-primary btn-block\" do\n content_tag :span, \"Connect with Stripe\"\n end\n end", "def goto_login_page\n\tvisit('http://nickcornovan.com/wp-admin')\nend", "def idt_login\n show do\n title \"Prepare to order primer\"\n \n check \"Go to the <a href='https://www.idtdna.com/site/account' target='_blank'>IDT website</a>, log in with the lab account. (Username: #{IDT_USER}, password is #{IDT_PASS}).\"\n warning \"Ensure that you are logged in to this exact username and password!\"\n end\n end", "def stripe_plan_url\n return nil if stripe_plan_id.nil?\n\n \"https://dashboard.stripe.com/#{ Rails.env.production? ? 
'' : 'test/' }plans/#{ stripe_plan_id }\"\n end", "def web\n _login(false)\n end", "def login_into_alchemy\n visit '/alchemy/admin/login'\n fill_in('alchemy_user_session_login', :with => 'jdoe')\n fill_in('alchemy_user_session_password', :with => 's3cr3t')\n click_on('Login')\n end", "def log_in\n visit \"https://www.vodafone.es/autonomos/es/\"\n within '#loginForm' do\n fill_in \"uuid\", :with => @user\n fill_in \"password\", :with => @password\n click_link(\"Entrar\")\n end\n end", "def login\n options = {\n type: 'OneView',\n file_env_var: 'ONEVIEW_AUTH_FILE',\n env_var_url: 'ONEVIEW_URL',\n filename: '/login.json'\n }\n credentials = load_authentication_settings(options)\n credentials[:hardware_variant] ||= 'C7000'\n credentials[:hardware_variant] = credentials[:hardware_variant].to_s.capitalize\n credentials\nend", "def stripe_account; end", "def login\n redirect_to '/auth/azureactivedirectory'\n end", "def sign_in_admin\n get \"/en/users/sign_in\"\n post '/en/users/sign_in', 'user[email]' => 'admin@test.com', 'user[password]' => '12345678'\n follow_redirect!\n end", "def signin\n click_signin_link\n submit_credentials VALID_USERNAME, PASSWORD\nend", "def update\n if @current_user.stripe_user_id.present?\n\n # Create temporary account link and redirect to express onboarding\n account_link = Stripe::AccountLink.create({\n account: @current_user.stripe_user_id,\n refresh_url: 'http://localhost:3000?/account/refresh/',\n return_url: 'http://localhost:3000?/account/return/',\n type: 'account_onboarding'\n })\n redirect_to account_link.url\n end\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
converts 0-terminated ASCII string to ruby string
def asciiz_to_str(asciiz)
  zero_byte_idx = asciiz.index("\x00")
  if zero_byte_idx != nil
    return asciiz[0, zero_byte_idx]
  else
    return asciiz
  end
end
[ "def cstring(off=0)\n self[ off, (self.index(\"\\x00\") || self.size) ]\n end", "def string(str)\n TYPE_STRING +\n word(str.length) +\n str.encode!(\"ASCII\")\n end", "def read_string io\n buf = ''\n while (c = io.read(1)) != \"\\0\"\n buf += c\n end\n buf\n end", "def weirdFixString(str)\n idx = str.index(\"\\000\\000\")\n idx.nil? ? str : str[0..idx]\n end", "def read_string(io, chars=nil)\n if chars\n io.read chars\n else\n str = ''\n while c = io.read(1)\n break if c == \"\\x00\"\n str += c\n end\n\n str\n end\n end", "def decode_string(bytes)\n bytes.map(&:chr)\n .join\n .gsub(/#{0.chr}*$/, '')\n end", "def clean_str(s)\n \tclean_s = \"\"\n \ts.each_byte {|c| clean_s += \"#{c.chr}\" unless c == 0}\n \tclean_s\n end", "def cstring(off=0)\n self[ off, (self.index(\"\\x00\") || self.size) ]\n end", "def read_ascii(cursor, size)\n return @data[cursor..cursor+size-1].gsub(TRAILING_00_REGEXP, '').strip\n end", "def read_ascii(cursor_begin, cursor_end)\n return @data[cursor_begin..cursor_end].gsub(TRAILING_00_REGEXP, '').strip\n end", "def to_ascii8(str)\n str.force_encoding(\"ASCII-8BIT\")\nend", "def get_nul_terminated\n raise IncompleteBufferException if not @buffy[@idx..-1] =~ /^([^\\0]*)\\0/\n str = $1\n raise IncompleteBufferException if @idx + str.size + 1 > @buffy.size\n @idx += str.size + 1\n str\n end", "def read_unicode_string(length=nil)\n length ||= read_int if length.nil?\n return '' if length.nil? || length <= 0\n read(length * 2)\n .encode('UTF-8', 'UTF-16BE', universal_newline: true)\n .delete(\"\\000\")\n end", "def quoted_printable_decode(str)\n str.unpack(\"M*\").first\n end", "def make_readable(string)\n # See String#encode\n encoding_options = {\n :invalid => :replace, # Replace invalid byte sequences\n :undef => :replace, # Replace anything not defined in ASCII\n :replace => '', # Use a blank for those replacements\n :universal_newline => true # Always break lines with \\n\n }\n return string.encode(Encoding.find('ASCII'), encoding_options)\n end", "def escape_bytea(str)\n # each_byte used instead of [] for 1.9 compatibility\n str.gsub(/[\\000-\\037\\047\\134\\177-\\377]/n){|b| \"\\\\#{sprintf('%o', b.each_byte{|x| break x}).rjust(3, '0')}\"}\n end", "def read_pstring\n len = read_byte\n str = ''\n read_bytes len do |byte|\n str << byte.chr\n end\n str\n end", "def string(str)\n old = @pos\n @buffer << str.force_encoding('ASCII-8BIT')\n @pos = @buffer.bytesize\n return @pos, old\n end", "def to_native( str )\n\tstr.dup\nend" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
parses a number param and returns the value
raises an exception if the param cannot be converted to a number
examples:
  nil => 0
  3 => 3
  "MB_OK" => 0
  "SOME_CONSTANT | OTHER_CONSTANT" => 17
  "tuna" => !!!!!!!!!!Exception
Parameter "consts_mgr" is a ConstantManager
def param_to_number(v, consts_mgr = @consts_mgr)
  if v.class == NilClass then
    return 0
  elsif v.kind_of? Integer then
    return v # ok, it's already a number
  elsif v.kind_of? String then
    dw = consts_mgr.parse(v) # might raise an exception
    if dw != nil
      return dw
    else
      raise ArgumentError, "Param #{v} (class #{v.class}) cannot be converted to a number. It's a string but matches no constants I know."
    end
  else
    raise "Param #{v} (class #{v.class}) should be a number but isn't"
  end
end
[ "def getinteger(arg)\n arg.strip!\n if arg.match(/^\\d+$/)\n arg.to_i\n else\n $stderr.puts \"ERROR: Not a number : #{arg}\"\n exit STATE_UNKNOWN\n end\nend", "def parse_numeric_constant\n if peek?(:LIT_INT)\n ExprInt.new(expect(:LIT_INT))\n else\n ExprFloat.new(expect(:LIT_FLOAT))\n end\n end", "def convert_integer(name, value)\n if value.is_a?(String) && /\\A\\d+\\z/ !~ value\n log_warn(\"#{value} is not a valid integer for #{name}\")\n return nil\n end\n\n value.to_i\n end", "def parse_numeric(name)\n return shift if peek.is_a?(Numeric)\n\n unless peek =~ NUMERIC && $& == peek\n raise MalformattedArgumentError, \"Expected numeric value for '#{name}'; got #{peek.inspect}\"\n end\n\n value = $&.index(\".\") ? shift.to_f : shift.to_i\n if @switches.is_a?(Hash) && switch = @switches[name]\n if switch.enum && !switch.enum.include?(value)\n raise MalformattedArgumentError, \"Expected '#{name}' to be one of #{switch.enum.join(', ')}; got #{value}\"\n end\n end\n value\n end", "def number\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 1 )\n value = nil\n __DEC_NUMBER1__ = nil\n __HEX_NUMBER2__ = nil\n\n begin\n # at line 22:2: ( DEC_NUMBER | HEX_NUMBER )\n alt_1 = 2\n look_1_0 = @input.peek( 1 )\n\n if ( look_1_0 == DEC_NUMBER )\n alt_1 = 1\n elsif ( look_1_0 == HEX_NUMBER )\n alt_1 = 2\n else\n raise NoViableAlternative( \"\", 1, 0 )\n end\n case alt_1\n when 1\n # at line 22:4: DEC_NUMBER\n __DEC_NUMBER1__ = match( DEC_NUMBER, TOKENS_FOLLOWING_DEC_NUMBER_IN_number_180 )\n # --> action\n value = __DEC_NUMBER1__.text.to_i \n # <-- action\n\n when 2\n # at line 23:4: HEX_NUMBER\n __HEX_NUMBER2__ = match( HEX_NUMBER, TOKENS_FOLLOWING_HEX_NUMBER_IN_number_187 )\n # --> action\n value = __HEX_NUMBER2__.text[2..-1].to_i(16) \n # <-- action\n\n end\n rescue ANTLR3::Error::RecognitionError => re\n report_error(re)\n recover(re)\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 1 )\n\n end\n \n return value\n end", "def expect_number\r\n error_at(\"expected a number\", $token.pos) if $token.kind != TK_NUM\r\n val = $token.val\r\n $token = $token.next\r\n val\r\nend", "def to_number( word )\n begin\n return Integer( word )\n rescue\n puts $!\n end\n begin\n return Float( word )\n rescue\n puts $!\n end\n nil\n end", "def match_number\n @token.value if match? :FLOAT or match? :INTEGER\n end", "def to_number_or_nil(value)\n # case/when copied from Puppet::Parser::Scope::number?\n case value\n when /^-?\\d+(:?\\.\\d+|(:?\\.\\d+)?e\\d+)$/\n value.to_f\n when /^0x[0-9a-f]+$/i\n value.to_i(16)\n when /^0[0-7]+$/\n value.to_i(8)\n when /^-?\\d+$/\n value.to_i\n else\n nil\n end\n end", "def check_params(n)\n raise 'input must be a number' unless n.is_a?(Numeric)\nend", "def num_const!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in(__method__, 22)\n\n type = NUM_CONST\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 366:12: ( DIGIT )* '.' ( DIGIT )+ ( ( 'E' | 'e' ) ( '+' | '-' )? 
( DIGIT )+ )?\n # at line 366:12: ( DIGIT )*\n loop do # decision 2\n alt_2 = 2\n look_2_0 = @input.peek(1)\n\n if (look_2_0.between?(?0, ?9)) \n alt_2 = 1\n\n end\n case alt_2\n when 1\n # at line 366:12: DIGIT\n digit!\n\n else\n break # out of loop for decision 2\n end\n end # loop for decision 2\n match(?.)\n # at file 366:23: ( DIGIT )+\n match_count_3 = 0\n loop do\n alt_3 = 2\n look_3_0 = @input.peek(1)\n\n if (look_3_0.between?(?0, ?9)) \n alt_3 = 1\n\n end\n case alt_3\n when 1\n # at line 366:23: DIGIT\n digit!\n\n else\n match_count_3 > 0 and break\n eee = EarlyExit(3)\n\n\n raise eee\n end\n match_count_3 += 1\n end\n\n # at line 366:30: ( ( 'E' | 'e' ) ( '+' | '-' )? ( DIGIT )+ )?\n alt_6 = 2\n look_6_0 = @input.peek(1)\n\n if (look_6_0 == ?E || look_6_0 == ?e) \n alt_6 = 1\n end\n case alt_6\n when 1\n # at line 366:31: ( 'E' | 'e' ) ( '+' | '-' )? ( DIGIT )+\n if @input.peek(1) == ?E || @input.peek(1) == ?e\n @input.consume\n else\n mse = MismatchedSet(nil)\n recover(mse)\n raise mse\n end\n\n\n # at line 366:41: ( '+' | '-' )?\n alt_4 = 2\n look_4_0 = @input.peek(1)\n\n if (look_4_0 == ?+ || look_4_0 == ?-) \n alt_4 = 1\n end\n case alt_4\n when 1\n # at line \n if @input.peek(1) == ?+ || @input.peek(1) == ?-\n @input.consume\n else\n mse = MismatchedSet(nil)\n recover(mse)\n raise mse\n end\n\n\n\n end\n # at file 366:52: ( DIGIT )+\n match_count_5 = 0\n loop do\n alt_5 = 2\n look_5_0 = @input.peek(1)\n\n if (look_5_0.between?(?0, ?9)) \n alt_5 = 1\n\n end\n case alt_5\n when 1\n # at line 366:52: DIGIT\n digit!\n\n else\n match_count_5 > 0 and break\n eee = EarlyExit(5)\n\n\n raise eee\n end\n match_count_5 += 1\n end\n\n\n end\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out(__method__, 22)\n\n end", "def is_literal_number?(value); end", "def ensure_number(wannabe_number)\n\n wannabe_number or raise(NilEvaluationError)\n\n if wannabe_number.is_a?(String)\n get_value_from_variable(wannabe_number)\n\n elsif wannabe_number.respond_to?(:number)\n wannabe_number.number\n\n else\n wannabe_number\n end\n end", "def parse_int_param(config, param_name, default_value = nil, min = 0, max = INT_MAX)\n param_value = default_value\n begin\n value = config[param_name]\n param_value = Integer(value) if value\n if !(min..max).include?(param_value)\n raise ConfigException, \"Configuration parameter #{param_name}, #{param_value} must be in #{min}..#{max} range.\"\n end\n rescue ArgumentError\n raise ConfigException, \"Configuration parameter #{param_name}, #{param_value} must be an integer\"\n end\n\n param_value\n end", "def _parse_numeric( str )\n Integer(str) rescue (Float(str) rescue nil)\n end", "def convert_string_to_number(str); end", "def string_to_int(param, default = 0)\n param.nil? ? 
default : Integer(param)\n end", "def number\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 6 )\n return_value = NumberReturnValue.new\n\n # $rule.start = the first token seen before matching\n return_value.start = @input.look\n\n root_0 = nil\n __NUM22__ = nil\n\n tree_for_NUM22 = nil\n stream_NUM = ANTLR3::AST::RewriteRuleTokenStream.new( @adaptor, \"token NUM\" )\n\n begin\n # at line 34:11: NUM\n __NUM22__ = match( NUM, TOKENS_FOLLOWING_NUM_IN_number_199 )\n stream_NUM.add( __NUM22__ )\n # AST Rewrite\n # elements: NUM\n # token labels: \n # rule labels: return_value\n # token list labels: \n # rule list labels: \n # wildcard labels: \n\n return_value.tree = root_0\n stream_return_value = return_value ? subtree_stream( \"rule return_value\", return_value.tree ) : subtree_stream( \"token return_value\" )\n\n root_0 = @adaptor.create_flat_list\n # 34:15: -> ^( NUMBER NUM )\n # at line 34:18: ^( NUMBER NUM )\n root_1 = @adaptor.create_flat_list\n root_1 = @adaptor.become_root( @adaptor.create_from_type( NUMBER, \"NUMBER\" ), root_1 )\n\n @adaptor.add_child( root_1, stream_NUM.next_node )\n\n @adaptor.add_child( root_0, root_1 )\n\n\n\n return_value.tree = root_0\n # - - - - - - - rule clean up - - - - - - - -\n return_value.stop = @input.look( -1 )\n\n\n return_value.tree = @adaptor.rule_post_processing( root_0 )\n @adaptor.set_token_boundaries( return_value.tree, return_value.start, return_value.stop )\n\n rescue ANTLR3::Error::RecognitionError => re\n report_error(re)\n recover(re)\n return_value.tree = @adaptor.create_error_node( @input, return_value.start, @input.look(-1), re )\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 6 )\n\n end\n \n return return_value\n end", "def my_parse_int(string)\n string.respond_to?(:to_i) ? string.to_i : \"NaN\"\nend" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
RESEARCHERS don't have a profile, but they shouldn't have a U.Va. computing id.
def virginia_borrower?
  # @profile !~ /^[a-z]{2,3}([0-9][a-z]{1,2})?$/i
  profile.match?(/Virginia Borrower|Other VA Faculty|Alum/i) || profile.blank?
end
[ "def lib_profile?\n return false if uid.blank?\n\n Rails.cache.fetch(\"people/drupal/#{org_code}/#{uid}\", expires_in: 24.hours) do\n benchmark \"People Page (#{uid})\" do\n response = profile_client.head uid\n\n response.success?\n end\n end\n end", "def find_profile (profile_name)\n # Find the matching profile\n profile_data = @master_data.select { |p, d| p[/^#{profile_name}/] }\n profile_count = profile_data.count\n if profile_count > 1\n puts \"Profile match not specific enough. Please refine match\"\n exit 1\n elsif profile_count < 1\n puts \"Unable to find profile\"\n exit 1\n end\n return profile_data.first\nend", "def profile_not_found\n\n end", "def check_for_non_match(profile)\n raise Exceptions::ServiceException, \"Profile object must not be nil.\" if profile.nil?\n url = Util::Config.get('endpoints.base_url') + sprintf(Util::Config.get('endpoints.check_for_non_match'), profile.id)\n url = build_url(url)\n payload = {\n profile: profile.to_hash\n }.to_json\n response = RestClient.post(url, payload, get_headers())\n JSON.parse(response.body)[\"data\"]\n end", "def orcid_profiles\n q = {'orcidData' => {'$exists' => true, '$not' => {'$size' => 0}} }\n profiles.find(q).to_a\n end", "def has_person_for_solr\n role.person.blank? ? 0 : 1\n end", "def get_volunteers_not_assigned_to_poc(poc_id_param)\n User.find_by_sql(\"select distinct u.* from users u join reports_tos rt on\n (rt.user_id = u.id) join user_role_maps rm on (u.id = rm.user_id) join roles r on\n (rm.role_id = r.id) where rm.user_id != \" + poc_id_param + \" and r.role = '\" + Role.VOLUNTEER.to_s + \"' and u.is_deleted=0\");\n end", "def profile_except_wizard_completion_count\n [self.bio, self.custom_url, self.school_id, self.majors, self.gpa, self.gender, self.extracurriculars].reject { |member| member.blank? }.count\n end", "def inactive_reviewers\n unless @results_with_inactive_users\n @results_with_inactive_users =\n self.design_review_results.select { |drr| drr.reviewer && !drr.reviewer.active? }\n end\n @results_with_inactive_users.collect { |drr| drr.reviewer }\n end", "def has_profile?\n vips_image.get_fields.include?('icc-profile-data')\n end", "def profile_search?\n profile_search_term.present?\n end", "def for email\n reject do |reviewer|\n reviewer.emails.include? email\n end\n end", "def has_only_profile?\n profile.present? && tag_name.blank?\n end", "def validate_candidate_data(profile_info, search_info)\n validation_result = [\n validate_string(profile_info, search_info,:name),\n validate_string(profile_info, search_info,:title),\n validate_string(profile_info, search_info,:hourly_rate),\n validate_string(profile_info, search_info,:earnings),\n validate_string(profile_info, search_info,:has_badge),\n validate_string(profile_info, search_info,:success_rate),\n validate_string(profile_info, search_info,:location),\n validate_description(profile_info, search_info, :description),\n validate_empty(profile_info, search_info,:skill_list)\n ].all?\n raise('Validation failed.') unless validation_result\n puts 'Random profile validation passed.' if validation_result\n end_section\n end", "def research_starters_match_in_profile\n @info.available_related_content_types.include? 'rs'\n end", "def orcid_search(profile, pubs=nil)\n # ORCID API search is case insensitive\n legal_ln = profile['names']['legal']['lastName'].downcase\n orcids4name = orcid_search_by_name(legal_ln)\n # TODO: If CAP provides an alternate last name (like a maiden name), issue\n # an additional ORCID API query for it. 
As of Nov 2015, there is no\n # CAP data for alternative last name (e.g. maiden name).\n #\n # Use one search query to get as many ORCIDS matching last name as possible,\n # then filter them by first name; avoid multiple ORCID API requests on\n # variations in first names (with or without middle names or abbreviations).\n orcid_matches = orcid_filter(profile, orcids4name)\n if orcid_matches.empty? && pubs\n # Try to find an ORCID using publication data\n unless pubs['publications'].empty?\n articles = pubs['publications'].select {|p| p['type'] == 'article' }\n pubIds = articles.map {|a| {pmid: a['pubMedId'], doi: a['doiId']} }\n pubIds.each do |pub|\n if pub[:pmid]\n orcids4pmid = orcid_search_by_pmid(pub[:pmid])\n orcid_matches = orcid_filter(profile, orcids4pmid)\n unless orcid_matches.empty?\n msg = \"Found ORCID using publication PMID: #{pub[:pmid]}\\n\"\n orcid_search_logger.info(msg)\n break\n end\n end\n if pub[:doi]\n orcids4doi = orcid_search_by_doi(pub[:doi])\n orcid_matches = orcid_filter(profile, orcids4doi)\n unless orcid_matches.empty?\n msg = \"Found ORCID using publication DOI: #{pub[:doi]}\\n\"\n orcid_search_logger.info(msg)\n break\n end\n end\n end\n end\n end\n orcid_matches\nend", "def vrn_not_found\n @search = params[:vrn]\n end", "def reviewer; fund_source ? fund_source.organization : nil; end", "def reviewer\n User.find(self.reviewer_id)\n rescue\n nil\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Indicate whether the user can request item scanning.
def can_request_scanning?
  true # TODO: Should this be !virginia_borrower? ?
end
[ "def can_user_start?\n item.can_user_start?(user)\n end", "def item_usable?\r\n user.usable?(item) && item_effects_valid?\r\n end", "def control_scanning?\n control_scanning\n end", "def check_if_user_can_perform_action_on_resources\n if @item.is_a?(Typus.user_class)\n check_if_user_can_perform_action_on_user\n elsif typus_user.cannot?(params[:action], @resource.model_name)\n not_allowed\n end\n end", "def permit_available?\n permits_available >= 1\n end", "def can_pass?(inventory)\n inventory.include?(@required_item)\n end", "def granted?\n deg_status == 9\n end", "def can_activate?(item)\r\n item.activatable_by?(self)\r\n end", "def check_if_user_can_perform_action_on_resources\n if @item && @item.is_a?(Typus.user_class)\n check_if_user_can_perform_action_on_user\n else\n not_allowed if admin_user.cannot?(params[:action], @resource.model_name)\n end\n end", "def allow?(item)\n @whitelist.include?(item) || !@blacklist.include?(item)\n end", "def check_item_condition?\n # disallow usage if item button disabled\n return false if !$game_system.item_button\n # disallow usage\n item_condition = false\n # if using direct hotkeys\n if BlizzABS::Config::DIRECT_HOTKEYS\n # check direct hotkeys\n item_condition = self.item_hotkeys?\n # if item button pressed\n elsif Input.trigger?(Input::Item)\n # allow usage\n item_condition = true\n end\n # return result\n return item_condition\n end", "def can_activate?(item)\n item.activatable_by?(self)\n end", "def can_buy?(item)\r\n item.buyable_by?(self)\r\n end", "def is_available?\n count_available > 0\n end", "def can_buy?(item)\n item.buyable_by?(self)\n end", "def can_send_request?\n !self.suspended? and self.is_paid? and self.is_still_paid? and self.is_available?\n end", "def granted_to(action, item)\n case action\n when :create\n # item may nil if action is :create\n return false unless logged_in?\n return true if permit? 'developer'\n when :show\n return true\n when :update\n return false unless logged_in?\n return true if permit? 'superadmin | developer'\n when :destroy\n return false unless logged_in?\n return false unless item.can_destroyed?\n return true if permit? 'developer'\n end\n # otherwise prevent action\n return false\n end", "def user_may_view_item?\n @pcp_item.pcp_step.released? || @pcp_item.pcp_step.in_commenting_group?\n end", "def request_permitted?(item)\n true\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
GET /skydata/1 GET /skydata/1.json
def show
  @skydatum = Skydatum.find(params[:id])

  respond_to do |format|
    format.html # show.html.erb
    format.json { render json: @skydatum }
  end
end
[ "def get_surf_data\n url = \"http://magicseaweed.com/api/#{ENV['MAGIC_SEAWEED_API_KEY']}/forecast/?spot_id=6128&units=UK\"\n uri = URI(url)\n\n response = Net::HTTP.get(uri)\n ActiveSupport::JSON.decode(response) if response != ''\n end", "def retrieve_jon_snow_data\n # Fetching data from the API\n @got_data = ParseJson.new(HTTParty::get('https://anapioficeandfire.com/api/characters/583').body).json_data\n end", "def get_data\n uri = URI(\"https://taskboardv2.herokuapp.com/station1.json\")\n http = Net::HTTP.new(uri.host, uri.port)\n http.use_ssl = true\n response = http.request(Net::HTTP::Get.new(uri.request_uri))\n JSON.parse(response.body)\nend", "def index\n @skies = Sky.all\n end", "def weather_json(url)\r\n if @debug\r\n sample_json\r\n else\r\n open(url).read\r\n end\r\n end", "def index\n @weather_station = WeatherStation.find(params[:weather_station_id])\n @meteo_data = []\n @meteo_data = @weather_station.meteo_datums.order('created_at desc').limit(100) unless @weather_station.blank?\n\n render 'api/v1/meteo_data/index', params: [@weather_station, @meteo_data]\n end", "def index\n @datapoints = Datapoint.all\n\n render json: @datapoints\n end", "def show\n render 'api/v1/meteo_data/show', params: @meteo_datum\n end", "def new\n @skydatum = Skydatum.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @skydatum }\n end\n end", "def show\n @fotky = Fotky.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @fotky }\n end\n end", "def index\n @skyfarers = Skyfarer.all\n end", "def show\n @dataset = Dataset.find(params[:id])\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @dataset }\n end\n end", "def show\n @dataset = Dataset.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @dataset }\n end\n end", "def get_client_throughput_time_series_data(args = {}) \n get(\"/clients.json/stats/throughput\", args)\nend", "def accl_data\n @response = HTTParty.get(\"http://192.168.20.30/machine/data\")\n render json: @response\n end", "def show\n @sensor = Sensor.find_by_sensor_id(params[:id])\n\n render json: @sensor\n end", "def show\n @traces = Trace.find_by(id: params[:id])\n url_string = ''\n response = HTTParty.get(url_string)\n data = {\n \"latitude\": @traces.latitude,\n \"longitude\": @traces.longitude,\n \"elevation\": response.body.to_i\n }\n render json: data\n end", "def show\n @water_info = WaterInfo.find(params[:id])\n\n\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @water_info }\n end\n end", "def show\n @earth = Earth.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render :json => @earth }\n end\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
GET /skydata/new GET /skydata/new.json
def new
  @skydatum = Skydatum.new

  respond_to do |format|
    format.html # new.html.erb
    format.json { render json: @skydatum }
  end
end
[ "def new\n @earth = Earth.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render :json => @earth }\n end\n end", "def new\n @cloud = Cloud.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @cloud }\n end\n end", "def create\n @skydatum = Skydatum.new(params[:skydatum])\n\n respond_to do |format|\n if @skydatum.save\n format.html { redirect_to @skydatum, notice: 'Skydatum was successfully created.' }\n format.json { render json: @skydatum, status: :created, location: @skydatum }\n else\n format.html { render action: \"new\" }\n format.json { render json: @skydatum.errors, status: :unprocessable_entity }\n end\n end\n end", "def new\n @cloud = Cloud.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render :json => @cloud }\n end\n end", "def new\n @getdatum = Getdatum.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @getdatum }\n end\n end", "def new\n @hoge = Hoge.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @hoge }\n end\n end", "def new\n @wood = Wood.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @wood }\n end\n end", "def new\n @sensor = Sensor.new\n\n render json: @sensor\n end", "def new\n @spdatum = Spdatum.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @spdatum }\n end\n end", "def new\n @garbage = Garbage.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @garbage }\n end\n end", "def new\n @dataset = Dataset.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @dataset }\n end\n end", "def new\n @newse = Newse.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @newse }\n end\n end", "def new\n @graphic_datum = GraphicDatum.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @graphic_datum }\n end\n end", "def new\n @server = Server.new\n @creating_new = true\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @server }\n\n end\n end", "def new\n @beasts = Beast.all\n @noise = Noise.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @noise }\n end\n end", "def new\n @raw_datum = RawDatum.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @raw_datum }\n end\n end", "def new\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @life_jacket }\n end\n end", "def new\n @rawdatum = Rawdatum.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @rawdatum }\n end\n end", "def new\n @sun = Sun.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @sun }\n end\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
POST /skydata POST /skydata.json
def create
  @skydatum = Skydatum.new(params[:skydatum])

  respond_to do |format|
    if @skydatum.save
      format.html { redirect_to @skydatum, notice: 'Skydatum was successfully created.' }
      format.json { render json: @skydatum, status: :created, location: @skydatum }
    else
      format.html { render action: "new" }
      format.json { render json: @skydatum.errors, status: :unprocessable_entity }
    end
  end
end
[ "def create\n @meteo_datum = MeteoDatum.new(meteodatum_params)\n @meteo_datum.weather_station_id = params[:weather_station_id]\n\n if @meteo_datum.save\n render json: @meteo_datum, status: :created\n else\n render json: @meteo_datum.errors, status: :unprocessable_entity\n end\n end", "def create\n @sky_track = SkyTrack.new(sky_track_params)\n\n respond_to do |format|\n if @sky_track.save\n format.html { redirect_to @sky_track, notice: 'Sky track was successfully created.' }\n format.json { render action: 'show', status: :created, location: @sky_track }\n else\n format.html { render action: 'new' }\n format.json { render json: @sky_track.errors, status: :unprocessable_entity }\n end\n end\n end", "def new\n @skydatum = Skydatum.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @skydatum }\n end\n end", "def create\n\n #Can also be created with:\n # => curl -X POST -H \"Content-Type: application/json; charset=UTF-8\" -d '{\"sensor_datum\": {\"ppm\": \"400\",\"device_id\": \"1\"}}' localhost:3000/sensor_data.json\n # => curl -X POST -H \"Content-Type: application/json; charset=UTF-8\" -d '{\"sensor_datum\": {\"ppm\": \"400\", \"device_address\": \"42\"}}' localhost:3000/sensor_data.json\n\n @sensor_datum = SensorDatum.new(sensor_datum_params)\n authorize @sensor_datum\n\n @sensor_datum.resolve_device_id\n @sensor_datum.resolve_experiment_id\n\n respond_to do |format|\n if @sensor_datum.save\n format.html { redirect_to @sensor_datum, notice: 'Sensor datum was successfully created.' }\n format.json { render :show, status: :created, location: @sensor_datum }\n else\n format.html { render :new }\n format.json { render json: @sensor_datum.errors, status: :unprocessable_entity }\n end\n end\n end", "def get_surf_data\n url = \"http://magicseaweed.com/api/#{ENV['MAGIC_SEAWEED_API_KEY']}/forecast/?spot_id=6128&units=UK\"\n uri = URI(url)\n\n response = Net::HTTP.get(uri)\n ActiveSupport::JSON.decode(response) if response != ''\n end", "def transmit_data\n body = {body: {datum:{data_type:\"float\", value: rand, virtual_entity_id: 1}}}\n self.class.post(API+'/data', body)\n end", "def create\n @snow_datum = SnowDatum.new(snow_datum_params)\n\n respond_to do |format|\n if @snow_datum.save\n format.html { redirect_to @snow_datum, notice: 'Snow datum was successfully created.' }\n format.json { render :show, status: :created, location: @snow_datum }\n else\n format.html { render :new }\n format.json { render json: @snow_datum.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n respond_to do |format|\n if @input_datum.update(params[:input_data].keys, params[:input_data].values)\n format.html { redirect_to @input_datum, notice: 'Input datum was successfully created.' }\n format.json { render :show, status: :created, location: @input_datum }\n else\n format.html { render :new }\n format.json { render json: @input_datum.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @weather_datum = WeatherDatum.new(weather_datum_params)\n\n respond_to do |format|\n if @weather_datum.save\n format.html { redirect_to @weather_datum, notice: 'Weather datum was successfully created.' 
}\n format.json { render :show, status: :created, location: @weather_datum }\n else\n format.html { render :new }\n format.json { render json: @weather_datum.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @fotky = Fotky.new(params[:fotky])\n\n respond_to do |format|\n if @fotky.save\n format.html { redirect_to @fotky, notice: 'Fotky was successfully created.' }\n format.json { render json: @fotky, status: :created, location: @fotky }\n else\n format.html { render action: \"new\" }\n format.json { render json: @fotky.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @whisky = Whisky.new(whisky_params)\n\n respond_to do |format|\n if @whisky.save\n format.html { redirect_to @whisky, notice: 'Whisky was successfully created.' }\n format.json { render :show, status: :created, location: @whisky }\n else\n format.html { render :new }\n format.json { render json: @whisky.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @skytouch_user = SkytouchUser.new(skytouch_user_params)\n\n respond_to do |format|\n if @skytouch_user.save\n format.html { redirect_to @skytouch_user, notice: 'Skytouch user was successfully created.' }\n format.json { render :show, status: :created, location: @skytouch_user }\n else\n format.html { render :new }\n format.json { render json: @skytouch_user.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @sensor_data = SensorData.new(sensor_data_params)\n\n respond_to do |format|\n if @sensor_data.save\n format.html { redirect_to @sensor_data, notice: 'Sensor data was successfully created.' }\n format.json { render :show, status: :created, location: @sensor_data }\n else\n format.html { render :new }\n format.json { render json: @sensor_data.errors, status: :unprocessable_entity }\n end\n end\n end", "def destroy\n @skydatum = Skydatum.find(params[:id])\n @skydatum.destroy\n\n respond_to do |format|\n format.html { redirect_to skydata_url }\n format.json { head :no_content }\n end\n end", "def post(path, json, params = {})\n if path.include?('covid19')\n request = Net::HTTP::Post.new(path, @headers)\n else\n request = Net::HTTP::Post.new('/v2' + path, @headers)\n end\n request.add_field('Content-Type', 'application/json')\n request.body = json\n params.each do |k, v|\n request[k] = v\n end\n send_request(request)\n end", "def update\n @skydatum = Skydatum.find(params[:id])\n\n respond_to do |format|\n if @skydatum.update_attributes(params[:skydatum])\n format.html { redirect_to @skydatum, notice: 'Skydatum was successfully updated.' 
}\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @skydatum.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @satellite = Satellite.new(satellite_params)\n\n if @satellite.save\n render json: @satellite, status: :created\n else\n render json: @satellite.errors, status: :unprocessable_entity\n end\n end", "def show\n @skydatum = Skydatum.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @skydatum }\n end\n end", "def create\n unless logged_in?\n render :file => 'public/401', :status => :unauthorized, :layout => false and return\n end\n\n @user = User.find(weather_station_params[:user_id])\n @weather_station = @user.weather_stations.create(weather_station_params)\n\n respond_to do |format|\n if @weather_station.save\n # Automatically update the station with weather reports\n historical_dates = (1..18).collect {|i| Date.today - i}\n @weather_station.update_from_darksky(historical_dates, true)\n\n format.html { redirect_to @weather_station, notice: 'Weather station was successfully created.' }\n format.json { render :show, status: :created, location: @weather_station }\n else\n format.html { render :new }\n format.json { render json: @weather_station.errors, status: :unprocessable_entity }\n end\n end\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
PUT /skydata/1 PUT /skydata/1.json
def update
  @skydatum = Skydatum.find(params[:id])

  respond_to do |format|
    if @skydatum.update_attributes(params[:skydatum])
      format.html { redirect_to @skydatum, notice: 'Skydatum was successfully updated.' }
      format.json { head :no_content }
    else
      format.html { render action: "edit" }
      format.json { render json: @skydatum.errors, status: :unprocessable_entity }
    end
  end
end
[ "def update\n respond_to do |format|\n if @sky_track.set(sky_track_params)\n format.html { redirect_to @sky_track, notice: 'Sky track was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @sky_track.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n spice = Spice.find_by(id: params[:id])\n spice.update(spice_params)\n render json: spice\nend", "def update_tenant_circle(args = {}) \n put(\"/tenantcircles.json/#{args[:circleId]}\", args)\nend", "def update\n @fotky = Fotky.find(params[:id])\n\n respond_to do |format|\n if @fotky.update_attributes(params[:fotky])\n format.html { redirect_to @fotky, notice: 'Fotky was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @fotky.errors, status: :unprocessable_entity }\n end\n end\n end", "def update_skydrive_object object_id, options={}\n response = put(\"/#{object_id}\", options)\n end", "def update\n @sensor = Sensor.find_by_sensor_id(params[:id])\n\n if @sensor.update_attributes(params[:sensor])\n head :no_content\n else\n render json: @sensor.errors, status: :unprocessable_entity\n end\n end", "def update\n @species = Species.find(params[:id])\n\n if @species.update(species_params)\n head :no_content\n else\n render json: @species.errors, status: :unprocessable_entity\n end\n end", "def update\n if @sensor.update(sensor_params)\n render json: { status: :ok }\n else\n render json: { status: :unprocessable_entity }\n end\n end", "def update_data website\n dest = get_dest_path website.preview\n data_dir = File.join(dest, \"_data\")\n FileUtils.mkdir_p(data_dir)\n data_file = File.join(data_dir, \"scribae.json\")\n comps = Component.all\n data = {\n \"components\" => comps\n }\n File.open(data_file,'w') do |f| \n f.write data.to_json\n end\n end", "def update\n if @spice.update(spice_params)\n head :no_content\n else\n render json: @spice.errors, status: :unprocessable_entity\n end\n end", "def update_mobile_carrier(args = {}) \n put(\"/mobile.json/#{args[:carrierId]}\", args)\nend", "def update\n if @satellite.update(satellite_params)\n render json: @satellite, status: :ok\n else\n render json: @satellite.errors, status: :unprocessable_entity\n end\n end", "def update\n @seat = Seat.find(params[:id])\n\n if @seat.update(seat_params)\n head :no_content\n else\n render json: @seat.errors, status: :unprocessable_entity\n end\n end", "def update\n authorize! 
:update, @water_fountain\n respond_to do |format|\n if @water_fountain.update(water_fountain_params)\n format.json { head :no_content }\n else\n format.json { render json: { error: @water_fountain.errors.full_messages }, status: :unprocessable_entity }\n end\n end\n end", "def update_data\n region = Region.find(params[:id])\n data = MultiJson.load(@region.json_data || \"{}\")\n\n # TODO: merge incoming data\n\n region.json_data = MultiJson.dump(data, :pretty => true)\n\n redirect_to :action => :show, :id => id\n end", "def update_aos_version(args = {}) \n put(\"/aosversions.json/#{args[:aosVersionId]}\", args)\nend", "def update\n @json = Json.find(params[:id])\n\n if @json.update(json_params)\n head :no_content\n else\n render json: @json.errors, status: :unprocessable_entity\n end\n end", "def update\n @datapoint = Datapoint.find(params[:id])\n\n if @datapoint.update(datapoint_params)\n head :no_content\n else\n render json: @datapoint.errors, status: :unprocessable_entity\n end\n end", "def hput(path, data)\n\t\t\trequest = Net::HTTP::Put.new(\"#{@server.path}#{path}\")\n\t\t\trequest.add_field('Content-Type', 'application/json')\n\t\t\trequest.body = data.to_json\n\t\t\tresponse = @http.request(request)\n\t\t\treturn response.code.to_i\n\t\tend" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
DELETE /skydata/1 DELETE /skydata/1.json
def destroy
  @skydatum = Skydatum.find(params[:id])
  @skydatum.destroy

  respond_to do |format|
    format.html { redirect_to skydata_url }
    format.json { head :no_content }
  end
end
[ "def delete_json(path)\n url = [base_url, path].join\n resp = HTTParty.delete(url, headers: standard_headers)\n parse_json(url, resp)\n end", "def destroy\n @json.destroy\n\n head :no_content\n end", "def destroy\n @json_datum.destroy\n respond_to do |format|\n format.html { redirect_to json_data_url }\n format.json { head :no_content }\n end\n end", "def delete_floor_plan(args = {}) \n delete(\"/files.json/floorplan/images\", args)\nend", "def destroy\n @jsonuserdata.destroy\n respond_to do |format|\n format.html { redirect_to jsonuserdata_url }\n format.json { head :no_content }\n end\n end", "def delete_tenant_circle(args = {}) \n delete(\"/tenantcircles.json/#{args[:circleId]}\", args)\nend", "def destroy\n @trafficdatum.destroy\n respond_to do |format|\n format.html { redirect_to trafficdata_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @jsonfile = Jsonfile.find(params[:id])\n @jsonfile.destroy\n\n respond_to do |format|\n format.html { redirect_to(jsonfiles_url) }\n format.xml { head :ok }\n end\n end", "def destroy\n @dataset_datum.destroy\n respond_to do |format|\n format.html { redirect_to dataset_data_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @game_stash_datum = GameStashDatum.find(params[:id])\n @game_stash_datum.destroy\n\n respond_to do |format|\n format.html { redirect_to game_stash_data_url }\n format.json { head :no_content }\n end\n end", "def delete_aos_version(args = {}) \n delete(\"/aosversions.json/#{args[:aosVersionId]}\", args)\nend", "def destroy\n @snow_datum.destroy\n respond_to do |format|\n format.html { redirect_to snow_data_url, notice: 'Snow datum was successfully destroyed.' }\n format.json { head :no_content }\n end\n end", "def destroy\n @getdatum = Getdatum.find(params[:id])\n @getdatum.destroy\n\n respond_to do |format|\n format.html { redirect_to getdata_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @dataset = Dataset.find(params[:id])\n @dataset.destroy\n\n respond_to do |format|\n format.html { redirect_to datasets_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @raw_datum = RawDatum.find(params[:id])\n @raw_datum.destroy\n\n respond_to do |format|\n format.html { redirect_to raw_data_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @snowdatum.destroy\n respond_to do |format|\n format.html { redirect_to snowdata_url, notice: 'Snowdatum was successfully destroyed.' }\n format.json { head :no_content }\n end\n end", "def delete\n self.class.headers 'Authorization' => \"OAuth #{ENV['sfdc_token']}\"\n self.class.headers 'Content-Type' => \"application/json\"\n response = self.class.delete(SObject.root_url+\"/sobjects/#{@object_name}/#{@Id}\")\n raise response.parsed_response[0]['message'] if response.code.to_i > 299\n nil\n end", "def destroy\n @gethotelstaticdatagd = Gethotelstaticdatagd.find(params[:id])\n @gethotelstaticdatagd.destroy\n\n respond_to do |format|\n format.html { redirect_to gethotelstaticdatagds_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @kota_stone.destroy\n respond_to do |format|\n format.html { redirect_to kota_stones_url }\n format.json { head :no_content }\n end\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Baseline implementation for the disable_xpn_host REST call
def disable_xpn_host request_pb, options = nil
  raise ::ArgumentError, "request must be provided" if request_pb.nil?

  uri, _body, query_string_params = transcode_disable_xpn_host_request request_pb
  response = @client_stub.make_post_request(
    uri: uri,
    params: query_string_params,
    options: options
  )
  result = ::Google::Cloud::Compute::V1::Operation.decode_json response.body, ignore_unknown_fields: true

  yield result, response if block_given?
  result
end
[ "def disable_xpn_host request_pb, options = nil\n raise ::ArgumentError, \"request must be provided\" if request_pb.nil?\n\n verb, uri, query_string_params, body = ServiceStub.transcode_disable_xpn_host_request request_pb\n query_string_params = if query_string_params.any?\n query_string_params.to_h { |p| p.split \"=\", 2 }\n else\n {}\n end\n\n response = @client_stub.make_http_request(\n verb,\n uri: uri,\n body: body || \"\",\n params: query_string_params,\n options: options\n )\n operation = ::Gapic::Rest::TransportOperation.new response\n result = ::Google::Cloud::Compute::V1::Operation.decode_json response.body, ignore_unknown_fields: true\n\n yield result, operation if block_given?\n result\n end", "def disable_host(host)\n # TODO: Check Status\n toggle_host(host, 'Disable')\n end", "def disable_xpn_resource request_pb, options = nil\n raise ::ArgumentError, \"request must be provided\" if request_pb.nil?\n\n uri, body, query_string_params = transcode_disable_xpn_resource_request request_pb\n response = @client_stub.make_post_request(\n uri: uri,\n body: body,\n params: query_string_params,\n options: options\n )\n result = ::Google::Cloud::Compute::V1::Operation.decode_json response.body, ignore_unknown_fields: true\n\n yield result, response if block_given?\n result\n end", "def disable_xpn_resource request_pb, options = nil\n raise ::ArgumentError, \"request must be provided\" if request_pb.nil?\n\n verb, uri, query_string_params, body = ServiceStub.transcode_disable_xpn_resource_request request_pb\n query_string_params = if query_string_params.any?\n query_string_params.to_h { |p| p.split \"=\", 2 }\n else\n {}\n end\n\n response = @client_stub.make_http_request(\n verb,\n uri: uri,\n body: body || \"\",\n params: query_string_params,\n options: options\n )\n operation = ::Gapic::Rest::TransportOperation.new response\n result = ::Google::Cloud::Compute::V1::Operation.decode_json response.body, ignore_unknown_fields: true\n\n yield result, operation if block_given?\n result\n end", "def disable_host(host)\n if @live_hosts.size <= @min_hosts\n puts \"Will not take #{host} down, alreay at lower limit #{@min_hosts}\"\n return\n end\n\n #puts \"Disabling host '#{host}'\"\n get_balancer_manager.disable_host(host)\n end", "def disable_smart_proxy\n params = {:enabled => false}\n smart_proxy(params)\n end", "def disable\n debug \"Call 'disable' for Pacemaker service '#{name}' on node '#{hostname}'\"\n unmanage_primitive name\n end", "def disable_proxy\n broadcast('@type' => 'disableProxy')\n end", "def enable_xpn_host request_pb, options = nil\n raise ::ArgumentError, \"request must be provided\" if request_pb.nil?\n\n uri, _body, query_string_params = transcode_enable_xpn_host_request request_pb\n response = @client_stub.make_post_request(\n uri: uri,\n params: query_string_params,\n options: options\n )\n result = ::Google::Cloud::Compute::V1::Operation.decode_json response.body, ignore_unknown_fields: true\n\n yield result, response if block_given?\n result\n end", "def disable\n Puppet.debug \"Call 'disable' for Pacemaker service '#{name}' on node '#{hostname}'\"\n unmanage_primitive name\n end", "def enable_xpn_host request_pb, options = nil\n raise ::ArgumentError, \"request must be provided\" if request_pb.nil?\n\n verb, uri, query_string_params, body = ServiceStub.transcode_enable_xpn_host_request request_pb\n query_string_params = if query_string_params.any?\n query_string_params.to_h { |p| p.split \"=\", 2 }\n else\n {}\n end\n\n response = @client_stub.make_http_request(\n 
verb,\n uri: uri,\n body: body || \"\",\n params: query_string_params,\n options: options\n )\n operation = ::Gapic::Rest::TransportOperation.new response\n result = ::Google::Cloud::Compute::V1::Operation.decode_json response.body, ignore_unknown_fields: true\n\n yield result, operation if block_given?\n result\n end", "def ignore_localhost=(value)\n VCR.request_ignorer.ignore_localhost = value\n end", "def no_proxy\n @no_proxy\n end", "def pending_decommission(proxy)\n end", "def cmd_noop(param)\n send_response \"200\"\n end", "def disable!\n run(:disable)\n end", "def allow_actions_from_host(hostname)\n end", "def tag_resource_as_disabled\n resource.tag_add('v2v_transformation_host/false')\n resource.tag_remove('v2v_transformation_host/true')\n resource.tag_remove('v2v_transformation_method/vddk')\n resource.tag_remove('v2v_transformation_method/ssh')\n end", "def disable\n {\n method: \"HeadlessExperimental.disable\"\n }\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Baseline implementation for the disable_xpn_resource REST call
def disable_xpn_resource request_pb, options = nil
  raise ::ArgumentError, "request must be provided" if request_pb.nil?

  uri, body, query_string_params = transcode_disable_xpn_resource_request request_pb
  response = @client_stub.make_post_request(
    uri: uri,
    body: body,
    params: query_string_params,
    options: options
  )
  result = ::Google::Cloud::Compute::V1::Operation.decode_json response.body, ignore_unknown_fields: true

  yield result, response if block_given?
  result
end
[ "def disable_xpn_resource request_pb, options = nil\n raise ::ArgumentError, \"request must be provided\" if request_pb.nil?\n\n verb, uri, query_string_params, body = ServiceStub.transcode_disable_xpn_resource_request request_pb\n query_string_params = if query_string_params.any?\n query_string_params.to_h { |p| p.split \"=\", 2 }\n else\n {}\n end\n\n response = @client_stub.make_http_request(\n verb,\n uri: uri,\n body: body || \"\",\n params: query_string_params,\n options: options\n )\n operation = ::Gapic::Rest::TransportOperation.new response\n result = ::Google::Cloud::Compute::V1::Operation.decode_json response.body, ignore_unknown_fields: true\n\n yield result, operation if block_given?\n result\n end", "def action_disable\n notify_if_service do\n service_resource.run_action(:disable)\n end\n end", "def tag_resource_as_disabled\n resource.tag_add('v2v_transformation_host/false')\n resource.tag_remove('v2v_transformation_host/true')\n resource.tag_remove('v2v_transformation_method/vddk')\n resource.tag_remove('v2v_transformation_method/ssh')\n end", "def disable!\n run(:disable)\n end", "def disable_waitress_resources\n @resources = false\n end", "def disable\n self.stop if self.status == :running\n output = supervisorctl(:remove, @resource[:name])\n rescue Puppet::ExecutionFailure\n raise Puppet::Error, \"Could not disable #{self.name}: #{output}\"\n end", "def disable_xpn_host request_pb, options = nil\n raise ::ArgumentError, \"request must be provided\" if request_pb.nil?\n\n verb, uri, query_string_params, body = ServiceStub.transcode_disable_xpn_host_request request_pb\n query_string_params = if query_string_params.any?\n query_string_params.to_h { |p| p.split \"=\", 2 }\n else\n {}\n end\n\n response = @client_stub.make_http_request(\n verb,\n uri: uri,\n body: body || \"\",\n params: query_string_params,\n options: options\n )\n operation = ::Gapic::Rest::TransportOperation.new response\n result = ::Google::Cloud::Compute::V1::Operation.decode_json response.body, ignore_unknown_fields: true\n\n yield result, operation if block_given?\n result\n end", "def disable_xpn_host request_pb, options = nil\n raise ::ArgumentError, \"request must be provided\" if request_pb.nil?\n\n uri, _body, query_string_params = transcode_disable_xpn_host_request request_pb\n response = @client_stub.make_post_request(\n uri: uri,\n params: query_string_params,\n options: options\n )\n result = ::Google::Cloud::Compute::V1::Operation.decode_json response.body, ignore_unknown_fields: true\n\n yield result, response if block_given?\n result\n end", "def disable!\n set_enabled!(false)\n end", "def unoccupy_ressource(ressource_id)\n end", "def disable!\n if enabled?\n client.disable_device!(self.id, self.udid, mac: self.platform == 'mac')\n # disable request doesn't return device json, so we assume that the new status is \"r\" if response succeeded\n self.status = \"r\"\n end\n end", "def disable\n debug \"Call 'disable' for Pacemaker service '#{name}' on node '#{hostname}'\"\n unmanage_primitive name\n end", "def disable!\n @logger.debug(\"Disabling certificate #{@name}\")\n @enabled = false\n end", "def disable\n @enabled = !_disable_provider\n end", "def disable\n Puppet.debug \"Call 'disable' for Pacemaker service '#{name}' on node '#{hostname}'\"\n unmanage_primitive name\n end", "def disable\n authorize! 
@extension, :disable?\n @extension.update_attribute(:enabled, false)\n ExtensionDisabledNotifier.perform_async(@extension.id)\n redirect_to \"/\", notice: t(\"extension.disabled\", extension: @extension.name)\n end", "def disable\n {\n method: \"Security.disable\"\n }\n end", "def disable\n enable_disable(params[:ecommerce_id], false, \"suspendu. Les changements prendront effet dans 15 minutes.\")\n end", "def disable\n {\n method: \"Fetch.disable\"\n }\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Baseline implementation for the enable_xpn_host REST call
def enable_xpn_host request_pb, options = nil
  raise ::ArgumentError, "request must be provided" if request_pb.nil?

  uri, _body, query_string_params = transcode_enable_xpn_host_request request_pb
  response = @client_stub.make_post_request(
    uri: uri,
    params: query_string_params,
    options: options
  )
  result = ::Google::Cloud::Compute::V1::Operation.decode_json response.body, ignore_unknown_fields: true

  yield result, response if block_given?
  result
end
[ "def enable_xpn_host request_pb, options = nil\n raise ::ArgumentError, \"request must be provided\" if request_pb.nil?\n\n verb, uri, query_string_params, body = ServiceStub.transcode_enable_xpn_host_request request_pb\n query_string_params = if query_string_params.any?\n query_string_params.to_h { |p| p.split \"=\", 2 }\n else\n {}\n end\n\n response = @client_stub.make_http_request(\n verb,\n uri: uri,\n body: body || \"\",\n params: query_string_params,\n options: options\n )\n operation = ::Gapic::Rest::TransportOperation.new response\n result = ::Google::Cloud::Compute::V1::Operation.decode_json response.body, ignore_unknown_fields: true\n\n yield result, operation if block_given?\n result\n end", "def disable_xpn_host request_pb, options = nil\n raise ::ArgumentError, \"request must be provided\" if request_pb.nil?\n\n verb, uri, query_string_params, body = ServiceStub.transcode_disable_xpn_host_request request_pb\n query_string_params = if query_string_params.any?\n query_string_params.to_h { |p| p.split \"=\", 2 }\n else\n {}\n end\n\n response = @client_stub.make_http_request(\n verb,\n uri: uri,\n body: body || \"\",\n params: query_string_params,\n options: options\n )\n operation = ::Gapic::Rest::TransportOperation.new response\n result = ::Google::Cloud::Compute::V1::Operation.decode_json response.body, ignore_unknown_fields: true\n\n yield result, operation if block_given?\n result\n end", "def disable_xpn_host request_pb, options = nil\n raise ::ArgumentError, \"request must be provided\" if request_pb.nil?\n\n uri, _body, query_string_params = transcode_disable_xpn_host_request request_pb\n response = @client_stub.make_post_request(\n uri: uri,\n params: query_string_params,\n options: options\n )\n result = ::Google::Cloud::Compute::V1::Operation.decode_json response.body, ignore_unknown_fields: true\n\n yield result, response if block_given?\n result\n end", "def enable(session, id, enabled)\n write_task('rvpe.host.enable', session, true) do\n call_one_xmlrpc('one.host.enable', session, id, enabled)\n end\n end", "def enable_host(host)\n # TODO: Check Status\n toggle_host(host, 'Enable')\n end", "def enable_xpn_resource request_pb, options = nil\n raise ::ArgumentError, \"request must be provided\" if request_pb.nil?\n\n uri, body, query_string_params = transcode_enable_xpn_resource_request request_pb\n response = @client_stub.make_post_request(\n uri: uri,\n body: body,\n params: query_string_params,\n options: options\n )\n result = ::Google::Cloud::Compute::V1::Operation.decode_json response.body, ignore_unknown_fields: true\n\n yield result, response if block_given?\n result\n end", "def enable_xpn_resource request_pb, options = nil\n raise ::ArgumentError, \"request must be provided\" if request_pb.nil?\n\n verb, uri, query_string_params, body = ServiceStub.transcode_enable_xpn_resource_request request_pb\n query_string_params = if query_string_params.any?\n query_string_params.to_h { |p| p.split \"=\", 2 }\n else\n {}\n end\n\n response = @client_stub.make_http_request(\n verb,\n uri: uri,\n body: body || \"\",\n params: query_string_params,\n options: options\n )\n operation = ::Gapic::Rest::TransportOperation.new response\n result = ::Google::Cloud::Compute::V1::Operation.decode_json response.body, ignore_unknown_fields: true\n\n yield result, operation if block_given?\n result\n end", "def enable_host_only_network(net_options); end", "def enable_smart_proxy(host=nil, port=nil)\n params = {:enabled => true}\n params[:host] = host unless host.nil?\n 
params[:port] = port unless port.nil?\n smart_proxy(params)\n end", "def set_hostname_verification (enable)\n not_bool_response = fail_response(13001, \"NaServer::set_hostname_verification: invalid argument \" + enable + \"specified\")\n return not_bool_response unless !!enable == enable\n not_serv_cert_response = fail_response(13001, \"NaServer::set_hostname_verification: server certificate verification is not enabled\")\n return not_serv_cert_response unless server_cert_verification_enabled?\n @enable_hostname_verification = enable\n end", "def allow_actions_from_host(hostname)\n end", "def enable(params)\n host = get_param(params, :host)\n if @environment.in_dry_run_mode\n if params.has_key?(:service)\n notify(:msg => \"[#{@name}] Would enable Nagios notifications for service #{params[:service]} on host #{host}\",\n :tags => [:nagios, :dryrun])\n else\n notify(:msg => \"[#{@name}] Would enable Nagios notifications for host #{host}\",\n :tags => [:nagios, :dryrun])\n end\n else\n if params.has_key?(:service)\n notify(:msg => \"[#{@name}] Enabling notifications for service #{params[:service]} on host #{host}\",\n :tags => [:nagios, :trace])\n request = Net::HTTP::Put.new(\"/hosts/#{host}/#{params[:service]}/command/ENABLE_SVC_NOTIFICATIONS\")\n else\n notify(:msg => \"[#{@name}] Enabling notifications for host #{host}\",\n :tags => [:nagios, :trace])\n request = Net::HTTP::Put.new(\"/hosts/#{host}/command/ENABLE_HOST_SVC_NOTIFICATIONS\")\n end\n request.body = \"{}\"\n request[\"Content-Type\"] = \"application/json\"\n response = @nagix.request(request)\n if response.code.to_i >= 300\n raise \"Got response #{response.code} from Nagix server\"\n end\n end\n status(params)\n end", "def enable\n CircleCi.request(\"#{base_path}/enable\").post\n end", "def host_authorization=(_arg0); end", "def host_allowed?(arg)\n true\n end", "def enable_xack\n getok('XACK ON')\n @xack = true\n end", "def enable(params)\n host = get_param(params, :host)\n if @environment.in_dry_run_mode\n if params.has_key?(:service)\n services = params[:services]\n if services == :all\n notify(:msg => \"[#{@name}] Would enable Nagios notifications for all services on host #{host}\",\n :tags => [:nagios, :dryrun])\n else\n notify(:msg => \"[#{@name}] Would enable Nagios notifications for service(s) #{params[:services]} on host #{host}\",\n :tags => [:nagios, :dryrun])\n end\n else\n notify(:msg => \"[#{@name}] Would enable Nagios notifications for host #{host}\",\n :tags => [:nagios, :dryrun])\n end\n else\n real_host = find_host(params)\n if real_host.nil?\n raise \"Host #{host} does not seem to be present in Nagios\"\n end\n if params.has_key?(:services)\n services = params[:services]\n if services == :all\n notify(:msg => \"[#{@name}] Enabling notifications for all services on host #{host}\",\n :tags => [:nagios, :trace])\n exec(\"COMMAND [#{Time.now.to_i}] ENABLE_HOST_SVC_NOTIFICATIONS;#{real_host}\\n\\n\")\n else\n notify(:msg => \"[#{@name}] Enabling notifications for service(s) #{params[:services]} on host #{host}\",\n :tags => [:nagios, :trace])\n arrayify(services).each do |service|\n exec(\"COMMAND [#{Time.now.to_i}] ENABLE_SVC_NOTIFICATIONS;#{real_host};#{service}\\n\\n\")\n end\n end\n else\n notify(:msg => \"[#{@name}] Enabling notifications for host #{host}\",\n :tags => [:nagios, :trace])\n exec(\"COMMAND [#{Time.now.to_i}] ENABLE_HOST_NOTIFICATIONS;#{real_host}\\n\\n\")\n end\n end\n status(params)\n end", "def enable\n CircleCi.request(conf, \"#{base_path}/enable\").post\n end", "def async_enable(host, 
options = {})\n job = Job.new(:enable_node)\n async(:enable, job, host, options)\n job.ticket\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Baseline implementation for the enable_xpn_resource REST call
def enable_xpn_resource request_pb, options = nil
  raise ::ArgumentError, "request must be provided" if request_pb.nil?

  uri, body, query_string_params = transcode_enable_xpn_resource_request request_pb
  response = @client_stub.make_post_request(
    uri: uri,
    body: body,
    params: query_string_params,
    options: options
  )
  result = ::Google::Cloud::Compute::V1::Operation.decode_json response.body, ignore_unknown_fields: true

  yield result, response if block_given?
  result
end
[ "def enable_xpn_resource request_pb, options = nil\n raise ::ArgumentError, \"request must be provided\" if request_pb.nil?\n\n verb, uri, query_string_params, body = ServiceStub.transcode_enable_xpn_resource_request request_pb\n query_string_params = if query_string_params.any?\n query_string_params.to_h { |p| p.split \"=\", 2 }\n else\n {}\n end\n\n response = @client_stub.make_http_request(\n verb,\n uri: uri,\n body: body || \"\",\n params: query_string_params,\n options: options\n )\n operation = ::Gapic::Rest::TransportOperation.new response\n result = ::Google::Cloud::Compute::V1::Operation.decode_json response.body, ignore_unknown_fields: true\n\n yield result, operation if block_given?\n result\n end", "def set_enabled # rubocop:disable AbcSize\n converge_by(\"#{enabled_msg} #{new_resource}\") do\n Chef::Log.info \"#{enabled_msg} #{new_resource}\"\n load_balancer.client['LocalLB.NodeAddressV2'].set_session_enabled_state([new_resource.node_name], [enabled_state])\n current_resource.enabled(new_resource.enabled)\n new_resource.updated_by_last_action(true)\n\n new_resource.updated_by_last_action(true)\n end\n end", "def action_enable\n notify_if_service do\n service_resource.run_action(:enable)\n end\n end", "def disable_xpn_resource request_pb, options = nil\n raise ::ArgumentError, \"request must be provided\" if request_pb.nil?\n\n verb, uri, query_string_params, body = ServiceStub.transcode_disable_xpn_resource_request request_pb\n query_string_params = if query_string_params.any?\n query_string_params.to_h { |p| p.split \"=\", 2 }\n else\n {}\n end\n\n response = @client_stub.make_http_request(\n verb,\n uri: uri,\n body: body || \"\",\n params: query_string_params,\n options: options\n )\n operation = ::Gapic::Rest::TransportOperation.new response\n result = ::Google::Cloud::Compute::V1::Operation.decode_json response.body, ignore_unknown_fields: true\n\n yield result, operation if block_given?\n result\n end", "def enable_xpn_host request_pb, options = nil\n raise ::ArgumentError, \"request must be provided\" if request_pb.nil?\n\n verb, uri, query_string_params, body = ServiceStub.transcode_enable_xpn_host_request request_pb\n query_string_params = if query_string_params.any?\n query_string_params.to_h { |p| p.split \"=\", 2 }\n else\n {}\n end\n\n response = @client_stub.make_http_request(\n verb,\n uri: uri,\n body: body || \"\",\n params: query_string_params,\n options: options\n )\n operation = ::Gapic::Rest::TransportOperation.new response\n result = ::Google::Cloud::Compute::V1::Operation.decode_json response.body, ignore_unknown_fields: true\n\n yield result, operation if block_given?\n result\n end", "def disable_xpn_resource request_pb, options = nil\n raise ::ArgumentError, \"request must be provided\" if request_pb.nil?\n\n uri, body, query_string_params = transcode_disable_xpn_resource_request request_pb\n response = @client_stub.make_post_request(\n uri: uri,\n body: body,\n params: query_string_params,\n options: options\n )\n result = ::Google::Cloud::Compute::V1::Operation.decode_json response.body, ignore_unknown_fields: true\n\n yield result, response if block_given?\n result\n end", "def tag_resource_as_enabled\n resource.tag_add('v2v_transformation_host/true')\n resource.tag_add('v2v_transformation_method/vddk') if vddk_transport_supported?\n resource.tag_add('v2v_transformation_method/ssh') if ssh_transport_supported?\n resource.tag_remove('v2v_transformation_host/false')\n end", "def enable_xpn_host request_pb, options = nil\n raise ::ArgumentError, 
\"request must be provided\" if request_pb.nil?\n\n uri, _body, query_string_params = transcode_enable_xpn_host_request request_pb\n response = @client_stub.make_post_request(\n uri: uri,\n params: query_string_params,\n options: options\n )\n result = ::Google::Cloud::Compute::V1::Operation.decode_json response.body, ignore_unknown_fields: true\n\n yield result, response if block_given?\n result\n end", "def enable!\n run(:enable)\n end", "def enable\n CircleCi.request(\"#{base_path}/enable\").post\n end", "def enable\n @enabled = _enable_provider\n end", "def enable\n @partner = Partner.find(params[:id])\n \n respond_to do |format|\n if @partner.update_attribute(:enabled, true)\n format.html { redirect_to partners_url, notice: \"Partner Enabled\" }\n format.json { head :ok }\n else\n format.html { redirect_to partners_url, notice: \"There was an error enabling this Partner\" }\n format.json { render json: @partner.errors, status: :unprocessable_entity }\n end\n end\n \n end", "def enable\n authorize! @extension, :disable?\n @extension.update_attribute(:enabled, true)\n redirect_to owner_scoped_extension_url(@extension), notice: t(\"extension.enabled\", extension: @extension.name)\n end", "def enable\n @action_source = ActionSource.find(params[:id])\n @action_source.disabled = false\n @action_source.save\n\n respond_to do |format|\n format.html { redirect_to(action_sources_url) }\n format.xml { head :ok }\n end\n end", "def set_enabled_state\n converge_by(\"#{enabled_state_message} #{new_resource}\") do\n Chef::Log.info(\"#{enabled_state_message} #{new_resource}\")\n load_balancer.client['LocalLB.VirtualServer'].set_enabled_state([new_resource.vs_name], [enabled_state])\n\n new_resource.updated_by_last_action(true)\n end\n end", "def enable!\n set_enabled!(true)\n end", "def enable\n CircleCi.request(conf, \"#{base_path}/enable\").post\n end", "def enable(probe)\n Ruby.primitive :taskprobe_enable\n raise PrimtiveFailure, \"TaskProbe#enable primitive failed\"\n end", "def enable\n enable_disable(params[:ecommerce_id], true, \"activé\")\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Baseline implementation for the get_xpn_host REST call
def get_xpn_host request_pb, options = nil
  raise ::ArgumentError, "request must be provided" if request_pb.nil?

  uri, _body, _query_string_params = transcode_get_xpn_host_request request_pb
  response = @client_stub.make_get_request(
    uri: uri,
    options: options
  )
  result = ::Google::Cloud::Compute::V1::Project.decode_json response.body, ignore_unknown_fields: true

  yield result, response if block_given?
  result
end
[ "def get_xpn_host request_pb, options = nil\n raise ::ArgumentError, \"request must be provided\" if request_pb.nil?\n\n verb, uri, query_string_params, body = ServiceStub.transcode_get_xpn_host_request request_pb\n query_string_params = if query_string_params.any?\n query_string_params.to_h { |p| p.split \"=\", 2 }\n else\n {}\n end\n\n response = @client_stub.make_http_request(\n verb,\n uri: uri,\n body: body || \"\",\n params: query_string_params,\n options: options\n )\n operation = ::Gapic::Rest::TransportOperation.new response\n result = ::Google::Cloud::Compute::V1::Project.decode_json response.body, ignore_unknown_fields: true\n\n yield result, operation if block_given?\n result\n end", "def host_meta\n @host_meta ||= begin\n request = Net::HTTP::Get.new(host_meta_uri.path)\n http = Net::HTTP.new(host_meta_uri.host, host_meta_uri.port)\n http.use_ssl = (host_meta_uri.port==443)\n response = http.start {|http| http.request(request) }\n raise OpenTransact::UndiscoverableResource unless response.code==\"200\"\n MultiXml.parse(response.body)[\"XRD\"]\n end\n end", "def host\n api['host']\n end", "def get_host\n @host\n end", "def build_host\n host = @host_value || base_value.host\n raise UnresolvableResourceError, 'no HTTP host specified' if host.blank?\n host\n end", "def enable_xpn_host request_pb, options = nil\n raise ::ArgumentError, \"request must be provided\" if request_pb.nil?\n\n uri, _body, query_string_params = transcode_enable_xpn_host_request request_pb\n response = @client_stub.make_post_request(\n uri: uri,\n params: query_string_params,\n options: options\n )\n result = ::Google::Cloud::Compute::V1::Operation.decode_json response.body, ignore_unknown_fields: true\n\n yield result, response if block_given?\n result\n end", "def full_host=(_arg0); end", "def enable_xpn_host request_pb, options = nil\n raise ::ArgumentError, \"request must be provided\" if request_pb.nil?\n\n verb, uri, query_string_params, body = ServiceStub.transcode_enable_xpn_host_request request_pb\n query_string_params = if query_string_params.any?\n query_string_params.to_h { |p| p.split \"=\", 2 }\n else\n {}\n end\n\n response = @client_stub.make_http_request(\n verb,\n uri: uri,\n body: body || \"\",\n params: query_string_params,\n options: options\n )\n operation = ::Gapic::Rest::TransportOperation.new response\n result = ::Google::Cloud::Compute::V1::Operation.decode_json response.body, ignore_unknown_fields: true\n\n yield result, operation if block_given?\n result\n end", "def fetch_from_host(params)\n if @host =~ URI::regexp\n \n uri = URI(@host)\n @calls+=1\n uri.query = URI.encode_www_form(params)\n\n res = Net::HTTP.get_response(uri)\n return res\n else\n return nil\n end\n end", "def get_host_by_displayname(displayname)\n host = nil\n host_json = rpc(\"getHost\", {\"displayName\" => URI::encode(displayname)})\n host_resp = JSON.parse(host_json)\n# p host_resp\n if host_resp[\"status\"] == 200\n host = host_resp[\"data\"]\n# puts(\"Found host matching #{displayname}\")\n end\n host\nend", "def get_host_by_displayname(displayname)\n host = nil\n host_json = rpc(\"getHost\", {\"displayName\" => URI::encode(displayname)})\n #puts(host_json)\n host_resp = JSON.parse(host_json)\n if host_resp[\"status\"] == 200\n host = host_resp[\"data\"]\n# puts(\"Found host matching #{displayname}\")\n end\n host\nend", "def public_hostname\n get_proxy.get_public_hostname\n end", "def get_ip_address\n rpc_get_fact_direct('host_ip')\n end", "def resolve_hostname; end", "def requested_http_host\n 
@client_headers['Host']\n end", "def get_public_hostname\n rpc_get_fact_direct('public_hostname')\n end", "def ipn_endpoint; end", "def external_host_provider\n @external_host_provider\n end", "def info\n self.class.call('domain.host.info', @hostname)\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Baseline implementation for the get_xpn_resources REST call
def get_xpn_resources request_pb, options = nil
  raise ::ArgumentError, "request must be provided" if request_pb.nil?

  uri, _body, query_string_params = transcode_get_xpn_resources_request request_pb
  response = @client_stub.make_get_request(
    uri: uri,
    params: query_string_params,
    options: options
  )
  result = ::Google::Cloud::Compute::V1::ProjectsGetXpnResources.decode_json response.body, ignore_unknown_fields: true

  yield result, response if block_given?
  result
end
[ "def get_xpn_resources request_pb, options = nil\n raise ::ArgumentError, \"request must be provided\" if request_pb.nil?\n\n verb, uri, query_string_params, body = ServiceStub.transcode_get_xpn_resources_request request_pb\n query_string_params = if query_string_params.any?\n query_string_params.to_h { |p| p.split \"=\", 2 }\n else\n {}\n end\n\n response = @client_stub.make_http_request(\n verb,\n uri: uri,\n body: body || \"\",\n params: query_string_params,\n options: options\n )\n operation = ::Gapic::Rest::TransportOperation.new response\n result = ::Google::Cloud::Compute::V1::ProjectsGetXpnResources.decode_json response.body, ignore_unknown_fields: true\n\n yield result, operation if block_given?\n result\n end", "def xlist_available_resources()\n @@memcache_id = self.name + \"/list_resources\"\n if Rails.cache.exist? @@memcache_id\n binobj = Rails.cache.read(@@memcache_id)\n # deserialize from Json\n obj = HTTParty::Parser.call(binobj, :json)\n return obj\n else\n obj = self.list_available_resources()\n # Save a Json, Marshal.dump or :raw does not work\n Rails.cache.write(@@memcache_id, obj.body)\n return obj\n end\n end", "def xenresources(nodeid)\n perform_request(action: 'node-xenresources', nodeid: nodeid)\n returned_parameters\n end", "def xenresources(nodeid)\n perform_request(:action => 'node-xenresources', :nodeid => nodeid)\n returned_parameters\n end", "def resource_objects\n @body['provision']['resource']\n end", "def resources\n @resources ||= @response[@resource_field].to_a\n end", "def domainResourceGet args\n \tif not args.has_key?(:ResourceID)\n \t\traise \"ResourceID argument missing from argument list\"\n \tend\n \t\n \tmake_request this_method, args\n end", "def rq_sp_resources_all\n rq_sp_resources = Array.new\n item_sp_resource_invs.each do |item_sp_resource_inv|\n rq_sp_resources << item_sp_resource_inv.sp_resource\n end\n return rq_sp_resources\n end", "def fetch_paginated_resources resource_format, resource_key = nil\n resources = []\n page = 1\n loop do\n resource = fetch_resource(resource_format % page, resource_key)\n break if resource.empty?\n page += 1\n resources += resource\n end\n resources\n end", "def index\n @external_resources = ExternalResource.all\n end", "def search_pnx\n responses = []\n # from 1 until the max index specified, search pnx, retrieving\n # the num results specified each time\n (0..@config['max_index']).each do |i|\n index = i * @config['bulk_size'] + 1\n response = get_search_response(index, @config['bulk_size'])\n raise \"Primo X-Services returned response #{response.code}\" unless response.code == '200'\n responses.push(response)\n end\n responses\n end", "def resource_index\n if self.class.included_modules.include? APPI::FiltersResources\n apply_filter_params resource_class.all\n else\n resource_class.all\n end\n end", "def fetch_resources_fallback\n result = Hpricot.XML( open( feed ) ) #more infos but less resources max. 
31 #TODO: loop trough annotations to get even more!\n (result/:item).each do |resource|\n i = Resource.factory( type, :feed=> resource )\n self.resources << i\n end\n end", "def resource_parameters\n @resource_parameters ||= []\n end", "def generate_resources_list\n resources = chefsorted_objects(:resource)\n generate_full_list(resources, 'Resource', 'resources')\nend", "def resource_operations; end", "def aggregated_resources\n resources = Resource.aggregated_resources(uri_prefix: params.require(:uri_prefix))\n render json: resources, each_serializer: AggregatedResourceEachSerializer\n end", "def resource_list\n self.resources\n end", "def resources\n '.Resources|with_entries(.value.LogicalResourceId = .key)[]'\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Baseline implementation for the list_xpn_hosts REST call
def list_xpn_hosts request_pb, options = nil
  raise ::ArgumentError, "request must be provided" if request_pb.nil?

  uri, body, query_string_params = transcode_list_xpn_hosts_request request_pb
  response = @client_stub.make_post_request(
    uri: uri,
    body: body,
    params: query_string_params,
    options: options
  )
  result = ::Google::Cloud::Compute::V1::XpnHostList.decode_json response.body, ignore_unknown_fields: true

  yield result, response if block_given?
  result
end
[ "def list_xpn_hosts request_pb, options = nil\n raise ::ArgumentError, \"request must be provided\" if request_pb.nil?\n\n verb, uri, query_string_params, body = ServiceStub.transcode_list_xpn_hosts_request request_pb\n query_string_params = if query_string_params.any?\n query_string_params.to_h { |p| p.split \"=\", 2 }\n else\n {}\n end\n\n response = @client_stub.make_http_request(\n verb,\n uri: uri,\n body: body || \"\",\n params: query_string_params,\n options: options\n )\n operation = ::Gapic::Rest::TransportOperation.new response\n result = ::Google::Cloud::Compute::V1::XpnHostList.decode_json response.body, ignore_unknown_fields: true\n\n yield result, operation if block_given?\n result\n end", "def list_hosts\n VSphereAutomation::VCenter::HostApi.new(api_client).list.value\n end", "def network_hosts_all\n begin\n print_verbose \"Retrieving all network hosts\"\n response = RestClient.get \"#{@url}network/hosts\", {:params => {:token => @token}}\n details = JSON.parse(response.body)\n print_good \"Retrieved #{details['count']} network hosts\"\n details\n rescue => e\n print_error \"Could not retrieve network hosts: #{e.message}\"\n end\nend", "def list_hosts\n Com::Vmware::Vcenter::Host.new(vapi_config).list()\n end", "def test_get_all_hosts\n rest_response = RestClient.get(\"#{RESTAPI_NETWORK}/hosts?token=#{@@token}\")\n check_rest_response(rest_response)\n result = JSON.parse(rest_response.body)\n assert(result['count'])\n assert(result['hosts'])\n assert_not_equal(0, result['count'])\n end", "def get_puppetdb_hosts\n curl = setup_curl(\"#{@puppetdb_url}/v3/nodes\")\n curl.get\n servers_junk = JSON.parse(curl.body_str)\n servers_array = []\n servers_junk.each { |server| servers_array << server['name'] }\n @puppetdb_hosts = servers_array\n end", "def index_hosts\n load_service\n return if (@service.blank?)\n\n # Preload hosts\n @hosts = Host.where(:_id.in => @service.host_ids)\n\n respond_to do |format|\n format.html\n end\n end", "def list_hosts(cmd)\n output = one_off(\"#{cmd} --list-hosts\").gsub!(/\\s+hosts.*:\\n/, '').strip\n output.split(\"\\n\").map(&:strip)\n end", "def cmd_resolve_hosts argv\n setup argv\n name = @hash['name']\n response = @api.resolve_hosts(name)\n msg response\n return response\n end", "def list(fqdn, opts = {})\n call('domain.host.list', fqdn, opts).map do |host|\n self.new(host['name'], host)\n end\n end", "def list_hosts( zone_id)\n request(\n :expects => 200,\n :method => 'GET',\n :parser => Fog::Parsers::Zerigo::DNS::ListHosts.new,\n :path => \"/api/1.1/zones/#{zone_id}/hosts.xml\"\n )\n end", "def host_list\n return @host_list if defined?(@host_list)\n\n if !self.hosts.blank?\n @host_list = self.hosts.split(/[,\\s]+/).compact\n else\n @host_list = []\n end\n\n @host_list\n end", "def get_all_hosts()\n results = @zabbix.host.get({\"output\" => [\"name\"], \"sortfield\" => \"name\",})\n host_names = []\n results.each { |result| host_names << result['name'] }\n return host_names\n end", "def query_host_group_list\n get_resource_list('compute', 'os-aggregates', 'aggregates', name = 'name', id = 'hosts')\n puts 'The list of Host Groups in this PowerVC cloud are:'\n headers = ['Host Groups']\n print_table(headers, @resource_print_list)\n footer\n puts 'The list of hosts are:'\n headers = ['Host Names']\n print_table(headers, @resource_id_print_list)\n footer\n end", "def list_hosts(domain, compact_list_optional)\n validate_list([[\"Domain\", domain, :domain_format]])\n options = {\"Domain\" => domain}\n optional_fields = [ [\"compact_list_optional\", 
compact_list_optional] ]\n options = set_optional_fields(optional_fields, options)\n\n connection = Connection.new\n connection.post(\"Domain/Host/List\", options)\n end", "def all\n @hosts\n end", "def entries\n extract_detail(:hostlist).map do |host|\n host =~ /^@/ ? HostGroup.new(host) : ExecuteHost.new(host)\n end\n end", "def list_hosts(folder = '')\n rows = JSON.parse(http_request(@uri + '/view.py', {\n wato_folder: folder,\n\tsearch: 'Search',\n\tfilled_in: 'filter',\n\thost_address_prefix: 'yes',\n\tview_name: 'searchhost',\n\toutput_format: 'json',\n\t}))\n rows.shift # skip the header\n rows.map { |r| r[1] }\n end", "def index\n @host_addresses = HostAddress.all\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Baseline implementation for the move_disk REST call
def move_disk request_pb, options = nil
  raise ::ArgumentError, "request must be provided" if request_pb.nil?

  uri, body, query_string_params = transcode_move_disk_request request_pb
  response = @client_stub.make_post_request(
    uri: uri,
    body: body,
    params: query_string_params,
    options: options
  )
  result = ::Google::Cloud::Compute::V1::Operation.decode_json response.body, ignore_unknown_fields: true

  yield result, response if block_given?
  result
end
[ "def move_disk request_pb, options = nil\n raise ::ArgumentError, \"request must be provided\" if request_pb.nil?\n\n verb, uri, query_string_params, body = ServiceStub.transcode_move_disk_request request_pb\n query_string_params = if query_string_params.any?\n query_string_params.to_h { |p| p.split \"=\", 2 }\n else\n {}\n end\n\n response = @client_stub.make_http_request(\n verb,\n uri: uri,\n body: body || \"\",\n params: query_string_params,\n options: options\n )\n operation = ::Gapic::Rest::TransportOperation.new response\n result = ::Google::Cloud::Compute::V1::Operation.decode_json response.body, ignore_unknown_fields: true\n\n yield result, operation if block_given?\n result\n end", "def move_disk(from,to)\n @towers[to] << @towers[from][-1]\n @towers[from].pop\n nil\nend", "def move (from, to)\n disk = @positions[from - 1].pop\n @positions[to - 1] << disk\n @moves = @moves + 1\n end", "def move_disk(number_of_disks,from,to,via)\n if number_of_disks == 0\n to.unshift(from.shift)\n else\n move_disk(number_of_disks-1,from,via,to) \n to.unshift(from.shift) unless from.empty?\n move_disk(number_of_disks-1,via,to,from) \n end\n to\nend", "def http_move(request, response)\n path = request.path\n\n move_info = @server.copy_and_move_info(request)\n\n if move_info['destinationExists']\n return false unless @server.emit('beforeUnbind', [move_info['destination']])\n end\n\n return false unless @server.emit('beforeUnbind', [path])\n return false unless @server.emit('beforeBind', [move_info['destination']])\n return false unless @server.emit('beforeMove', [path, move_info['destination']])\n\n if move_info['destinationExists']\n @server.tree.delete(move_info['destination'])\n @server.emit('afterUnbind', [move_info['destination']])\n end\n\n @server.tree.move(path, move_info['destination'])\n\n # Its important afterMove is called before afterUnbind, because it\n # allows systems to transfer data from one path to another.\n # PropertyStorage uses this. If afterUnbind was first, it would clean\n # up all the properties before it has a chance.\n @server.emit('afterMove', [path, move_info['destination']])\n @server.emit('afterUnbind', [path])\n @server.emit('afterBind', [move_info['destination']])\n\n # If a resource was overwritten we should send a 204, otherwise a 201\n response.update_header('Content-Length', '0')\n response.status = move_info['destinationExists'] ? 
204 : 201\n\n # Sending back false will interupt the event chain and tell the server\n # we've handled this method.\n false\n end", "def move_disk from, to\n @towers[to-1] << @towers[from-1].pop\n end", "def move_disk(n, dst, buff)\n if n > 0\n move_disk(n - 1, buff, dst)\n move_top_to(dst)\n buff.move_disk(n - 1, dst, self)\n end\n end", "def move(io, context)\n location = context[:location]\n metadata = context[:metadata]\n upload_options = context[:upload_options] || {}\n\n storage.move(io, location, shrine_metadata: metadata, **upload_options)\n end", "def move\n return NotFound unless(resource.exist?)\n return BadRequest unless request.depth == :infinity\n\n return BadRequest unless dest = request.destination\n if status = dest.validate(host: request.host,\n resource_path: resource.path)\n return status\n end\n\n resource.lock_check if resource.supports_locking?\n\n return resource.move dest.path_info, request.overwrite?\n end", "def move(path, initheader = nil)\n request(Move.new(path, initheader))\n end", "def post_move_folder(src, dest, opts = {})\n if Configuration.debugging\n Configuration.logger.debug \"Calling API: StorageApi#post_move_folder ...\"\n end\n \n # verify the required parameter 'src' is set\n fail \"Missing the required parameter 'src' when calling post_move_folder\" if src.nil?\n \n # verify the required parameter 'dest' is set\n fail \"Missing the required parameter 'dest' when calling post_move_folder\" if dest.nil?\n \n # resource path\n path = \"/storage/folder/{src}\".sub('{format}','json').sub('{' + 'src' + '}', src.to_s)\n\n # query parameters\n query_params = {}\n query_params[:'dest'] = dest\n query_params[:'storage'] = opts[:'storage'] if opts[:'storage']\n query_params[:'destStorage'] = opts[:'dest_storage'] if opts[:'dest_storage']\n\n # header parameters\n header_params = {}\n\n # HTTP header 'Accept' (if needed)\n _header_accept = ['application/json', 'text/json', 'application/xml', 'text/xml', 'text/javascript']\n _header_accept_result = @api_client.select_header_accept(_header_accept) and header_params['Accept'] = _header_accept_result\n\n # HTTP header 'Content-Type'\n _header_content_type = []\n header_params['Content-Type'] = @api_client.select_header_content_type(_header_content_type)\n\n # form parameters\n form_params = {}\n\n # http body (model)\n post_body = nil\n \n\n auth_names = []\n result = @api_client.call_api(:POST, path,\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => 'BaseResponse')\n if Configuration.debugging\n Configuration.logger.debug \"API called: StorageApi#post_move_folder. 
Result: #{result.inspect}\"\n end\n return result\n end", "def test_post_move_folder()\n\n src = 'folder6'\n dest = 'folder7/folder6'\n storage = 'First Storage'\n dest_storage = 'First Storage'\n request = PostMoveFolderRequest.new(src, dest, storage, dest_storage)\n\n result = @storage_api.post_move_folder(request)\n assert result.code == 200, 'Error while moving folder'\n\n end", "def move(to_path)\n self.cloud_api.move(self, to_path)\n end", "def move(path, options = {}, &block)\n perform_request Net::HTTP::Move, path, options, &block\n end", "def restoredisks(src, dst)\n\n # retrieve the vm deploy id\n vm_id = dst.gsub(VMDIR.squeeze(\"/\"),\"\").split(\"/\")[1]\n\n # source is the save directory on one\n img_src = src.gsub(VAR_LOCATION + \"/\",\"\")\n # remote VM directory on ESX(i) host\n remote_dst = \"one-\"+vm_id\n\n # create the directory\n rc, info = do_vifs(\"-f --mkdir '[#{@datastore}] #{remote_dst}'\", false)\n\n # list the directory\n rc, info = do_vifs(\"--dir '[#{@imagestore}] #{img_src}'\", false)\n entrylist = \"\"\n if rc == true\n info.split(\"\\n\").each{ |line|\n next if line.empty?\n entrylist = line.match(\".*(Content Listing).*\")\n next if entrylist\n entrylist = line.match(\".*(\\-\\-\\-\\-\\-\\-\\-\\-\\-\\-\\-\\-\\-\\-\\-).*\")\n next if entrylist\n # if disk\n entrylist = line.match(\"disk\\.[0-9]\")\n if entrylist\n # test if directory\n rc, info = do_vifs(\"--dir '[#{@imagestore}] #{img_src}/line'\", false)\n # VMWare disk\n if rc == true\n # delete eventual previous disk\n rc, info = do_vmkfs(\"-U '[#{@datastore}] #{remote_dst}/#{line}.vmdk'\", false)\n # clone file\n rc, info = do_vmkfs(\"-i '[#{@imagestore}] #{img_src}/#{line}/disk.vmdk' -d thin '[#{@datastore}] #{remote_dst}/#{line}.vmdk'\")\n # if cloning successful\n if rc == true\n # delete the original\n rc, info = do_vmkfs(\"-U '[#{@imagestore}] #{img_src}/#{line}/disk.vmdk'\")\n # delete the original\n rc, info = do_vifs(\"-f --rmdir '[#{@imagestore}] #{img_src}/#{line}'\")\n else\n OpenNebula.log_error(\"Error during move of the virtual disk '[#{@imagestore}] #{img_src}/#{line}' to '[#{@datastore}] #{remote_dst}/#{line}.vmdk' on the host #{@host}\")\n exit info\n end\n # ISO disks\n else\n # delete eventual previous disk\n rc, info = do_vifs(\"-f --rm '[#{@datastore}] #{remote_dst}/#{line}.iso'\", false)\n # clone file\n rc, info = do_vifs(\"-f -m '[#{@imagestore}] #{img_src}/#{line}' '[#{@datastore}] #{remote_dst}/#{line}.iso'\")\n if rc == false\n OpenNebula.log_error(\"Error during move of the virtual disk '[#{@imagestore}] #{img_src}/#{line}' to '[#{@datastore}] #{remote_dst}/#{line}.iso' on the host #{@host}\")\n exit info\n end\n end\n end\n }\n end\n\n OpenNebula.log_info(\"Successfully restored virtual disks from '[#{@imagestore}] #{img_src}' to '[#{@datastore}] #{remote_dst}' on the host #{@host}\")\n\n return 0\n\n end", "def move(from_path, to_path, opts = {})\n input_json = {\n from_path: from_path,\n to_path: to_path,\n }\n response = @session.do_rpc_endpoint(\"/#{ @namespace }/move\", input_json)\n Dropbox::API::File.from_json(Dropbox::API::HTTP.parse_rpc_response(response))\n end", "def file_moved\n end", "def move_disks(n, src, dest, remain)\n return if n <= 0\n move_disks n - 1, src, remain, dest\n move_disk src, dest\n move_disks n - 1, remain, dest, src\n end", "def post_move(filename,src_repo,data)\n curl_post(\"#{self.host}/api2/repos/#{src_repo}/file/?p=#{filename}\",data).body_str\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Baseline implementation for the move_instance REST call
def move_instance request_pb, options = nil
  raise ::ArgumentError, "request must be provided" if request_pb.nil?

  uri, body, query_string_params = transcode_move_instance_request request_pb
  response = @client_stub.make_post_request(
    uri: uri,
    body: body,
    params: query_string_params,
    options: options
  )
  result = ::Google::Cloud::Compute::V1::Operation.decode_json response.body, ignore_unknown_fields: true

  yield result, response if block_given?
  result
end
[ "def move_instance request_pb, options = nil\n raise ::ArgumentError, \"request must be provided\" if request_pb.nil?\n\n verb, uri, query_string_params, body = ServiceStub.transcode_move_instance_request request_pb\n query_string_params = if query_string_params.any?\n query_string_params.to_h { |p| p.split \"=\", 2 }\n else\n {}\n end\n\n response = @client_stub.make_http_request(\n verb,\n uri: uri,\n body: body || \"\",\n params: query_string_params,\n options: options\n )\n operation = ::Gapic::Rest::TransportOperation.new response\n result = ::Google::Cloud::Compute::V1::Operation.decode_json response.body, ignore_unknown_fields: true\n\n yield result, operation if block_given?\n result\n end", "def move_resource_class(direction)\n \"move move__#{direction}\"\n end", "def move(options)\n @error = nil\n\n move_options = { :project_id => project_id }\n if options[:before]\n move_options.merge!({ :type => :before, :target_id => options[:before].id })\n elsif options[:after]\n move_options.merge!({ :type => :after, :target_id => options[:after].id })\n else\n raise ArgumentError, \"Must specify :before => story or :after => story\"\n end\n\n self.connection[self.class.move_path(id, move_options)].post(\"\").body\n\n self\n rescue RestClient::UnprocessableEntity => e\n @error = JSON.parse(XmlToJson.transform(e.response.body))[\"message\"]\n false\n end", "def move\n @guest.move params[:index]\n\n respond_to do |format|\n format.html { render nothing: true }\n format.json { head :ok }\n end\n end", "def move\n return NotFound unless(resource.exist?)\n return BadRequest unless request.depth == :infinity\n\n return BadRequest unless dest = request.destination\n if status = dest.validate(host: request.host,\n resource_path: resource.path)\n return status\n end\n\n resource.lock_check if resource.supports_locking?\n\n return resource.move dest.path_info, request.overwrite?\n end", "def move(*args)\n unless(resource.exist?)\n NotFound\n else\n resource.lock_check if resource.supports_locking? && !args.include(:copy)\n destination = url_unescape(env['HTTP_DESTINATION'].sub(%r{https?://([^/]+)}, ''))\n dest_host = $1\n if(dest_host && dest_host.gsub(/:\\d{2,5}$/, '') != request.host)\n BadGateway\n elsif(destination == resource.public_path)\n Forbidden\n else\n collection = resource.collection?\n dest = resource_class.new(destination, clean_path(destination), @request, @response, @options.merge(:user => resource.user))\n status = nil\n if(args.include?(:copy))\n status = resource.copy(dest, overwrite)\n else\n return Conflict unless depth.is_a?(Symbol) || depth > 1\n status = resource.move(dest, overwrite)\n end\n # RFC 2518\n if collection\n multistatus do |xml|\n xml.response do\n resource_to_check = status == Created ? dest : resource\n xml.href \"#{scheme}://#{host}:#{port}#{resource_to_check.url_format}\"\n xml.status \"#{http_version} #{status.status_line}\"\n end\n end\n else\n status\n end\n end\n end\n end", "def move(*args)\n unless(resource.exist?)\n NotFound\n else\n resource.lock_check if resource.supports_locking? 
&& !args.include?(:copy)\n destination = url_unescape(env['HTTP_DESTINATION'].sub(%r{https?://([^/]+)}, ''))\n dest_host = $1\n if(dest_host && dest_host.gsub(/:\\d{2,5}$/, '') != request.host)\n BadGateway\n elsif(destination == resource.public_path)\n Forbidden\n else\n dest = resource_class.new(destination, clean_path(destination), @request, @response, @options.merge(:user => resource.user))\n status = nil\n if(args.include?(:copy))\n status = resource.copy(dest, overwrite)\n else\n return Conflict unless depth.is_a?(Symbol) || depth > 1\n status = resource.move(dest, overwrite)\n end\n response['Location'] = \"#{scheme}://#{host}:#{port}#{url_format(dest)}\" if status == Created\n # RFC 2518\n return_status(dest,status)\n end\n end\n end", "def move(options = {})\n self.class.move(version_key, options)\n end", "def smove(source, destination, member); end", "def move(to_path)\n self.cloud_api.move(self, to_path)\n end", "def http_move(request, response)\n path = request.path\n\n move_info = @server.copy_and_move_info(request)\n\n if move_info['destinationExists']\n return false unless @server.emit('beforeUnbind', [move_info['destination']])\n end\n\n return false unless @server.emit('beforeUnbind', [path])\n return false unless @server.emit('beforeBind', [move_info['destination']])\n return false unless @server.emit('beforeMove', [path, move_info['destination']])\n\n if move_info['destinationExists']\n @server.tree.delete(move_info['destination'])\n @server.emit('afterUnbind', [move_info['destination']])\n end\n\n @server.tree.move(path, move_info['destination'])\n\n # Its important afterMove is called before afterUnbind, because it\n # allows systems to transfer data from one path to another.\n # PropertyStorage uses this. If afterUnbind was first, it would clean\n # up all the properties before it has a chance.\n @server.emit('afterMove', [path, move_info['destination']])\n @server.emit('afterUnbind', [path])\n @server.emit('afterBind', [move_info['destination']])\n\n # If a resource was overwritten we should send a 204, otherwise a 201\n response.update_header('Content-Length', '0')\n response.status = move_info['destinationExists'] ? 204 : 201\n\n # Sending back false will interupt the event chain and tell the server\n # we've handled this method.\n false\n end", "def move(options = {})\n self.class.move(self.version_key, options)\n end", "def migrate(dst)\n self.stop\n new_instance = self.class.new(@box, @options)\n new_instance.start\n return new_instance\n end", "def move url, options={} \n response = self.class.move(url, {:body => options}).parsed_response\n raise Skydrive::Error.new(response[\"error\"]) if response[\"error\"]\n response[\"data\"] ? 
response[\"data\"] : response\n end", "def rename_instance request_pb, options = nil\n raise ::ArgumentError, \"request must be provided\" if request_pb.nil?\n\n verb, uri, query_string_params, body = ServiceStub.transcode_rename_instance_request request_pb\n query_string_params = if query_string_params.any?\n query_string_params.to_h { |p| p.split \"=\", 2 }\n else\n {}\n end\n\n response = @client_stub.make_http_request(\n verb,\n uri: uri,\n body: body || \"\",\n params: query_string_params,\n options: options\n )\n operation = ::Gapic::Rest::TransportOperation.new response\n result = ::Google::Cloud::BareMetalSolution::V2::Instance.decode_json response.body, ignore_unknown_fields: true\n\n yield result, operation if block_given?\n result\n end", "def switchmove _obj, _args\n \"_obj switchmove _args;\" \n end", "def receive_move_by_application\n @game = Game.find_by_id(params[:game_id])\n do_move_by_application \n \n handle_response_display false \n end", "def move_gear_post(gear, destination_container, state_map)\n app = gear.application\n reply = ResultIO.new\n gear_components = gear.component_instances\n start_order, stop_order = app.calculate_component_orders\n source_container = gear.get_proxy\n\n if gear.group_instance.platform.downcase == \"windows\"\n log_debug \"DEBUG: Restoring ownership and user ACLs for Windows gear '#{gear.uuid}'\"\n rsync_keyfile = Rails.configuration.auth[:rsync_keyfile]\n log_debug `eval \\`ssh-agent\\`; ssh-add #{rsync_keyfile}; ssh -o StrictHostKeyChecking=no -A root@#{destination_container.get_ip_address} \"/cygdrive/c/openshift/bin/oo-cmd.exe oo-admin-restore-acls --uuid:#{gear.uuid}\"; exit_code=$?; ssh-agent -k;exit $exit_code`\n end\n\n start_order.each do |cinst|\n next unless gear_components.include? cinst\n cart = cinst.cartridge_name\n idle, leave_stopped = state_map[cart]\n unless leave_stopped\n log_debug \"DEBUG: Starting cartridge '#{cart}' in '#{app.name}' after move on #{destination_container.id}\"\n args = build_base_gear_args(gear)\n args = build_base_component_args(cinst, args)\n reply.append destination_container.send(:run_cartridge_command, cart, gear, \"start\", args, false)\n end\n end\n\n log_debug \"DEBUG: Fixing DNS and mongo for gear '#{gear.uuid}' after move\"\n log_debug \"DEBUG: Changing server identity of '#{gear.uuid}' from '#{source_container.id}' to '#{destination_container.id}'\"\n gear.server_identity = destination_container.id\n # Persist server identity for gear in mongo\n res = Application.where({\"_id\" => app.id, \"gears.uuid\" => gear.uuid}).update({\"$set\" => {\"gears.$.server_identity\" => gear.server_identity}})\n raise OpenShift::OOException.new(\"Could not set gear server_identity to #{gear.server_identity}\") if res.nil? or !res[\"updatedExisting\"]\n\n gear.group_instance.gear_size = destination_container.get_node_profile\n # Persist gear size for current group instance in mongo\n res = Application.where({\"_id\" => app.id, \"group_instances._id\" => gear.group_instance.id}).update({\"$set\" => {\"group_instances.$.gear_size\" => gear.group_instance.gear_size}})\n raise OpenShift::OOException.new(\"Could not set group instance gear_size to #{gear.group_instance.gear_size}\") if res.nil? 
or !res[\"updatedExisting\"]\n begin\n dns = OpenShift::DnsService.instance\n public_hostname = destination_container.get_public_hostname\n dns.modify_application(gear.name, app.domain_namespace, public_hostname)\n dns.publish\n ensure\n dns.close\n end\n reply\n end", "def move(io, context)\n location = context[:location]\n metadata = context[:metadata]\n upload_options = context[:upload_options] || {}\n\n storage.move(io, location, shrine_metadata: metadata, **upload_options)\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Baseline implementation for the set_common_instance_metadata REST call
def set_common_instance_metadata request_pb, options = nil
  raise ::ArgumentError, "request must be provided" if request_pb.nil?

  uri, body, query_string_params = transcode_set_common_instance_metadata_request request_pb
  response = @client_stub.make_post_request(
    uri: uri,
    body: body,
    params: query_string_params,
    options: options
  )
  result = ::Google::Cloud::Compute::V1::Operation.decode_json response.body, ignore_unknown_fields: true

  yield result, response if block_given?
  result
end
[ "def set_common_instance_metadata request_pb, options:, &block\n uri = \"/compute/v1/projects/#{request_pb.project}/setCommonInstanceMetadata\"\n body = request_pb.metadata_resource.to_json\n\n response = @client_stub.make_post_request(\n uri: uri,\n body: body,\n options: options,\n )\n\n result = ::Google::Cloud::Compute::V1::Operation.decode_json response.body, ignore_unknown_fields: true\n\n yield result, response if block_given?\n\n result\n end", "def set_common_instance_metadata request_pb, options = nil\n raise ::ArgumentError, \"request must be provided\" if request_pb.nil?\n\n verb, uri, query_string_params, body = ServiceStub.transcode_set_common_instance_metadata_request request_pb\n query_string_params = if query_string_params.any?\n query_string_params.to_h { |p| p.split \"=\", 2 }\n else\n {}\n end\n\n response = @client_stub.make_http_request(\n verb,\n uri: uri,\n body: body || \"\",\n params: query_string_params,\n options: options\n )\n operation = ::Gapic::Rest::TransportOperation.new response\n result = ::Google::Cloud::Compute::V1::Operation.decode_json response.body, ignore_unknown_fields: true\n\n yield result, operation if block_given?\n result\n end", "def process_metadata\n if @response\n self.resource.metadata[:http_version] = @response.http_version\n self.resource.metadata[:status] = @response.status\n self.resource.metadata[:reason] = @response.reason\n self.resource.metadata[:headers] = @response.headers\n end\n end", "def metadata_request(key = '')\n key = to_hyph(key)\n begin\n unless zfind('TESTING')\n metadata_uri = \"http://169.254.169.254/latest/meta-data/#{key}\"\n HTTParty.get(metadata_uri).parsed_response.split(\"\\n\")\n else\n stubbed_metadata = Smash::CloudPowers::AwsStubs.instance_metadata_stub\n\n key.empty? ? 
stubbed_metadata.keys : stubbed_metadata[to_hyph(key)]\n end\n rescue Exception => e\n logger.fatal format_error_message e\n end\n end", "def response_metadata=(_); end", "def set_vm_metadata(vm_cid, metadata)\n @telemetry_manager.monitor('initialize') do\n _init_azure\n end\n with_thread_name(\"set_vm_metadata(#{vm_cid})\") do\n @telemetry_manager.monitor('set_vm_metadata', id: vm_cid) do\n @logger.info(\"set_vm_metadata(#{vm_cid}, #{metadata})\")\n @vm_manager.set_metadata(InstanceId.parse(vm_cid, _azure_config.resource_group_name), encode_metadata(metadata))\n end\n end\n end", "def initialize(client, address, other_metadata = {})\n super(client, address)\n @written_md_vars = {}\n @metadata_loaded = false\n @metadata = {}\n \n key_lookup = self.class.md_key_map\n type_lookup = self.class.md_type_coercion_map\n other_metadata.each do |key, value|\n insert_metadata key, value, key_lookup, type_lookup\n end\n end", "def load_metadata\n if self.class.metadata_schema && !@metadata_loaded\n if @metadata_loader\n # Load the metadata externally\n @metadata_loader.call\n @metadata_loader = nil\n else\n # Load the metadata just for this single resource\n response = api('request',\n :uri => metadata_uri,\n :response_schema => self.class.metadata_schema\n )\n self.metadata = response['result']\n end\n end\n end", "def vm_metadata=(value)\n @vm_metadata = value\n end", "def metadata=(value)\n @metadata = value\n end", "def update_initial_metadata(metadata)\n end", "def set_meta_headers_for(instance)\n # Open storage with cf admin credentials for setting quotas and meta-key\n storage_options_for_tenant = @fog_options[:storage].merge(:hp_tenant_id => instance.tenant_id)\n storage = VCAP::Services::Swift::Storage.new(@logger, storage_options_for_tenant)\n storage.set_meta_key_and_quotas(instance.account_meta_key, BASE_QUOTA)\n end", "def set_vm_metadata(server_id, metadata)\n with_thread_name(\"set_vm_metadata(#{server_id}, ...)\") do\n server = openstack_server(server_id)\n cloud_error(\"Server `#{server_id}' not found\") unless server\n @openstack.with_openstack { TagManager.tag_server(server, metadata) }\n\n apply_human_readable_name(metadata, server_id) if human_readable_name?(server, server_id)\n end\n end", "def fetch_metadata(metadata = \"instance\")\n response = http_get(\"#{OCI_METADATA_URL}/#{metadata}\")\n if response.code == \"200\"\n json_data = parse_json(response.body)\n if json_data.nil?\n logger.warn(\"Mixin OciMetadata: Metadata response is NOT valid JSON\")\n end\n json_data\n else\n logger.warn(\"Mixin OciMetadata: Received response code #{response.code} requesting metadata\")\n nil\n end\n end", "def set_metadata(metadatahash)\n headers = {}\n metadatahash.each{|key, value| headers['X-Object-Meta-' + key.to_s.capitalize] = value.to_s}\n response = self.container.connection.cfreq(\"POST\",@storagehost,@storagepath,headers)\n raise NoSuchObjectException, \"Object #{@name} does not exist\" if (response.code == \"404\")\n raise InvalidResponseException, \"Invalid response code #{response.code}\" unless (response.code == \"202\")\n true\n end", "def set_vm_metadata(vm_id, metadata)\n @logger.info('Setting virtual machine metadata...')\n end", "def update_metadata(metadata)\n return if metadata.nil?\n fsi = Vcloud::Core::Fog::ServiceInterface.new\n metadata.each do |k, v|\n fsi.put_vapp_metadata_value(@vapp.id, k, v)\n fsi.put_vapp_metadata_value(id, k, v)\n end\n end", "def instance_data\n @instance_data ||= 
JSON.parse(Net::HTTP.get(URI.parse('http://169.254.169.254/latest/dynamic/instance-identity/document')))\n end", "def metadata_merge(hash)\n response = RedboothRuby.request(:put, nil, 'metadata',\n { target_id: id, target_type: klass_name, metadata: hash },\n { session: session })\n\n response.data\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Baseline implementation for the set_default_network_tier REST call
def set_default_network_tier request_pb, options = nil
  raise ::ArgumentError, "request must be provided" if request_pb.nil?

  uri, body, query_string_params = transcode_set_default_network_tier_request request_pb
  response = @client_stub.make_post_request(
    uri: uri,
    body: body,
    params: query_string_params,
    options: options
  )
  result = ::Google::Cloud::Compute::V1::Operation.decode_json response.body, ignore_unknown_fields: true

  yield result, response if block_given?
  result
end
[ "def set_default_network_tier request_pb, options = nil\n raise ::ArgumentError, \"request must be provided\" if request_pb.nil?\n\n verb, uri, query_string_params, body = ServiceStub.transcode_set_default_network_tier_request request_pb\n query_string_params = if query_string_params.any?\n query_string_params.to_h { |p| p.split \"=\", 2 }\n else\n {}\n end\n\n response = @client_stub.make_http_request(\n verb,\n uri: uri,\n body: body || \"\",\n params: query_string_params,\n options: options\n )\n operation = ::Gapic::Rest::TransportOperation.new response\n result = ::Google::Cloud::Compute::V1::Operation.decode_json response.body, ignore_unknown_fields: true\n\n yield result, operation if block_given?\n result\n end", "def set_default_network_tier request_pb, options:, &block\n uri = \"/compute/v1/projects/#{request_pb.project}/setDefaultNetworkTier\"\n body = request_pb.projects_set_default_network_tier_request_resource.to_json\n\n response = @client_stub.make_post_request(\n uri: uri,\n body: body,\n options: options,\n )\n\n result = ::Google::Cloud::Compute::V1::Operation.decode_json response.body, ignore_unknown_fields: true\n\n yield result, response if block_given?\n\n result\n end", "def tier\n raw_tier || Tier.default\n end", "def get_default_tier(tiers=[])\n tiers = get_plan_names if tiers.empty?\n \n tiers.present? ? tiers[\"default\"] : \"\"\n end", "def tier=(value)\n @tier = value\n end", "def default_network\n BASE_URL + @options[:gce_project] + '/global/networks/default'\n end", "def tier\n settings[:tier]\n end", "def tier\n data[:tier]\n end", "def load_tier\n if id = tier_param_id || params[:tier_id]\n @tier = Tier.find_by_permalink_and_region_and_active(id)\n @tier_class = @tier.class if @tier\n end\n end", "def set_pricing_tier_with_http_info(did, pricing_tier, opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug \"Calling API: MonetizationApi.set_pricing_tier ...\"\n end\n # verify the required parameter 'did' is set\n fail ArgumentError, \"Missing the required parameter 'did' when calling MonetizationApi.set_pricing_tier\" if did.nil?\n # verify the required parameter 'pricing_tier' is set\n fail ArgumentError, \"Missing the required parameter 'pricing_tier' when calling MonetizationApi.set_pricing_tier\" if pricing_tier.nil?\n # resource path\n local_var_path = \"/pricing/devices/{did}/pricingtiers\".sub('{format}','json').sub('{' + 'did' + '}', did.to_s)\n\n # query parameters\n query_params = {}\n\n # header parameters\n header_params = {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n\n # form parameters\n form_params = {}\n\n # http body (model)\n post_body = @api_client.object_to_http_body(pricing_tier)\n auth_names = ['artikcloud_oauth']\n data, status_code, headers = @api_client.call_api(:PUT, local_var_path,\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => 'DevicePricingTierEnvelope')\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: MonetizationApi#set_pricing_tier\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end", "def current_tier\n @current_tier ||= load_tier_from_session unless @current_tier == false\n end", "def tier\n return @tier\n end", "def save_tier\n self.tier = @tier_cache if @tier_cache\n end", "def choose_layout\n 
@tier ? 'tier' : 'front'\n end", "def tier\n @tier_cache || self.tiers[0]\n end", "def create_zero_tier\n ManageIQ::Showback::Tier.create(:tier_start_value => 0, :tier_end_value => Float::INFINITY, :rate => self)\n end", "def network_type=(network_type)\n network_obj = resource[:network][\"staticNetworkConfiguration\"]\n racadm_set_niccfg(\"chassis\", :static, network_obj['ipAddress'], network_obj['subnet'], network_obj['gateway'])\n wait_for_ip_change(network_obj['ipAddress'])\n end", "def set_default_pool # rubocop:disable AbcSize\n converge_by(\"Updating #{new_resource} default pool to #{new_resource.default_pool}\") do\n Chef::Log.info(\"Updating #{new_resource} default pool to #{new_resource.default_pool}\")\n load_balancer.client['LocalLB.VirtualServer'].set_default_pool_name([new_resource.vs_name], [new_resource.default_pool])\n current_resource.default_pool(new_resource.default_pool)\n\n new_resource.updated_by_last_action(true)\n end\n end", "def configure_network\n Template.render_to('ct/network', {\n netifs: @netifs,\n }, lxc_config_path('network'))\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Baseline implementation for the set_usage_export_bucket REST call
def set_usage_export_bucket request_pb, options = nil
  raise ::ArgumentError, "request must be provided" if request_pb.nil?

  uri, body, query_string_params = transcode_set_usage_export_bucket_request request_pb
  response = @client_stub.make_post_request(
    uri: uri,
    body: body,
    params: query_string_params,
    options: options
  )
  result = ::Google::Cloud::Compute::V1::Operation.decode_json response.body, ignore_unknown_fields: true

  yield result, response if block_given?
  result
end
[ "def set_usage_export_bucket request_pb, options = nil\n raise ::ArgumentError, \"request must be provided\" if request_pb.nil?\n\n verb, uri, query_string_params, body = ServiceStub.transcode_set_usage_export_bucket_request request_pb\n query_string_params = if query_string_params.any?\n query_string_params.to_h { |p| p.split \"=\", 2 }\n else\n {}\n end\n\n response = @client_stub.make_http_request(\n verb,\n uri: uri,\n body: body || \"\",\n params: query_string_params,\n options: options\n )\n operation = ::Gapic::Rest::TransportOperation.new response\n result = ::Google::Cloud::Compute::V1::Operation.decode_json response.body, ignore_unknown_fields: true\n\n yield result, operation if block_given?\n result\n end", "def set_usage_export_bucket request_pb, options:, &block\n uri = \"/compute/v1/projects/#{request_pb.project}/setUsageExportBucket\"\n body = request_pb.usage_export_location_resource.to_json\n\n response = @client_stub.make_post_request(\n uri: uri,\n body: body,\n options: options,\n )\n\n result = ::Google::Cloud::Compute::V1::Operation.decode_json response.body, ignore_unknown_fields: true\n\n yield result, response if block_given?\n\n result\n end", "def set_usage_export_bucket project:, bucket_name:, report_name_prefix: \"\"\n export_location = { bucket_name: bucket_name, report_name_prefix: report_name_prefix }\n if report_name_prefix.empty?\n # Sending an empty value for report_name_prefix results in the\n # next usage report being generated with the default prefix value\n # \"usage_gce\". (ref: https://cloud.google.com/compute/docs/reference/rest/v1/projects/setUsageExportBucket)\n puts \"Setting report_name_prefix to empty value causes the report \" \\\n \"to have the default prefix of `usage_gce`.\"\n end\n projects_client = ::Google::Cloud::Compute::V1::Projects::Rest::Client.new\n operation = projects_client.set_usage_export_bucket project: project,\n usage_export_location_resource: export_location\n wait_until_done operation: operation\nend", "def get_usage_export_bucket project:\n projects_client = ::Google::Cloud::Compute::V1::Projects::Rest::Client.new\n project_data = projects_client.get project: project\n export_location = project_data.usage_export_location\n\n if !export_location.nil? && export_location.report_name_prefix.empty?\n puts \"Report name prefix not set, replacing with default value of `usage_gce`.\"\n export_location.report_name_prefix = \"usage_gce\"\n end\n export_location\nend", "def get_bucket(bucket)\n end", "def disable_usage_export project:\n projects_client = ::Google::Cloud::Compute::V1::Projects::Rest::Client.new\n\n # Passing an empty message to usage_export_location_resource disables the usage report generation.\n operation = projects_client.set_usage_export_bucket project: project, usage_export_location_resource: {}\n wait_until_done operation: operation\nend", "def update_export\n zfs(:set, \"sharenfs=\\\"#{@params[\"share_options\"]}\\\"\", @params[\"dataset\"]) if @params[\"share_options\"]\n zfs(:set, \"quota=#{@params[\"quota\"].to_i == 0 ? \"none\" : @params[\"quota\"]}\", @params[\"dataset\"])\n end", "def newBucket\n innerNewBucket \n end", "def put_bucket_limit(bucket_limit, optional={})\n\t\targs = self.class.new_params\n\t\targs[:query]['Action'] = 'PutBucketLimit'\n\t\targs[:query]['BucketLimit'] = bucket_limit\n\t\targs[:region] = optional[:_region] if (optional.key? :_region)\n\t\tif optional.key? :_method\n\t\t\traise ArgumentError, '_method must be GET|POST' unless 'GET|POST'.split('|').include? 
optional[:_method]\n\t\t\targs[:method] = optional[:_method]\n\t\tend\n\t\tif optional.key? :_scheme\n\t\t\traise ArgumentError, '_scheme must be http|https' unless 'http|https'.split('|').include? optional[:_scheme]\n\t\t\targs[:scheme] = optional[:_scheme]\n\t\tend\n\t\tif optional.key? :bid\n\t\t\targs[:query]['bid'] = optional[:bid]\n\t\tend\n\t\tif optional.key? :owner_account\n\t\t\targs[:query]['OwnerAccount'] = optional[:owner_account]\n\t\tend\n\t\tif optional.key? :uid\n\t\t\targs[:query]['uid'] = optional[:uid]\n\t\tend\n\t\tself.run(args)\n\tend", "def bucket=(bucket)\n @bucket = bucket\n self\n end", "def process_bucket(bucketname,options: {},logger: Logger.new(LOGFILE))\n \n client = Elasticsearch::Client.new(host: options.elasticsearch, log: false)\n indexer = ElasticIndexer.new(client: client, autocommit: 500,logger: logger)\n \n logger.info(\"Scanning #{bucketname}\")\n Aws::S3::Bucket.new(bucketname).objects.each {|objectsummary|\n logger.info(\"got key #{objectsummary.key}\")\n ct_major = \"\"\n ct_minor = \"\"\n parts = /^(.*)\\/(.*)$/.match(objectsummary.object.content_type)\n if parts\n ct_major = parts[1]\n ct_minor = parts[2]\n else\n logger.warn(\"Unable to parse content type '#{objectsummary.object.content_type}'\")\n end\n \n xtn = \"\"\n parts = /\\.([^\\.]+)$/.match(objectsummary.key)\n if parts\n xtn = parts[1]\n end\n \n ap objectsummary.object.metadata\n \n indexer.add_record({\n bucket: bucketname,\n etag: objectsummary.etag,\n path: objectsummary.key,\n last_modified: Date.parse(objectsummary.last_modified.to_s).iso8601,\n owner: {\n display_name: objectsummary.owner.display_name,\n id: objectsummary.owner.id\n },\n size: objectsummary.size,\n content_type: {\n major: ct_major,\n minor: ct_minor,\n raw: objectsummary.object.content_type\n },\n storage_class: objectsummary.storage_class,\n content_encoding: objectsummary.object.content_encoding,\n content_disposition: objectsummary.object.content_disposition,\n extra_data: objectsummary.object.metadata,\n file_extension: xtn\n })\n }\n indexer.commit\nrescue Aws::S3::Errors::PermanentRedirect=>e\n logger.error(e.message)\nend", "def finish\n # Look up our bucket, if there is one\n bucket\n super\n end", "def bucket_after_action=(bucket)\n return unless bucket.present? && bucket.persisted?\n\n self.target_bucket_id = bucket.id\n self.target_bucket_name = bucket.name\n self.bucket_end_position = bucket.current_position\n end", "def set_TargetBucket(value)\n set_input(\"TargetBucket\", value)\n end", "def default_bucket(bucket)\n @bucket = bucket\n end", "def bucket_before_action=(bucket)\n self.source_bucket_id = bucket.id\n self.source_bucket_name = bucket.name\n self.bucket_start_position = bucket.current_position\n end", "def summarize_bucket(bucket_id, limit: 20)\n bucket = relation(:buckets).with_id(bucket_id).as(:entity).one!\n\n BucketSummary.new do |summary|\n summary.bucket = bucket\n summary.traces = relation(:traces).in_bucket(bucket_id, limit: limit).as(:entity).to_a\n summary.counts = rom.relations.traces.count_types bucket\n end\n end", "def bucket!(bucket)\n request = Riakpb::RpbGetBucketReq.new(:bucket => bucket)\n response = rpc.request(\n Util::MessageCode::GET_BUCKET_REQUEST,\n request\n )\n @bucket_cache[bucket].load(response)\n end", "def set_bucket(lower, upper)\n @base = \"#{@dirname}/#{lower}.#{upper}.#{@basename}\"\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
rename local variable rrlv
def rename_local_variable # Start on either 'asdf', and do viw<leader>rrlv asdf = 10 p asdf # TODO: make this work without 'v' end
[ "def rename_var(var_name, opts={})\n return var_name if BUILTIN_VARS.include?(var_name)\n\n generate = opts.fetch(:generate, true)\n unresolved = opts.fetch(:unresolved, [])\n renamed = @renames[var_name]\n\n if renamed.nil? and parent\n renamed = parent.rename_var(var_name, :generate => false)\n end\n\n if renamed.nil? and generate\n @renames[var_name] = random_var_name\n renamed = @renames[var_name]\n end\n\n #puts \"Mapped #{var_name} => #{renamed}\" if renamed\n\n renamed\n end", "def modify_rdn args\n rename args\n end", "def get_fresh_variable_name\n @vindex = (@vindex || 0xFF) + 1\n return \"var_#{@vindex.to_s(16)}\"\n end", "def relvar(name)\n r = @relvar.namespace.relvar(name, false)\n raise \"Unknown relvar #{name}\" unless r\n r\n end", "def rename; end", "def rename_locals(oldsource, datatypes)\n source = \"\"\n \n parse_functions(oldsource, datatypes).each_with_index { |chunk, i|\n rename_function_locals(chunk, datatypes) if i % 2 != 0\n \n source += chunk.join(\"\")\n }\n \n return source\nend", "def initialize(vrn)\n @vrn = vrn&.delete(' ')&.upcase\n end", "def substitute(x,r)\n walk(0) { |y,currentBinders|\n if y == x + currentBinders\n r\n else\n ULVar.new(y)\n end }\n end", "def defvar(name, dwords=1)\n unless var?(name)\n @bss << \"#{name}: resd #{dwords}\\n\"\n @vars[name] = name\n else\n STDERR.puts \"[warning] attempted to redefine #{name}\"\n end\n end", "def rename_get_form_vars\n @record = VmOrTemplate.find_by(:id => @edit[:vm_id])\n @edit[:new][:name] = params[:name] if params[:name]\n session[:edit] = @edit\n end", "def refurbish variable_name\n Value.typed @vars[variable_name], variable_name.to_s.sub(/^\\@/, '') #.sub(/^\\$/, '')\n end", "def rename_vm\n return unless load_edit(\"vm_rename__#{params[:id]}\")\n @explorer = true if @edit[:explorer]\n build_rename_screen\n if @edit[:explorer]\n replace_right_cell\n end\n end", "def replace_vim_variable(variable)\n Vim::command(\"let snip_tmp = #{vim_mappings[variable]}\")\n result = Vim::evaluate(\"snip_tmp\")\n case variable\n when \"VI_SOFT_TABS\"\n result = (result == \"1\" ? \"YES\" : \"NO\")\n when \"VI_FILENAME\"\n result = File.basename(result)\n else\n result\n end\n end", "def var=(name)\n @var = if name == :random\n \"_#{rand(2000)}\"\n else\n name\n end\n end", "def rsv1=(_arg0); end", "def rename(name, new_name, flags = T.unsafe(nil)); end", "def set_vector_with_names_via_hash(r_var_name, hash)\n vals = array_to_r_vector(hash.values)\n nms = array_to_r_vector(hash.keys, :string_vals => true)\n\n command = \"#{r_var_name} <- #{vals}; names(#{r_var_name}) = #{nms}\"\n @con.eval(command)\n end", "def relvar(name, create_empty = true)\n if not(relvars.has_key?(name)) and create_empty\n relvars[name] = Relvar.new(self,name) \n instance_eval \"def #{name}(); relvars[:#{name}]; end\"\n end\n relvars[name]\n end", "def subst_name hoge\n hoge\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
create a dns record
def create_record(fqdn, type, ipdata) unless @dnss.is_valid? Puppet.crit dns.cstatus end priority = {} # TODO: research how to implement priority for puppet # priority = priority[0] # if priority.nil? # priority = {} # else # priority = { :priority => priority.to_i } # end record = @dnss.create_record(fqdn, type, ipdata, priority) if record.nil? Puppet.err dns.cstatus end Puppet.notice "Created dns record '#{fqdn}' with id '#{record[:id]}'." end
[ "def create_dns_record(params)\n if RECORD_MAPPING[:dns].include?(params[:record_type])\n if params[:record_type].eql?('A')\n record = Infoblox::Arecord.new(connection: connection, name: params[:name], ipv4addr: params[:ipv4addr])\n elsif params[:record_type].eql?('AAAA')\n record = Infoblox::AAAArecord.new(connection: connection, name: params[:name], ipv6addr: params[:ipv6addr])\n elsif params[:record_type].eql?('PTR')\n record = Infoblox::Ptr.new(connection: connection, name: params[:name], ptrdname: params[:ptrdname])\n elsif params[:record_type].eql?('CNAME')\n record = Infoblox::Cname.new(connection: connection, name: params[:name], canonical: params[:canonical])\n end\n # record.zone = params[:zone] if params[:zone]\n # record.comment = params[:comment] if params[:comment]\n record.disable = params[:disable] if params[:disable]\n record.view = params[:view] if params[:view]\n record.extattrs = params[:extattrs] if params[:extattrs]\n begin\n resp = record.post\n Chef::Log.info 'DNS Record is successfully created.'\n return resp\n rescue StandardError => e\n Chef::Log.error e.message\n return false\n end\n else\n Chef::Log.error \"Please enter valid record type for DNS record creation i.e. A/AAAA/PTR/CNAME\"\n end\n end", "def create_dns_record(domain, hostname, type, content, ttl, priority = nil)\n body = {\n 'hostname' => hostname,\n 'type' => type,\n 'content' => content,\n 'ttl' => ttl\n }\n body.update!(:priority => priority) if priority\n connection.post \"/dns/create/#{domain}\", body\n end", "def create(name, ip)\n Netdot.logger.debug(\"Creating new DNS records with name:#{name} and ip:#{ip}\")\n @connection.post('host', {'name' => name, 'address' => ip})\n end", "def create(name, ip)\n Netdot.logger.debug(\"Creating new DNS records with name:#{name}\" \\\n \" and ip:#{ip}\")\n @connection.post('host', 'name' => name, 'address' => ip)\n end", "def makeARecord(machine, ttl: Config::TTL_A_RECORD)\n return DnsRecord.new(name: machine.dns_record(),\n type: \"A\",\n content: machine.static_ip(),\n ttl: ttl)\nend", "def add_dns_record(params)\n get_request('dnsAddRecord?'+get_url_parameters(params)).body\n end", "def create\n @dns_record = DnsRecord.new(params[:dns_record])\n\n respond_to do |format|\n if @dns_record.save\n format.html { redirect_to @dns_record, notice: 'Dns record was successfully created.' 
}\n format.json { render json: @dns_record, status: :created, location: @dns_record }\n else\n format.html { render action: \"new\" }\n format.json { render json: @dns_record.errors, status: :unprocessable_entity }\n end\n end\n end", "def create_record(record_type, zone_id, name, data, options = {})\n optional_tags= ''\n options.each { |option, value|\n case option\n when :ttl\n optional_tags+= \"<ttl type='integer'>#{value}</ttl>\"\n when :active\n optional_tags+= \"<active>#{value}</active>\"\n when :aux\n optional_tags+= \"<aux>#{value}</aux>\"\n end\n }\n\n request(\n :body => %Q{<?xml version=\"1.0\" encoding=\"UTF-8\"?><record><record_type>#{record_type}</record_type><zone_id type=\"integer\">#{zone_id}</zone_id><name>#{name}</name><data>#{data}</data>#{optional_tags}</record>},\n :expects => 201,\n :method => 'POST',\n :parser => Fog::Parsers::DNS::Slicehost::CreateRecord.new,\n :path => 'records.xml'\n )\n end", "def create_record(name, value, type = \"A\", ttl = 300)\n # Delete existing record because you can't update records\n delete_record_by_name(name, type, false)\n\n # Create new record\n begin\n @route53.client.change_resource_record_sets({\n :hosted_zone_id => @hosted_zone_id, \n :change_batch => {\n :changes => [ \n {\n :action => \"CREATE\", \n :resource_record_set => { \n :name => name, \n :type => type,\n :ttl => ttl, \n :resource_records => [ { :value => value } ]\n }\n }\n ]\n }\n })\n rescue StandardError => bang\n @log.error \"Error creating A record from Route53: #{bang}\"\n end\n end", "def create_record(zone_id, type, name, content, options={})\n body = %Q{<?xml version=\"1.0\" encoding=\"UTF-8\"?><record><type>#{type}</type><name>#{name}</name><content>#{content}</content>}\n options.each do |k,v|\n body += %Q{<#{k}>#{v}</#{k}>}\n end\n body += %Q{</record>}\n request(\n :body => body,\n :expects => 202,\n :method => 'POST',\n :parser => Fog::Parsers::DNS::Bluebox::CreateRecord.new,\n :path => \"/api/domains/#{zone_id}/records.xml\"\n )\n end", "def makeSoaRecord(cluster, ttl: Config::TTL_SOA)\n return DnsRecord.new(name: cluster.nameservers()[0].dns_record(),\n type: \"SOA\",\n content: cluster.hostmasters()[0].dns_record(),\n ttl: ttl)\nend", "def makeARecordGroup(name, machines, ttl: Config::TTL_A_RECORD)\n return DnsRecord.new(name: name,\n type: \"A\",\n content: machines.map(&:static_ip).join(\",\"),\n ttl: ttl)\nend", "def add_dns_sec_record(params)\n get_request('dnsSecAddRecord?'+get_url_parameters(params)).body\n end", "def create_alias(alias_target, domain)\n dns = Fog::DNS.new({\n :provider => 'AWS',\n :aws_access_key_id => ENV[\"AWS_ACCESS_KEY_ID\"],\n :aws_secret_access_key => ENV[\"AWS_SECRET_ACCESS_KEY\"]\n })\n\n # TODO: Get or create zone by base domain\n zone = dns.zones.first\n\n puts \"Creating DNS alias\"\n\n record = zone.records.create(\n :name => domain,\n :alias_target => alias_target,\n :type => 'A'\n )\n\n puts \"#{domain} -> #{alias_target[\"DNSName\"]}\"\nend", "def create(options={})\n check_required_keys options, :name, :content, :type, :ttl\n \n r = post(\"/domains/#{@@parent_id}/records\", :query => { \"record[name]\" => options[:name],\n \"record[ttl]\" => options[:ttl],\n \"record[content]\" => options[:content],\n \"record[prio]\" => options[:prio],\n \"record[type]\" => options[:type] })\n r[\"errors\"] and raise StandardError, r[\"errors\"][\"error\"].to_a.join(\", \")\n if r.code == 201\n Record.new r[\"record\"]\n else\n raise StandardError, 'Could not create the record'\n end \n end", "def create_zone(zone, email, 
ttl)\n @dnsh.name = zone\n @dnsh.email = email\n @dnsh.ttl = ttl\n if @dnsh.save == true\n Puppet.notice \"Created dns zone #{zone} with id #{@dnsh.id}\"\n else\n Puppet.err @dnsh.cstatus\n raise @dnsh.cstatus\n end\n end", "def create\n name, type = resource[:name].split('/')\n rdata = resource[:rdata]\n ttl = resource[:ttl]\n case type\n when 'MX'\n Array(rdata).each_with_index do |exchange, index|\n preference = Array(resource[:preference])[index]\n nsupdate(\"server #{server}\n update add #{name} #{ttl} MX #{preference} #{exchange}\n send\")\n end\n when 'SRV'\n Array(rdata).each_with_index do |target, index|\n port = Array(resource[:port])[index]\n weight = Array(resource[:weight])[index]\n priority = Array(resource[:priority])[index]\n nsupdate(\"server #{server}\n update add #{name} #{ttl} SRV #{priority} #{weight} #{port} #{target}\n send\")\n end\n else\n nsupdate(\"server #{server}\n update add #{name} #{ttl} #{type} #{Array(rdata).first}\n send\")\n end\n end", "def set_dns(fdqn, ttl=60)\n frags = fdqn.split('.')\n domainarr = []\n # com\n domainarr.unshift(frags.pop)\n # acquia\n domainarr.unshift(frags.pop)\n domain = domainarr.join('.')\n subdomain = frags.join('.')\n puts \"fdqn: #{fdqn}, domain: #{domain}, subdomain: #{subdomain}\"\n dns_api = it.dns_provider\n dns_api.create_or_replace_a_record(domain, \"#{subdomain}.#{domain}\", it.system_details.external_ipaddress())\n end", "def create_domain_with_soa_record(name)\n domain = ::PowerDns::Domain.create(name)\n return false if domain.nil?\n\n soa_record = ::PowerDns::Record.create :domain_id => domain.id, :name => name,\n :type => 'SOA', :content => SOA_CONTENT, :ttl => 600\n !soa_record.nil?\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
get a dns record object
def get_record(fqdn, type) matches = find_match(@dnss.records, fqdn, true) if matches != nil record = nil matches.each do |record| Puppet.debug "inspecting #{record.hash_type} == #{type}" if record.hash_type.to_s == type.to_s Puppet.notice "found dns record : #{fqdn}, #{type}" return record end end else Puppet.debug "match found no record : #{fqdn}, #{type}" end Puppet.debug "record not found : #{fqdn}, #{type}" return nil end
[ "def makeARecord(machine, ttl: Config::TTL_A_RECORD)\n return DnsRecord.new(name: machine.dns_record(),\n type: \"A\",\n content: machine.static_ip(),\n ttl: ttl)\nend", "def get_record(zone_id, record_id)\n request(\n :expects => 200,\n :method => 'GET',\n :parser => Fog::Parsers::DNS::Bluebox::GetRecord.new,\n :path => \"/api/domains/#{zone_id}/records/#{record_id}.xml\"\n )\n end", "def find_record(name, type = 'A')\n # Find the record\n response = @route53.client.list_resource_record_sets({\n :hosted_zone_id => @hosted_zone_id,\n :start_record_name => name,\n :start_record_type => type,\n :max_items => 1\n })\n\n record = nil\n if response && response.data\n if response.data[:resource_record_sets] && response.data[:resource_record_sets].size > 0\n response_record = response.data[:resource_record_sets][0]\n if (response_record[:name] == name || response_record[:name] == \"#{name}.\") && response_record[:type] == type\n record = Route53Record.new(response_record[:name], response_record[:type], response_record[:ttl], response_record[:resource_records][0][:value])\n end\n end\n end\n\n record\n end", "def makeSoaRecord(cluster, ttl: Config::TTL_SOA)\n return DnsRecord.new(name: cluster.nameservers()[0].dns_record(),\n type: \"SOA\",\n content: cluster.hostmasters()[0].dns_record(),\n ttl: ttl)\nend", "def dns\n doc = request(\"dns-list_records\")\n api_error?(doc)\n (doc/:data).inject([]) { |records, dns| records << Dns.new_from_xml(dns); records }\n end", "def record_data\n return @record unless @record_class == DNSSD::Record::IN\n\n case @record_type\n when DNSSD::Record::A,\n DNSSD::Record::AAAA then\n IPAddr.new_ntoh @record\n when DNSSD::Record::CNAME,\n DNSSD::Record::NS,\n DNSSD::Record::PTR then\n domain_name_to_string @record\n when DNSSD::Record::MX then\n mx = @record.unpack 'nZ*'\n mx[-1] = domain_name_to_string mx.last\n mx\n when DNSSD::Record::SOA then\n soa = @record.unpack 'Z*Z*NNNNN'\n soa[0] = domain_name_to_string soa[0]\n soa[1] = domain_name_to_string soa[1]\n soa\n when DNSSD::Record::SRV then\n srv = @record.unpack 'nnnZ*'\n srv[-1] = domain_name_to_string srv.last\n srv\n when DNSSD::Record::TXT then\n record = @record.dup\n txt = []\n\n until record.empty? do\n txt << character_string_to_string(record)\n record.slice! 
0, txt.last.length + 1\n end\n\n txt\n else\n @record\n end\n end", "def get_record(opts={})\n OAI::GetRecordResponse.new(do_request('GetRecord', opts))\n end", "def rr(type, name, rdata)\n ttl = 3600\n klass = 'IN'\n string = [name, ttl, klass, type, rdata].join(' ')\n Dnsruby::RR.new_from_string(string)\n end", "def get_record(bibnumber)\n if record_exists?(bibnumber)\n marc_url = URI_FOR_MARC % ([@scope] + Array.new(3, bibnumber))\n record_url = URI_FOR_RECORD % [bibnumber, @scope]\n \n # Retrieve MARC data and convert to UTF-8 prior to decoding ...\n record_page = get_page(marc_url)\n record_data = MARC_REGEX.match(record_page)\n \n if record_data.nil?\n raise ParserError, \"Could not decode data: MARC data not found.\"\n else\n record_data = record_data[1].strip()\n record_data = Iconv.conv('UTF-8', 'LATIN1', record_data)\n end\n\n record = decode_pseudo_marc(record_data)\n unless record.nil?\n record.bibnum = bibnumber\n record.raw = record_data\n record.record_url = \"#{self.class.base_uri}#{record_url}\"\n record.marc_url = \"#{self.class.base_uri}#{marc_url}\"\n end\n return record\n else\n raise NonExistentRecordError, \"Record not found.\"\n end\n rescue NonExistentRecordError => error\n warn error.message\n return nil\n rescue ParserError => error \n warn error.message\n return nil\n end", "def raw_domain_lookup(hostname, dns_server=nil, dns_port=53, record_type = Net::DNS::A)\n # use default server if un-specified\n dns_server ||= @dns_server\n\n @log.debug_message += \"Domain lookup: #{hostname} @#{dns_server.nil? ? 'default' : dns_server} (type #{DnsWorker.record_name record_type})\\n\"\n\n if dns_server.nil?\n dns = Net::DNS::Resolver.new(port: dns_port)\n else\n dns_server.sub!(/(:\\d+)$/, '') { dns_port = $1 if dns_port == 53 }\n dns = Net::DNS::Resolver.new(\n nameservers: dns_server,\n port: dns_port,\n domain: params[:domain],\n )\n end\n\n if dns.nil?\n packet = nil\n else\n packet = dns.query hostname, record_type\n end\n\n if packet.blank? 
or packet.answer.blank?\n raise Net::DNS::Resolver::NoResponseError.new\n else\n packet\n end\n end", "def dns\n @dns ||= Resolv::DNS.open\n end", "def dns\n doc = request(\"dns-list_records\")\n raise ApiError if (doc/:result).innerHTML == \"error\"\n (doc/:data).inject([]) { |records, dns| records << Dns.new_from_xml(dns); records }\n end", "def create_dns_record(params)\n if RECORD_MAPPING[:dns].include?(params[:record_type])\n if params[:record_type].eql?('A')\n record = Infoblox::Arecord.new(connection: connection, name: params[:name], ipv4addr: params[:ipv4addr])\n elsif params[:record_type].eql?('AAAA')\n record = Infoblox::AAAArecord.new(connection: connection, name: params[:name], ipv6addr: params[:ipv6addr])\n elsif params[:record_type].eql?('PTR')\n record = Infoblox::Ptr.new(connection: connection, name: params[:name], ptrdname: params[:ptrdname])\n elsif params[:record_type].eql?('CNAME')\n record = Infoblox::Cname.new(connection: connection, name: params[:name], canonical: params[:canonical])\n end\n # record.zone = params[:zone] if params[:zone]\n # record.comment = params[:comment] if params[:comment]\n record.disable = params[:disable] if params[:disable]\n record.view = params[:view] if params[:view]\n record.extattrs = params[:extattrs] if params[:extattrs]\n begin\n resp = record.post\n Chef::Log.info 'DNS Record is successfully created.'\n return resp\n rescue StandardError => e\n Chef::Log.error e.message\n return false\n end\n else\n Chef::Log.error \"Please enter valid record type for DNS record creation i.e. A/AAAA/PTR/CNAME\"\n end\n end", "def get_record(collection, id)\n oai_id = \"oai:%s:%s/%d\" % [@base_uri.host, collection, id]\n response = get_response({ :verb => 'GetRecord', :identifier => oai_id, :metadataPrefix => 'qdc' })\n record = parse_records(response).first\n Record.new(record, { :base_uri => @base_uri, :collection => collection })\n end", "def get_additional_resource_record(dns_query, parsed_dns)\n arcount = parsed_dns[:dns_header_field][:arcount]\n return nil if arcount == 0\n get_resource_record(dns_query, arcount, parsed_dns)\n end", "def get_answer_resource_record(dns_query, parsed_dns)\n ancount = parsed_dns[:dns_header_field][:ancount]\n return nil if ancount == 0\n get_resource_record(dns_query, ancount, parsed_dns)\n end", "def get_parsed_dns( dns_query )\n begin\n parsed_dns = {\n :index => 0,\n :domain_name_dictionary => [],\n :dns_header_field => Hash.new(),\n :question_section => Hash.new(),\n :answer_section => Hash.new(),\n :authority_section => Hash.new(),\n :additional_section => Hash.new()\n }\n\n parsed_dns[:dns_header_field] = get_header_section(dns_query)\n parsed_dns[:index] = QUESTION_FIELD_START_INDEX\n parsed_dns[:question_section] = get_question_section(dns_query, parsed_dns)\n parsed_dns[:answer_section] = get_answer_resource_record(dns_query, parsed_dns)\n parsed_dns[:authority_section] = get_authority_resource_record(dns_query, parsed_dns)\n parsed_dns[:additional_section] = get_additional_resource_record(dns_query, parsed_dns)\n rescue\n end\n parsed_dns\n end", "def cache_record(record)\n return unless @monitor_thread\n if record.is_a?(Dnsruby::RR) and\n (!record.respond_to?(:address) or Rex::Socket.is_ip_addr?(record.address.to_s)) and\n record.name.to_s.match(MATCH_HOSTNAME)\n add(record, Time.now.to_i + record.ttl)\n else\n raise \"Invalid record for cache entry - #{record.inspect}\"\n end\n end", "def add_dns_record(params)\n get_request('dnsAddRecord?'+get_url_parameters(params)).body\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
create a dns zone
def create_zone(zone, email, ttl) @dnsh.name = zone @dnsh.email = email @dnsh.ttl = ttl if @dnsh.save == true Puppet.notice "Created dns zone #{zone} with id #{@dnsh.id}" else Puppet.err @dnsh.cstatus raise @dnsh.cstatus end end
[ "def create_zone zone_name, zone_dns, description: nil,\n name_server_set: nil\n ensure_service!\n gapi = service.create_zone zone_name, zone_dns,\n description: description,\n name_server_set: name_server_set\n Zone.from_gapi gapi, service\n end", "def create_zone zone_name, zone_dns, description: nil,\n name_server_set: nil\n ensure_connection!\n resp = connection.create_zone zone_name, zone_dns,\n description: description,\n name_server_set: name_server_set\n if resp.success?\n Zone.from_gapi resp.data, connection\n else\n fail ApiError.from_response(resp)\n end\n end", "def create_zone( domain, default_ttl, ns_type, options = {})\n\n optional_tags= ''\n options.each { |option, value|\n case option\n when :ns1\n optional_tags+= \"<ns1>#{value}</ns1>\"\n when :nx_ttl\n optional_tags+= \"<nx-ttl type='interger'>#{value}</nx-ttl>\"\n when :slave_nameservers\n optional_tags+= \"<slave-nameservers>#{value}</slave-nameservers>\"\n when :axfr_ips\n optional_tags+= \"<axfr-ips>#{value}</axfr-ips>\"\n when :custom_nameservers\n optional_tags+= \"<custom-nameservers>#{value}</custom-nameservers>\"\n when :custom_ns\n optional_tags+= \"<custom-ns>#{value}</custom-ns>\"\n when :hostmaster\n optional_tags+= \"<hostmaster>#{value}</hostmaster>\"\n when :notes\n optional_tags+= \"<notes>#{value}</notes>\"\n when :restrict_axfr\n optional_tags+= \"<restrict-axfr>#{value}</restrict-axfr>\"\n when :tag_list\n optional_tags+= \"<tag-list>#{value}</tag-list>\"\n end\n }\n \n request(\n :body => %Q{<?xml version=\"1.0\" encoding=\"UTF-8\"?><zone><domain>#{domain}</domain><default-ttl type=\"integer\">#{default_ttl}</default-ttl><ns-type>#{ns_type}</ns-type>#{optional_tags}</zone>},\n :expects => 201,\n :method => 'POST',\n :parser => Fog::Parsers::Zerigo::DNS::CreateZone.new,\n :path => '/api/1.1/zones.xml'\n )\n end", "def zone_create(zone)\n obj_create zone, Zone\n end", "def create_zone(origin, options = {})\n\n optional_tags= ''\n options.each { |option, value|\n case option\n when :ttl\n optional_tags+= \"<ttl type='interger'>#{value}</ttl>\"\n when :active\n optional_tags+= \"<active>#{value}</active>\"\n end\n }\n\n request(\n :body => %Q{<?xml version=\"1.0\" encoding=\"UTF-8\"?><zone><origin>#{origin}</origin>#{optional_tags}</zone>},\n :expects => 201,\n :method => 'POST',\n :parser => Fog::Parsers::DNS::Slicehost::CreateZone.new,\n :path => 'zones.xml'\n )\n end", "def create_zone attrs\n ZerigoDNS::Zone.create({follow_template: 'follow', zone_template_id: id}.merge(attrs))\n end", "def create\n @domain = Domain.friendly.find(params[:domain_id])\n @dns_zone = @domain.dns_zones.create(dns_zone_params.merge({:version => 1}))\n if @dns_zone.save\n redirect_to @domain, notice: 'Dns zone was successfully created.'\n else\n render action: 'edit', @errors => @dns_zone.errors, alert: \"Dns zone validation failed.\"\n end\n end", "def create_hosted_zone(name, options = {})\n optional_tags = ''\n if options[:caller_ref]\n optional_tags += \"<CallerReference>#{options[:caller_ref]}</CallerReference>\"\n else\n #make sure we have a unique call reference\n caller_ref = \"ref-#{rand(1000000).to_s}\"\n optional_tags += \"<CallerReference>#{caller_ref}</CallerReference>\"\n end\n if options[:comment]\n optional_tags += \"<HostedZoneConfig><Comment>#{options[:comment]}</Comment></HostedZoneConfig>\"\n end\n if options[:vpc_id] and options[:vpc_region]\n optional_tags += \"<VPC><VPCId>#{options[:vpc_id]}</VPCId><VPCRegion>#{options[:vpc_region]}</VPCRegion></VPC>\"\n end\n\n request({\n :body => %Q{<?xml 
version=\"1.0\" encoding=\"UTF-8\"?><CreateHostedZoneRequest xmlns=\"https://route53.amazonaws.com/doc/#{@version}/\"><Name>#{name}</Name>#{optional_tags}</CreateHostedZoneRequest>},\n :parser => Fog::Parsers::AWS::DNS::CreateHostedZone.new,\n :expects => 201,\n :method => 'POST',\n :path => \"hostedzone\"\n })\n end", "def create_hosted_zone(name, options = {})\n\n optional_tags = ''\n if options[:caller_ref]\n optional_tags+= \"<CallerReference>#{options[:caller_ref]}</CallerReference>\"\n else\n #make sure we have a unique call reference\n caller_ref = \"ref-#{rand(1000000).to_s}\"\n optional_tags+= \"<CallerReference>#{caller_ref}</CallerReference>\"\n end\n if options[:comment]\n optional_tags+= \"<HostedZoneConfig><Comment>#{options[:comment]}</Comment></HostedZoneConfig>\"\n end\n\n request({\n :body => %Q{<?xml version=\"1.0\" encoding=\"UTF-8\"?><CreateHostedZoneRequest xmlns=\"https://route53.amazonaws.com/doc/2010-10-01/\"><Name>#{name}</Name>#{optional_tags}</CreateHostedZoneRequest>},\n :parser => Fog::Parsers::DNS::AWS::CreateHostedZone.new,\n :expects => 201,\n :method => 'POST',\n :path => \"hostedzone\"\n })\n\n end", "def create_alias(alias_target, domain)\n dns = Fog::DNS.new({\n :provider => 'AWS',\n :aws_access_key_id => ENV[\"AWS_ACCESS_KEY_ID\"],\n :aws_secret_access_key => ENV[\"AWS_SECRET_ACCESS_KEY\"]\n })\n\n # TODO: Get or create zone by base domain\n zone = dns.zones.first\n\n puts \"Creating DNS alias\"\n\n record = zone.records.create(\n :name => domain,\n :alias_target => alias_target,\n :type => 'A'\n )\n\n puts \"#{domain} -> #{alias_target[\"DNSName\"]}\"\nend", "def allocate(session, template)\n write_task('rvpe.zone.allocate', session, true) do\n zone_def = ResourceFile::Parser.load_yaml(template)\n\n # create a zone\n name = zone_def[ResourceFile::Zone::NAME]\n unless name\n raise 'Specify ' + ResourceFile::Zone::NAME + ' in Zone file.'\n end\n zone = Zone.find_by_name(name).last\n raise \"Zone[#{name}] already exists.\" if zone\n # create an associated site in OpenNebula\n rc = call_one_xmlrpc('one.cluster.allocate', session, name)\n raise rc[1] unless rc[0]\n osite_id = rc[1]\n # create a zone record\n begin\n zone = Zone.new\n zone.oid = osite_id\n zone.name = name\n zone.description = zone_def[ResourceFile::Zone::DESCRIPTION]\n zone.create\n rescue => e\n call_one_xmlrpc('one.cluster.delete', session, osite_id)\n raise e\n end\n\n # add hosts to this zone\n if zone_def[ResourceFile::Zone::HOST]\n begin\n zone_def[ResourceFile::Zone::HOST].each do |host|\n add_host_to_zone(session, host, zone)\n end\n rescue => e\n # delete this zone\n _delete(session, zone)\n raise e\n end\n end\n\n # add virtual networks to this zone\n if zone_def[ResourceFile::Zone::NETWORK]\n begin\n zone_def[ResourceFile::Zone::NETWORK].each do |net|\n add_vnet_to_zone(session, net, zone)\n end\n rescue => e\n # delete this zone\n _delete(session, zone)\n raise e\n end\n end\n\n [true, zone.id]\n end\n end", "def create\n @dns_zone = DnsZone.new(params[:dns_zone])\n\n respond_to do |format|\n if @dns_zone.save\n flash[:notice] = 'DnsZone was successfully created.'\n format.html { render :action => \"edit\" }\n #format.html { redirect_to(@dns_zone) }\n format.xml { render :xml => @dns_zone, :status => :created, :location => @dns_zone }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @dns_zone.errors, :status => :unprocessable_entity }\n end\n end\n end", "def add_zone!(name)\n zone = add_zone(name)\n zone.save!\n zone\n end", "def 
zonecreate_standard(fabrickey, zonename, *aliases)\n result = @zones.zonecreate_standard(fabrickey, zonename, *aliases)\n result[1]\n end", "def create(name, ip)\n Netdot.logger.debug(\"Creating new DNS records with name:#{name}\" \\\n \" and ip:#{ip}\")\n @connection.post('host', 'name' => name, 'address' => ip)\n end", "def create(name, ip)\n Netdot.logger.debug(\"Creating new DNS records with name:#{name} and ip:#{ip}\")\n @connection.post('host', {'name' => name, 'address' => ip})\n end", "def build( zone_name )\n zone = Zone.new( :name => zone_name, :ttl => self.ttl )\n \n self.class.transaction do\n # Pick our SOA template out, and populate the zone\n soa_template = record_templates.detect { |r| r.record_type == 'SOA' }\n zone.primary_ns = soa_template.primary_ns\n zone.contact = soa_template.contact\n zone.refresh = soa_template.refresh\n zone.retry = soa_template.retry\n zone.expire = soa_template.expire\n zone.minimum = soa_template.minimum\n \n # save the zone or die\n zone.save!\n \n # get the templates\n templates = record_templates.dup\n \n # now build the remaining records according to the templates\n templates.delete( soa_template )\n templates.each do |template|\n record = template.build( zone_name )\n record.zone = zone\n record.save!\n end\n end\n \n zone\n end", "def create(options = {})\n response = request(:post, \"/network_zones.json\", :query => {:pack => options})\n end", "def process_forward_zone(domain)\n absolute_domain_name = \"#{domain['net.dns.domain']}.\"\n\n zone = DNS::Zone.new\n\n zone.origin = absolute_domain_name\n zone.ttl = domain['net.dns.ttl']\n\n zone.soa.serial = @serial\n zone.soa.label = zone.origin\n zone.soa.nameserver = domain['net.dns.soa-ns']\n zone.soa.email = \"#{domain['net.dns.soa-contact']}.\".sub('@', '.')\n\n domain['net.dns.mx']&.each do |server|\n mx = DNS::Zone::RR::MX.new\n mx.label = absolute_domain_name\n mx.priority = server['priority']\n mx.exchange = server['server']\n zone.records << mx\n end\n\n domain['net.dns.ns'].each do |address|\n ns = DNS::Zone::RR::NS.new\n ns.label = absolute_domain_name\n ns.nameserver = address\n zone.records << ns\n end\n\n domain['net.dns.cname']&.each do |name, target|\n cname = DNS::Zone::RR::CNAME.new\n cname.label = name\n cname.domainname = target\n zone.records << cname\n end\n\n domain['net.dns.srv']&.each do |service|\n srv = DNS::Zone::RR::SRV.new\n srv.label = \"_#{service['service']}._#{service['protocol']}\"\n srv.target = service['target']\n srv.port = service['port']\n service['priority'] ||= 0\n srv.priority = service['priority']\n service['weight'] ||= 0\n srv.weight = service['weight']\n zone.records << srv\n end\n\n domain['net.dns.txt']&.each do |record|\n txt = DNS::Zone::RR::TXT.new\n txt.label = record['label'] || absolute_domain_name\n txt.text = record['text']\n zone.records << txt\n end\n\n domain.children(kind: 'ipv4_interface') do |interface|\n a = DNS::Zone::RR::A.new\n a.label = interface['net.dns.name']\n a.address = interface['net.ipv4.address']\n zone.records << a\n\n interface['net.dns.alias']&.each do |label|\n cname = DNS::Zone::RR::CNAME.new\n cname.label = label\n cname.domainname = interface['net.dns.name']\n zone.records << cname\n end\n end\n\n @forward_zones[domain['net.dns.domain']] = zone\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
get a dns zone
def get_zone(zone) return find_match(@dns.domains, zone) end
[ "def fetch_zone\n route53 = Fog::DNS.new(connection_params)\n @zone = route53.zones.get(@current_resource.zone_id)\nend", "def get_hosted_zone()\n\t\tresp = @r53.get_hosted_zone(:id => @zone)\n\t\tresp[:hosted_zone]\n\tend", "def get_zone(zone=nil) \n zone ||= @zone\n get(\"Zone/#{zone}\")\n end", "def get_zone(zone)\n request(\n :expects => 200,\n :method => 'GET',\n :parser => Fog::Parsers::Zerigo::DNS::GetZone.new,\n :path => \"/api/1.1/zones/#{zone}.xml\"\n )\n end", "def by_name name\n begin\n self.get_zone( :id => \n list_zones.find { |z| z[:name] == name }[:id]\n )\n rescue NoMethodError\n puts '='*80 + \"\\n Failed to find hosted DNS zone by name: #{name}\\n\" +'='*80\n nil\n end\n end", "def get_hosting_zone(domain)\n route53.list_hosted_zones.hosted_zones.select do |x|\n x.name == \"#{domain}.\"\n end.first\n end", "def zone_by_name name\n connection if @connection.nil?\n @connection.zones.all.each do |zone|\n if zone.domain == name\n return zone\n end\n end\n return nil\n end", "def get_zone(zone_id)\n request(\n :expects => 200,\n :method => 'GET',\n :parser => Fog::Parsers::DNS::Bluebox::GetZone.new,\n :path => \"/api/domains/#{zone_id}.xml\"\n )\n end", "def fetch_dns_ptr(zone_name)\n \n @o = ''\n # find out the range we are parsing in this zone.\n ranges = []\n zone_name.split(\".\").map{|a| \n # Make sure it's request for arpa zone which should be all number\n if ! (a =~ /^\\d+$/).nil?\n if ranges.empty?\n ranges << a\n else\n ranges.insert(0, a)\n end\n end\n }\n \n # After look through zone name, if it's not a arpa zone, we'll return nil\n if ranges.empty?\n return @o\n end\n \n # Use later to determine how many number from ip we need\n zone_class = ranges.length\n \n # Now we have the range from zone name, let's use Networking class to help us find all interfaces\n # Find out wether it's class a, b, c or d\n netmask = 32 # start with D class netmask\n for i in (ranges.length..4-1)\n # Get the netmask correctly\n netmask = netmask - 8\n # Set the empty ip slot to 0\n ranges[i] = 0;\n end\n \n # Build the address\n range_address = ranges.join(\".\") + \"/#{netmask}\"\n \n # Call in cidr, find all interface under this range.\n interfaces = Networking.get_interfaces_in_range(range_address).sort{|a,b| a.ip <=> b.ip}\n\n # Find asset for each interface\n # If asset is Server, we need to look up two things\n # 1. If ip is drac, we will add \"-d\" to the hostname\n # 2. Maybe for vip, we need to ptr to it, but on the second thought, that shouldn't be. \n for interface in interfaces\n\n # Build the ip correct for this zone\n ips = []\n ip_parts = interface.ip_to_string.split(\".\")\n for i in (zone_class..4-1)\n # Insert reversely\n ips.insert(0, ip_parts[i])\n end\n ip = ips.join(\".\")\n \n # If it's a drac and type = server, we transform the hostname\n if ( interface.drac_ip? 
&& interface.asset.resource_type == 'Server')\n name = convert_to_drac_name(interface.asset.name)\n else\n # Check to see if there is multiple vips poing to this ip, if so raise exception.\n # If only one ip, then we'll point PTR record to that vip.\n if interface.vips.length > 1\n #raise \"#{interface.ip_to_string} has more than one vip \" + interface.vips.collect{|a| a.name}.inspect + \" pointing to it\"\n @o += \";#{interface.ip_to_string} has more than one vip \" + interface.vips.collect{|a| a.name}.inspect + \" pointing to it.\\n\"\n @o += \";Picking the first one.\\n\"\n name = interface.vips[0].name \n elsif interface.vips.length == 1\n name = interface.vips[0].name\n # Network asset with named interface\n elsif interface.asset.resource_type == 'Network' and interface.name and interface != interface.asset.primary_interface and ! interface.name.empty?\n name = interface.name + '.' + interface.asset.name\n else\n name = interface.asset.name \n end\n \n end\n \n @o += \"#{ip}\\t\\tPTR\\t\\t#{name}.\\n\"\n\n end\n\n return @o\n\n rescue Exception => exc\n flash[:interface] = \"##ERROR## Fetch failed following reason: #{exc.message}\\n\"\n\n \n end", "def fetch_zone(zone)\n case node[:dns][:zone_strategy]\n when 'hybrid'\n zone_data = hybrid_zone zone\n when 'bags'\n zone_data = bag_zone zone\n else\n zone_data = attr_zone zone\n end\n zone_data\n end", "def zone zone_id\n ensure_connection!\n resp = connection.get_zone zone_id\n if resp.success?\n Zone.from_gapi resp.data, connection\n else\n nil\n end\n end", "def host_zone(hostname)\n HOST_ZONE[hostname]\n end", "def get_zone hash\n self.client.get_hosted_zone hash\n end", "def zone\n self\n end", "def zone\n fetch('games.final_fantasy_xiv.zones')\n end", "def route53_zone(suffix)\n suffix_ = Zonify.dot_(suffix)\n relevant_zone = r53.list_hosted_zones.select do |zone|\n suffix_.end_with?(zone[:name])\n end.sort_by{|zone| zone[:name].length }.last\n if relevant_zone\n zone_id = relevant_zone[:aws_id]\n relevant_records = r53.list_resource_record_sets(zone_id).map do |rr|\n if rr[:name].end_with?(suffix_)\n rr[:name] = Zonify.read_octal(rr[:name])\n rr\n end\n end.compact\n [relevant_zone, Zonify.tree_from_right_aws(relevant_records)]\n end\n end", "def find_zone(str)\n find(str,:object=>:zone)\n end", "def zone_by_name name\n connection.list_hosted_zones_by_name(dns_name: name, max_items: 1).hosted_zones.each do |zone|\n if zone.name == name\n return zone.id\n end\n end\n return nil\n end", "def zone_id\n route_53_client.\n list_hosted_zones[:hosted_zones].\n each.\n select { |z| z[:name] == zone }.\n map {|z| z[:id] }.\n first\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
the unsigned m/z value
def mz _mz_signed = mz_signed _mz_signed >= 0 ? _mz_signed : -_mz_signed end
[ "def max_z\n @max_z\n end", "def z\n return @z\n end", "def z_value\n @z_value ||= self.class.z_value\n end", "def min_z\n @min_z\n end", "def unit \n\t\t\tunitq = self.dup\n\t\t\tmagnitude = self.abs\n\t\t\tunitq[0] /= magnitude\n\t\t\tunitq[1] /= magnitude\n\t\t\tunitq[2] /= magnitude\n\t\t\tunitq[3] /= magnitude\n\t\t\treturn unitq\n\t\tend", "def ulps_from_zero\n return self if nan?\n s, e, m = binary_string\n s, e, m = s[0] == \"1\" ? -1 : 1, e.to_i(2), m.to_i(2)\n s * (e.zero? ? m : (m + e * 2**52))\n end", "def magnitude\n Math.sqrt(@x**2 + @y**2 + @z**2)\n end", "def z\n return (@sprite != nil ? @sprite.z : 0)\n end", "def z\n @z\n end", "def mag\n Marshal::load(Marshal.dump(@magnitude))\n end", "def z=(new_z)\n @z = new_z.to_f if new_z\n end", "def alu_c0; @bits[12]; end", "def u(x, t)\n return 12 * @parameters[:alpha] ** 2 * (\n sech(\n @parameters[:alpha] * (x - 4 * @parameters[:alpha] ** 2 * t)\n )\n ) ** 2\n end", "def z=(value)\n @z = value\n end", "def z_value=(value)\n @z_value = value\n end", "def z\n return nil unless @grpc.position\n @grpc.position.z\n end", "def basic_z_position\n z = @pokemon.bank == 0 ? 501 : 1\n z += @pokemon.position\n return z\n end", "def z\r\n return @window.z\r\n end", "def to_uint\n elements = ((\"%02d\" * 4) % [hours,minutes,seconds,frames]).split(//).map{|e| e.to_i }\n uint = 0\n elements.reverse.each_with_index do | p, i |\n uint |= p << 4 * i \n end\n uint\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
GET /supplies_providers_loans/1 GET /supplies_providers_loans/1.json
def show @supplies_providers_loan = SuppliesProvidersLoan.find(params[:id]) respond_to do |format| format.html # show.html.erb format.json { render json: @supplies_providers_loan } end end
[ "def show\n @supplies_loan = SuppliesLoan.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @supplies_loan }\n end\n end", "def new\n @supplies_providers_loan = SuppliesProvidersLoan.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @supplies_providers_loan }\n end\n end", "def new\n @supplies_loan = SuppliesLoan.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @supplies_loan }\n end\n end", "def get_supplies\n with_monitoring_and_error_handling do\n raw_response = perform(:get, @supplies, nil, headers)\n\n MDOT::Response.new(\n response: raw_response,\n schema: :supplies,\n uuid: @user.uuid\n )\n end\n end", "def index\n @providers = Provider.all\n\n render json: @providers\n end", "def show\n @provider = current_company.providers.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @provider }\n end\n end", "def show\n @supplies_return = SuppliesReturn.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @supplies_return }\n end\n end", "def new\n @supplies_return = SuppliesReturn.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @supplies_return }\n end\n end", "def loans\n url = \"#{LOANS_PATH}/pre-approved\"\n data = perform_get(url)\n data || {}\n end", "def show\n @supply = Supply.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @supply }\n end\n end", "def new\n @provider = current_company.providers.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @provider }\n end\n end", "def index\n @providers = Provider.all\n\n respond_to do |format|\n format.json { render json: @providers }\n format.xml { render xml: @providers }\n end\n end", "def index\n @supplies = Supply.all\n end", "def show\n @loan = Loan.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @loan }\n end\n end", "def index\n @shop_platinum_offers = Shop::PlatinumOffer.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @shop_platinum_offers }\n end\n end", "def new\n @provider = Provider.new\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @provider }\n end\n end", "def show\r\n @applied_loan = AppliedLoan.find(params[:id])\r\n\r\n respond_to do |format|\r\n format.html # show.html.erb\r\n format.json { render json: @applied_loan }\r\n end\r\n end", "def show\n @books_on_loan = BooksOnLoan.find(params[:id])\n respond_to do |format|\n format.json { render json: @books_on_loan }\n end\n end", "def new\n @supply = Supply.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @supply }\n end\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
GET /supplies_providers_loans/new GET /supplies_providers_loans/new.json
def new @supplies_providers_loan = SuppliesProvidersLoan.new respond_to do |format| format.html # new.html.erb format.json { render json: @supplies_providers_loan } end end
[ "def new\n @supplies_loan = SuppliesLoan.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @supplies_loan }\n end\n end", "def new\n @title = t('view.providers.new_title')\n @provider = Provider.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @provider }\n end\n end", "def new\n @provider = Provider.new\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @provider }\n end\n end", "def new\n @provider = current_company.providers.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @provider }\n end\n end", "def new\n @supply = Supply.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @supply }\n end\n end", "def new\n @supplies_return = SuppliesReturn.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @supplies_return }\n end\n end", "def new\n @provider = Provider.new\n respond_to do |format|\n format.json { render json: @provider }\n format.xml { render xml: @provider }\n end\n end", "def new\n @loan = Loan.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @loan }\n end\n end", "def new\n @on_loan = OnLoan.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @on_loan }\n end\n end", "def provider_new\n @page_title = _('New_provider')\n @page_icon = \"add.png\"\n @provider, @tariffs, @servers = Provider.provider_new(session)\n\n if not @tariffs\n flash[:notice] = _('No_tariffs_available')\n redirect_to :action => 'list'\n end\n end", "def new\n @supplysite = Supplysite.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @supplysite }\n end\n end", "def new\n @available = Available.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @available }\n end\n end", "def new\n @provider = Provider.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @provider }\n end\n end", "def new\n @cloud_provider = current_user.cloud_providers.new\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @cloud_provider }\n end\n end", "def new\n @payment_provider = PaymentProvider.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @payment_provider }\n end\n end", "def new\n @borrow = Borrow.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @borrow }\n end\n end", "def new\n @borrow_request = BorrowRequest.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @borrow_request }\n end\n end", "def new\n @pokedb = Pokedb.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @pokedb }\n end\n end", "def new\n @pick = Pick.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @pick }\n end\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
PUT /supplies_providers_loans/1 PUT /supplies_providers_loans/1.json
def update @supplies_providers_loan = SuppliesProvidersLoan.find(params[:id]) respond_to do |format| if @supplies_providers_loan.update_attributes(params[:supplies_providers_loan]) format.html { redirect_to @supplies_providers_loan, notice: 'Supplies providers loan was successfully updated.' } format.json { head :no_content } else format.html { render action: "edit" } format.json { render json: @supplies_providers_loan.errors, status: :unprocessable_entity } end end end
[ "def update\n @supplies_loan = SuppliesLoan.find(params[:id])\n\n respond_to do |format|\n if @supplies_loan.update_attributes(params[:supplies_loan])\n format.html { redirect_to @supplies_loan, notice: 'Supplies loan was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @supplies_loan.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n if @supplies_needed.update(supplies_needed_params)\n render :show, status: :ok, location: @supplies_needed\n else\n render json: @supplies_needed.errors, status: :unprocessable_entity\n end\n end", "def update\n @provider = current_company.providers.find(params[:id])\n\n respond_to do |format|\n if @provider.update_attributes(params[:provider])\n format.html { redirect_to @provider, notice: 'Provider was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @provider.errors, status: :unprocessable_entity }\n end\n end\n end", "def new\n @supplies_providers_loan = SuppliesProvidersLoan.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @supplies_providers_loan }\n end\n end", "def update\n @provider = Provider.find(params[:id])\n\n if @provider.update(provider_params)\n head :no_content\n else\n render json: @provider.errors, status: :unprocessable_entity\n end\n end", "def update\n respond_to do |format|\n if @v1_provider.update(v1_provider_params)\n format.html { redirect_to @v1_provider, notice: 'Provider was successfully updated.' }\n format.json { render :show, status: :ok, location: @v1_provider }\n else\n format.html { render :edit }\n format.json { render json: @v1_provider.errors, status: :unprocessable_entity }\n end\n end\n end", "def show\n @supplies_providers_loan = SuppliesProvidersLoan.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @supplies_providers_loan }\n end\n end", "def new\n @supplies_loan = SuppliesLoan.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @supplies_loan }\n end\n end", "def update\n @provider = Provider.find(params[:id])\n\n respond_to do |format|\n if @provider.update_attributes(params[:provider])\n format.html { redirect_to(@provider, :notice => 'Provider was successfully updated.') }\n format.xml { head :ok }\n format.json { render :json => { :resp=> \"ok\" }}\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @provider.errors, :status => :unprocessable_entity }\n format.json { render :json => { :resp=> \"error\" }}\n end\n end\n end", "def update\n if @provider.update(provider_params)\n head :no_content\n else\n render json: @provider.errors, status: :unprocessable_entity\n end\n end", "def update\n respond_to do |format|\n if @supplier_provider.update(supplier_provider_params)\n format.html { redirect_to @supplier_provider, notice: 'Provider was successfully updated.' 
}\n format.json { render :show, status: :ok, location: @supplier_provider }\n else\n format.html { render :edit }\n format.json { render json: @supplier_provider.errors, status: :unprocessable_entity }\n end\n end\n end", "def destroy\n @supplies_providers_loan = SuppliesProvidersLoan.find(params[:id])\n @supplies_providers_loan.destroy\n\n respond_to do |format|\n format.html { redirect_to supplies_providers_loans_url }\n format.json { head :no_content }\n end\n end", "def show\n @supplies_loan = SuppliesLoan.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @supplies_loan }\n end\n end", "def update\n respond_to do |format|\n if @info_provider.update(info_provider_params)\n format.html { redirect_to @info_provider, notice: 'Provider was successfully updated.' }\n format.json { render :show, status: :ok, location: @info_provider }\n else\n format.html { render :edit }\n format.json { render json: @info_provider.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @scholarship_provider = ScholarshipProvider.find(params[:id])\n\n respond_to do |format|\n if @scholarship_provider.update_attributes(params[:scholarship_provider])\n format.json { respond_with_bip(@scholarship_provider) }\n format.html { redirect_to(@scholarship_provider, :notice => 'Scholarship provider was successfully updated.') }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @scholarship_provider.errors, :status => :unprocessable_entity }\n format.json { respond_with_bip(@scholarship_provider) }\n end\n end\n end", "def update\n respond_to do |format|\n if @provider_info.update(provider_info_params)\n format.html { redirect_to @provider_info, notice: 'Provider info was successfully updated.' }\n format.json { render :show, status: :ok, location: @provider_info }\n else\n format.html { render :edit }\n format.json { render json: @provider_info.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @provider = Provider.find(params[:id])\n\n respond_to do |format|\n if @provider.update_attributes(params[:provider])\n format.html { redirect_to(@provider, :notice => 'Provider was successfully updated.') }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @provider.errors, :status => :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @id_provider.update(id_provider_params)\n flash.now[:notice] = 'Id provider information was successfully updated.'\n format.any { render :edit }\n format.json { render :show, status: :ok, location: @id_provider }\n else\n format.any { render :edit }\n format.json { render json: @id_provider.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @supply = Supply.find(params[:id])\n\n respond_to do |format|\n if @supply.update_attributes(params[:supply])\n format.html { redirect_to supplies_path, notice: 'Supply was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @supply.errors, status: :unprocessable_entity }\n end\n end\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
DELETE /supplies_providers_loans/1 DELETE /supplies_providers_loans/1.json
def destroy @supplies_providers_loan = SuppliesProvidersLoan.find(params[:id]) @supplies_providers_loan.destroy respond_to do |format| format.html { redirect_to supplies_providers_loans_url } format.json { head :no_content } end end
[ "def destroy\n @supplies_loan = SuppliesLoan.find(params[:id])\n @supplies_loan.destroy\n\n respond_to do |format|\n format.html { redirect_to supplies_loans_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @provider = Provider.find(params[:id])\n @provider.destroy\n\n respond_to do |format|\n format.html { redirect_to providers_url }\n format.json { head :ok }\n end\n end", "def destroy\n @provider.destroy\n respond_to do |format|\n format.html { redirect_to providers_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @supplies_needed.destroy\n end", "def destroy\n @supply = Supply.find(params[:id])\n @supply.destroy\n\n respond_to do |format|\n format.html { redirect_to supplies_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @has_used_supply.destroy\n respond_to do |format|\n format.html { redirect_to has_used_supplies_url, notice: 'Has used supply was successfully destroyed.' }\n format.json { head :no_content }\n end\n end", "def destroy\n @v1_provider.destroy\n respond_to do |format|\n format.html { redirect_to v1_providers_url, notice: 'Provider was successfully destroyed.' }\n format.json { head :no_content }\n end\n end", "def destroy\n @hotelsupply = Hotelsupply.find(params[:id])\n @hotelsupply.destroy\n\n respond_to do |format|\n format.html { redirect_to hotelsupplies_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @supply = Supply.find(params[:id])\n @supply.destroy\n\n respond_to do |format|\n format.html { redirect_to(supplies_url) }\n format.xml { head :ok }\n end\n end", "def destroy\n @supplement_plan = SupplementPlan.find(params[:id])\n @supplement_plan.destroy\n\n respond_to do |format|\n format.html { redirect_to supplement_plans_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @supply.destroy\n respond_to do |format|\n format.html { redirect_to supplies_path, notice: 'Record was successfully deleted.' }\n format.json { head :no_content }\n end\n end", "def destroy\n @supplies_return = SuppliesReturn.find(params[:id])\n @supplies_return.destroy\n\n respond_to do |format|\n format.html { redirect_to supplies_returns_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @on_loan = OnLoan.find(params[:id])\n @on_loan.destroy\n\n respond_to do |format|\n format.html { redirect_to on_loans_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @supply_list.destroy\n respond_to do |format|\n format.html { redirect_to supply_lists_url, notice: 'Supply list was successfully destroyed.' }\n format.json { head :no_content }\n end\n end", "def destroy\n @surgery_supply.destroy\n respond_to do |format|\n format.html { redirect_to surgery_supplies_url, notice: 'Surgery supply was successfully destroyed.' }\n format.json { head :no_content }\n end\n end", "def destroy\n @demand.destroy\n respond_to do |format|\n format.json { head :no_content }\n end\n end", "def delete_tenant_circle(args = {}) \n delete(\"/tenantcircles.json/#{args[:circleId]}\", args)\nend", "def destroy\n @supplier_provider.destroy\n respond_to do |format|\n format.html { redirect_to supplier_providers_url, notice: 'Provider was successfully destroyed.' }\n format.json { head :no_content }\n end\n end", "def destroy\n authorize! 
:manage, @short_supply_registration\n @short_supply_registration = ShortSupplyRegistration.find(params[:id])\n @short_supply_registration.destroy\n\n respond_to do |format|\n format.html { redirect_to short_supply_registrations_url }\n format.json { head :no_content }\n end\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
returns a copy of the point
def get_point_clone return @point.clone end
[ "def get_point_clone\n return @point.clone\n end", "def copy()\n Point.new(@x,@y)\n end", "def to_point\n if length == 2\n p = Point.new(*self)\n elsif length == 1\n p = self[0].clone\n end\n return p\n end", "def add_to_point point\n add_to_point! point.dup\n end", "def point\n Point.new(x, y)\n end", "def get_point\n p = nil\n @m.synchronize{\n p = @p[0]\n @p.delete_at(0)\n }\n return p\n end", "def dup\n Trajectory.new(self.points.map(&:dup))\n end", "def s_copy\n Vector3f.new(@x, @y)\n end", "def copy!(inputpoint)\n end", "def + (point) \r\n return Point2D.new(@x+point.x,@y+point.y) \r\n end", "def clone\n\t\t\n\t\tGeocoordinate.new(@latitude, @longitude)\n\t\t\n\tend", "def minus(point)\n delta_x = @x - point.x\n delta_y = @y - point.y\n Point.new(delta_x, delta_y)\n end", "def + (point)\n return Point2D.new(@x + point.x, @y + point.y)\n end", "def point( point )\n curve = self.sample( point.y )\n return curve.point( point.x )\n end", "def inverse(point)\r\n inverse!(point.dup)\r\n end", "def + point\n\t\tPoint.new(@x+point.x, @y+point.y)\n\tend", "def -(other_point)\n Point.new(self.x - other_point.x, self.y - other_point.y)\n end", "def get_real_point\n pos_x = x * get_scale(:x)\n pos_y = y * get_scale(:y)\n return Point.new(pos_x, pos_y) unless (has_layer?)\n real_point = get_layer.get_real_point\n return Point.new(\n (real_point.x + pos_x),\n (real_point.y + pos_y)\n )\n end", "def copy\n Ripple.new(@pos)\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
checks whether the function is converging
def converging? # check the convergence in a given direction comparing the previous and current values def point_converged?(previous, current) pre = previous.value curr = current.value diff = (pre - curr).abs size = [pre.abs, curr.abs].max return !((diff <= (size * @relative_threshold)) and (diff <= @absolute_threshold)) end # returns true if converging is possible atleast in one direction if @iterations > 0 # given direction is converged converged = true 0.upto(@simplex.length - 1) do |i| converged &= !point_converged?(@previous[i], @simplex[i]) end return !converged end # if no iterations were done, convergence undefined return true end
[ "def converge_complete; end", "def converged?\n @iterations.last.converged?\n end", "def converge_complete\n end", "def converge\n transition_to(:converge)\n end", "def converge_failed(exception); end", "def is_training_finished(points)\n res = true\n points.each do | point|\n if point <= NUMBER_OF_CORRECTNESS\n res= false\n return false\n end\n end\n\n return true\nend", "def solvable?\n steps > 0\n end", "def converge?(a, b)\n difference = find_difference(a, b)\n\n a_runner = a\n b_runner = b\n\n if difference > 0\n difference.times do\n b_runner = b_runner.next\n end\n else\n (-difference).times do\n a_runner = a_runner.next\n end\n end\n\n until a_runner.nil?\n return true if a_runner == b_runner\n a_runner = a_runner.next\n b_runner = b_runner.next\n end\n\n false\nend", "def funded?\n input_value >= (output_value + fee_override)\n end", "def enough_iterations?\n iter % iter_interval == 0 if iter_interval\n end", "def failed_conformance_checking?\n did_fail_conformance_checking?\n end", "def enough_iterations?\n iter % iter_interval == 0 if iter_interval\n end", "def swing?(base_val, num_steps)\n base_val != 1.0 and num_steps != 0\nend", "def completed_conformance_checking?\n did_complete_conformance_checking?\n end", "def finished_iterations?\n return true unless running?\n return true if maximum_iterations and (iterations >= maximum_iterations)\n return false\n end", "def is_effective?(value)\n value % @step_value == 0\n end", "def converge_start(run_context); end", "def convergence\n variances.sum\n end", "def finite?() end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Build an initial simplex == Parameters: start_point: starting point of the minimization search
def build_simplex(start_point) n = start_point.length raise "dimension mismatch" if n != @start_configuration.length # set first vertex @simplex = Array.new(n+1) @simplex[0] = PointValuePair.new(start_point, Float::NAN) # set remaining vertices 0.upto(n - 1) do |i| conf_i = @start_configuration[i] vertex_i = Array.new(n) 0.upto(n - 1) do |k| vertex_i[k] = start_point[k] + conf_i[k] end @simplex[i + 1] = PointValuePair.new(vertex_i, Float::NAN) end end
[ "def build_simplex(start_point)\n n = start_point.length\n raise \"dimension mismatch\" if n != @start_configuration.length\n # set first vertex\n @simplex = Array.new(n+1)\n @simplex[0] = PointValuePair.new(move_into_bounds(start_point), Float::NAN)\n\n # set remaining vertices\n 0.upto(n - 1) do |i|\n conf_i = @start_configuration[i]\n vertex_i = Array.new(n)\n 0.upto(n - 1) do |k|\n vertex_i[k] = start_point[k] + conf_i[k]\n end\n @simplex[i + 1] = PointValuePair.new(move_into_bounds(vertex_i), Float::NAN)\n end\n end", "def build_simplex(start_point)\n n = start_point.length\n raise \"dimension mismatch\" if n != @start_configuration.length\n # set first vertex\n @simplex = Array.new(n+1)\n @simplex[0] = PointValuePair.new(start_point, Float::NAN)\n\n # set remaining vertices\n 0.upto(n - 1) do |i|\n conf_i = @start_configuration[i]\n vertex_i = Array.new(n)\n 0.upto(n - 1) do |k|\n vertex_i[k] = start_point[k] + conf_i[k]\n end\n @simplex[i + 1] = PointValuePair.new(vertex_i, Float::NAN)\n end\n end", "def initialize(start_point:, lower_bound: nil, upper_bound: nil, \n epsilon: 1e-6, max_iterations: 1e6, &f)\n @start_point = start_point\n @lower_bound = lower_bound\n @upper_bound = upper_bound\n\n @rho = 1.0 # Reflection coefficient\n @khi = 2.0 # Expansion coefficient\n @gamma = 0.5 # Contraction coefficient\n @sigma = 0.5 # Shrinkage coefficient\n\n @epsilon = epsilon\n @max_iterations = max_iterations\n @relative_threshold = 100 * @epsilon\n @absolute_threshold = @epsilon\n @x_minimum = nil\n @f_minimum = nil\n @f = f\n\n n = start_point.length\n # create and initialize start configurations\n if @start_configuration == nil\n # sets the start configuration point as unit\n self.start_configuration = Array.new(n) { 1.0 }\n end\n\n if lower_bound.nil? then\n @lower_bound = Array.new(n) { -Float::INFINITY }\n else\n raise \"Lower bound should be of the same length as the start point\" unless lower_bound.length == n\n @lower_bound = lower_bound \n end\n if upper_bound.nil? 
then\n @upper_bound = Array.new(n) { Float::INFINITY }\n else\n raise \"Upper bound should be of the same length as the start point\" unless upper_bound.length == n\n @upper_bound = upper_bound \n end\n 0.upto(n-1) do |i|\n raise \"Lower bounds should be smaller than upper bounds\" unless @lower_bound[i] < @upper_bound[i]\n end\n\n @iterations = 0\n @evaluations = 0\n # create the simplex for the first time\n build_simplex(start_point)\n evaluate_simplex\n end", "def initial_simplex(x1=ParameterSet.new(-4.0,-4.0),c=8)\n p= c/Math.sqrt(2) * (Math.sqrt(3)-1)/2\n q= ParameterSet.new(p,p)\n x2 = x1 + q + ParameterSet.new(1.0,0.0) * (c/Math.sqrt(2))\n x3 = x1 + q + ParameterSet.new(0.0,1.0) * (c/Math.sqrt(2))\n @simplex = [x1,x2,x3]\n end", "def start_at(point)\n\t\traise BuildError, \"Can't specify a start point more than once\" if first\n\t\tpush Point[point]\n\t end", "def init_dijkstra(start_town)\n temp_nodes = gp.make_graph\n temp_nodes.each do |name,node|\n node.min_dist = Float::INFINITY\n end\n temp_nodes[start_town].min_dist = 0\n queue = [].push temp_nodes[start_town]\n visited = []\n return temp_nodes,queue,visited\n end", "def generate_initial_condition\n initial_condition_elements = []\n (0...@parameters[:number_of_spatial_points]).each do |n|\n first_soliton_point = soliton_point(\n n, @parameters[:alpha1], @parameters[:spatial_offset1]\n )\n second_soliton_point = soliton_point(\n n, @parameters[:alpha2], @parameters[:spatial_offset2]\n )\n initial_condition_elements << (\n first_soliton_point + second_soliton_point\n )\n end\n @initial_condition = Vector.elements(initial_condition_elements)\n end", "def evaluate_simplex\n # evaluate the objective function at all non-evaluated simplex points\n 0.upto(@simplex.length - 1) do |i|\n vertex = @simplex[i]\n point = vertex.point\n if vertex.value.nan?\n @simplex[i] = PointValuePair.new(point, f(point))\n end\n end\n # sort the simplex from best to worst\n @simplex.sort!{ |x1, x2| x1.value <=> x2.value }\n end", "def evaluate_simplex\n # evaluate the objective function at all non-evaluated simplex points\n 0.upto(@simplex.length - 1) do |i|\n vertex = @simplex[i]\n point = vertex.point\n if vertex.value.nan?\n @simplex[i] = PointValuePair.new(point, f(point))\n end\n end\n # sort the simplex from best to worst\n @simplex.sort!{ |x1, x2| x1.value <=> x2.value }\n end", "def solve_start(first)\n starting_edge = Edge.new(first,first,0, 0)\n @queue.push(starting_edge)\n solve(first)\n end", "def generate_initial_condition\n initial_condition_elements = []\n \n num_wave_points = (\n (@parameters[:period]/2) / @parameters[:x_interval]\n ).to_i\n \n num_zero_points_before = \n (\n @parameters[:spatial_offset] * \n @parameters[:number_of_spatial_points]\n ).to_i\n \n num_zero_points_after = \n @parameters[:number_of_spatial_points] - \n num_wave_points - \n num_zero_points_before\n \n (0..num_zero_points_before).each do |n|\n initial_condition_elements << 0.0\n end\n \n (0...num_wave_points).each do |n|\n initial_condition_elements << sine_wave_point(\n n * @parameters[:x_interval]\n )\n end\n \n (0..num_zero_points_after).each do |n|\n initial_condition_elements << 0.0\n end\n \n @initial_condition = Vector.elements(initial_condition_elements)\n end", "def evaluate_simplex\n # evaluate the objective function at all non-evaluated simplex points\n @simplex.each_with_index do |v,i|\n @simplex[i].value = f(v.point) if v.value.nan?\n end\n # sort the simplex from best to worst\n @simplex.sort!{ |x1, x2| x1.value <=> x2.value }\n end", "def 
start_point\n o = st_start_point\n [o.y, o.x]\n end", "def shortest_path(start, finish)\n queue << [start, 0]\n loop do\n break if queue.empty?\n vertex, d = queue.pop\n graph[*vertex] = d\n break if vertex == finish\n enqueue_neighbours(*vertex, d + 1)\n end\n queue.clear\n !blank?(finish) ? build_path(start, finish) : []\n end", "def startpoint=(startPoint)\n @elementHash[:startpoint] = startPoint\n end", "def start_point\n postgis_calculate(:startpoint, self)\n end", "def initialize(repo,start,stepper)\n @repo=repo\n @stepper=stepper\n @start=start\n @visited=Set.new\n # a binding binding :start to each current value of start\n @solution=if @start.is_a? RDF::Query\n start.execute(@repo)\n elsif @start.is_a? Array\n start.map{|u|RDF::Solution.new(:end=>u)}\n elsif @start.is_a? RDF::URI\n [RDF::Query::Solution.new(:end=>@start)]\n else\n raise \"Should supply start as query or uriref or array of urirefs\"\n end\n end", "def generate_mst(source)\n @vertices[source].distance = 0\n self.distances(source)\n @vertices.each do |vert|\n @unoptimized_nodes << vert[0]\n end\n @unoptimized_nodes.delete_if{|x| x == source}\n while @unoptimized_nodes.length > 0\n self.step_forward\n end\n puts \"The edges for the MST are below:\"\n @vertices.each do |vert|\n edge1 = vert[1].node\n edge2 = vert[1].previous\n if edge2 != ''\n puts \"#{edge1} to #{edge2}\"\n end\n end\n end", "def spanning_forest(start, routine)\r\n predecessor = {}\r\n roots = []\r\n te = Proc.new {|e| predecessor[e.target] = e.source}\r\n rv = Proc.new {|v| roots << v}\r\n send routine, :start => start, :tree_edge => te, :root_vertex => rv\r\n [predecessor, roots]\r\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Evaluate all the nonevaluated points of the simplex
def evaluate_simplex # evaluate the objective function at all non-evaluated simplex points 0.upto(@simplex.length - 1) do |i| vertex = @simplex[i] point = vertex.point if vertex.value.nan? @simplex[i] = PointValuePair.new(point, f(point)) end end # sort the simplex from best to worst @simplex.sort!{ |x1, x2| x1.value <=> x2.value } end
[ "def evaluate_simplex\n # evaluate the objective function at all non-evaluated simplex points\n @simplex.each_with_index do |v,i|\n @simplex[i].value = f(v.point) if v.value.nan?\n end\n # sort the simplex from best to worst\n @simplex.sort!{ |x1, x2| x1.value <=> x2.value }\n end", "def evaluate_simplex\n # evaluate the objective function at all non-evaluated simplex points\n 0.upto(@simplex.length - 1) do |i|\n vertex = @simplex[i]\n point = vertex.point\n if vertex.value.nan?\n @simplex[i] = PointValuePair.new(point, f(point))\n end\n end\n # sort the simplex from best to worst\n @simplex.sort!{ |x1, x2| x1.value <=> x2.value }\n end", "def remove_unfeasible (points = [])\n if @rules then\n points.select!{ |pt|\n rules_checker(pt)\n }\n end\n return points\n end", "def calculate_points\n unless self.points\n self.points = PointCalculator.new(self.value, self.promotion).points\n end\n end", "def possible_points\n (self.evals.reduce(0) {|sum, eval| sum += eval.max_score.to_i; sum})\n end", "def accumulate_points(points)\n points.map{|point| point.value}.reduce(0, :+)\n end", "def eqs_for_free_points(p)\n return [] if p.free?\n q, r = first_point, second_point\n eqs = []\n uv = [:x,:y,:z] # uv stands for unused vars\n %w{x y z}.map(&:to_sym).each do |v|\n if 0 == (q.send(v).to_f - r.send(v).to_f)\n eqs << p.send(v.to_s+\"_to\", q) # <-- A bit different equation\n uv.delete(v)\n end\n end\n if uv.size == 3\n eq_for_non_free_points(p) # The equations are the same as the\n # non-free-points options\n elsif uv.size == 2\n eqs << eq_for_coordinates(p, *uv)\n end\n eqs\n end", "def evaluate\n\t\t#puts \"Neuron evaluated\"\n\t\ttotal = 0.0\n\t\t@inputs.each do |input|\n\t\t\ttotal = total + input.get_value\n\t\tend\n\t\ttotal = total - @bias\n\t\t\n\t\t#puts \"Node total was #{total}\"\n\t\t\n\t\t#Sign function implementation\n\t\t#if(total > 0.0)\n\t\t\t#output = 1.0\n\t\t#elsif (total < 0.0)\n\t\t\t#output = -1.0\n\t\t#else\n\t\t\t#output = 0.0\n\t\t#end\n\t\n\t\t#Linear function output\n\t\t\n\t\toutput = total\n\t\t\n\t\t\n\t\n\t\t@output_value = output\n\t\n\tend", "def negation_solution(x, y)\n negation_value(coords_of_square_neighbors(x, y), x, y) ||\n negation_value(coords_of_row_neighbors(x, y), x, y) ||\n negation_value(coords_of_column_neighbors(x, y), x, y)\n end", "def fitness\r\n @fitness = evaluate unless @fitness\r\n @fitness\r\n end", "def isAllWhite \n for x in 0..3\n for y in 0..3\n v = self.val(x,y) ;\n if(v == 1) \n return false ;\n end\n end\n end\n return true ;\n end", "def test_unbounded\n simplex = Simplex.new(\n [1, 1, 1],\n [\n [3, 1, -2],\n [4, 3, 0]\n ],\n [5, 7]\n )\n assert_raises Simplex::UnboundedProblem do\n simplex.solution\n end\n end", "def get_noncollinear_points(points)\n pt1 = points[0]\n pt2 = nil\n points.each { |pt|\n next if (pt1 == pt)\n pt2 = pt\n break\n }\n return unless pt2\n v1 = pt2 - pt1\n points.each { |pt|\n next if (pt1 == pt || pt2 == pt)\n pt3 = pt\n v2 = pt3 - pt1\n return [pt1, pt2, pt3] unless v2.parallel?(v1)\n }\n nil\n end", "def unassigned_evaluate_proposals\n self.evaluate_proposal.unassigned # use a StonePath provided named scopes\n end", "def empty_neighbours point\n neighbours(point).select do |(x, y)|\n at(x, y) < 0\n end\n end", "def sor(n, a, b, x0, w, error, n_max)\n n = n - 1\n\n x = Array.new(n + 1)\n for k in (0..n_max)\n sumatoria = (1..n).inject(0) { |sum, j| sum + a[0][j] * x0[j] }\n x[0] = (1 - w) * x0[0] + w * (b[0] - sumatoria).fdiv(a[0][0])\n\n (1..n - 1).each do |i|\n sumatoria_1 = (0..i - 1).inject(0) { |sum, 
j| sum + a[i][j] * x[j] }\n sumatoria_2 = (i + 1..n).inject(0) { |sum, j| sum + a[i][j] * x0[j] }\n x[i] = (1 - w) * x0[i] + w * (b[i] - sumatoria_1 - sumatoria_2).fdiv(a[i][i])\n end\n\n sumatoria = (0..n - 1).inject(0) { |sum, j| sum + a[n][j] * x[j] }\n x[n] = (1 - w) * x0[n] + w * (b[n] - sumatoria).fdiv(a[n][n])\n\n resta = x.map.with_index { |xi, i| xi - x0[i] }\n modulo = Math::sqrt(resta.inject(0) { |sum, i| sum + i ** 2 })\n if modulo < error\n puts \"Una solucion aproximada es X = #{x}.\"\n return x\n end\n\n x0.replace(x)\n end\n\n puts \"Se alcanzo el numero maximo de iteraciones n_max pero no la tolerancia.\"\nend", "def compute_values \n arr = inputs_matrix.map{|e| @evaluator.call(*e)}\n matrix = NArray[arr].reshape(@width, @height)\n end", "def update_pis\n new_pis = []\n 20.times {|i|\n res = 0\n @ys.each_with_index {|yt, t|\n res += (yt * @xs[t][i] * @pis[i]) / compute_noisy_or(t)\n }\n new_pis << res / @tis[i].to_f\n }\n\n return new_pis\nend", "def unionXWithFunction_PiecewiseLinear(iOtherFunction)\n lPoints = @Function[:Points]\n lOtherPoints = iOtherFunction.function_data[:Points]\n # Get all the abscisses sorted\n lXList = (lPoints.map { |iPoint| next iPoint[0] } + lOtherPoints.map { |iPoint| next iPoint[0] }).sort.uniq\n # Read segments abscisse by abscisse\n lIdxSegment = 0\n lIdxOtherSegment = 0\n lXList.each do |iX|\n if (lPoints[lIdxSegment] == nil)\n # No abscisse on lPoints for this iX\n # Forcefully we have lOtherPoints[lIdxOtherSegment][0] == iX\n yield(iX, nil, lOtherPoints[lIdxOtherSegment][1])\n lIdxOtherSegment += 1\n elsif (lOtherPoints[lIdxOtherSegment] == nil)\n # No abscisse on lOtherPoints for this iX\n # Forcefully we have lPoints[lIdxSegment][0] == iX\n yield(iX, lPoints[lIdxSegment][1], nil)\n lIdxSegment += 1\n elsif (lPoints[lIdxSegment][0] == iX)\n # lPoints has this abscisse\n if (lOtherPoints[lIdxOtherSegment][0] == iX)\n # If both functions have a point here, it's easy.\n yield(iX, lPoints[lIdxSegment][1], lOtherPoints[lIdxOtherSegment][1])\n lIdxOtherSegment += 1\n else\n # Compute the Y value for the other function\n yield(iX, lPoints[lIdxSegment][1], lOtherPoints[lIdxOtherSegment-1][1] + ((lOtherPoints[lIdxOtherSegment][1] - lOtherPoints[lIdxOtherSegment-1][1])*(iX - lOtherPoints[lIdxOtherSegment-1][0]))/(lOtherPoints[lIdxOtherSegment][0] - lOtherPoints[lIdxOtherSegment-1][0]))\n end\n lIdxSegment += 1\n else\n # We have forcefully lOtherPoints[lIdxOtherSegment][0] == iX\n # Compute the Y value for this function\n yield(iX, lPoints[lIdxSegment-1][1] + ((lPoints[lIdxSegment][1] - lPoints[lIdxSegment-1][1])*(iX - lPoints[lIdxSegment-1][0]))/(lPoints[lIdxSegment][0] - lPoints[lIdxSegment-1][0]), lOtherPoints[lIdxOtherSegment][1])\n lIdxOtherSegment += 1\n end\n end\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Replace the worst point of the simplex by a new point == Parameters: point_value_pair: point to insert
def replace_worst_point(point_value_pair) n = @simplex.length - 1 0.upto(n - 1) do |i| if (compare(@simplex[i], point_value_pair) > 0) point_value_pair, @simplex[i] = @simplex[i], point_value_pair end end @simplex[n] = point_value_pair end
[ "def replace_worst_point(point_value_pair)\n n = @simplex.length - 1\n 0.upto(n - 1) do |i|\n if (compare(@simplex[i], point_value_pair) > 0)\n point_value_pair, @simplex[i] = @simplex[i], point_value_pair\n end\n end\n @simplex[n] = point_value_pair\n end", "def add_to_point point\n add_to_point! point.dup\n end", "def insertViaPointLast(viaPoint)\n insertViaPoint(viaPoint, -1) ;\n end", "def set_point_in_space(point, space, value)\n if point.length > 1\n set_point_in_space(point.drop(1), space[point.first], value)\n else\n space[point.first] = value\n end\n end", "def evaluate_simplex\n # evaluate the objective function at all non-evaluated simplex points\n 0.upto(@simplex.length - 1) do |i|\n vertex = @simplex[i]\n point = vertex.point\n if vertex.value.nan?\n @simplex[i] = PointValuePair.new(point, f(point))\n end\n end\n # sort the simplex from best to worst\n @simplex.sort!{ |x1, x2| x1.value <=> x2.value }\n end", "def evaluate_simplex\n # evaluate the objective function at all non-evaluated simplex points\n 0.upto(@simplex.length - 1) do |i|\n vertex = @simplex[i]\n point = vertex.point\n if vertex.value.nan?\n @simplex[i] = PointValuePair.new(point, f(point))\n end\n end\n # sort the simplex from best to worst\n @simplex.sort!{ |x1, x2| x1.value <=> x2.value }\n end", "def insert2pool offspring, iter\n\t\tfit_score = fitness offspring\n\t\t#first find the correct positon\n\t\tidx = @pool.index { |f| f[:fitness] < fit_score }\n\t\tidx = idx.nil? ? -1 : idx #new fitness smaller than all, add at the end\n\t\t@pool.insert( idx, { expr: offspring, fitness: fit_score, generation: iter } )\n\tend", "def insertViaPoint(viaPoint, nth = 0)\n if(nth < 0) then\n if(@viaPointList.size + nth + 1 < @viaPointIndex) ; then\n nth = @viaPointIndex - @viaPointList.size - 1;\n end\n @viaPointList.insert(nth, viaPoint) ;\n else\n if(@viaPointIndex + nth > @viaPointList.size) then\n nth = @viaPointList.size - @viaPointIndex ;\n end\n @viaPointList.insert(@viaPointIndex + nth, viaPoint) ;\n end\n end", "def insert_point(point)\n if @value.nil? then\n # Empty tree\n @value = point\n return\n end\n\n # Decide which side of the hyperplane this point goes on \n # (for points lying on the hyperplane, the side is arbitrary).\n # After that, try to put the point there, or if it's already\n # occupied by another tree, try to put it into that tree.\n # TODO: You could use the first-three-dimensions compare here to \n # potentially speed things up\n if point[@axis] >= @value[@axis] then\n if @right.nil? then\n @right = TreeKD.new([point], @dimension, @axis + 1)\n else\n @right.insert_point(point) \n end\n else\n if @left.nil? 
then\n @left = TreeKD.new([point], @dimension, @axis + 1)\n else\n @left.insert_point(point)\n end\n end\n end", "def insert_at(points)\n for i in 0...points\n @multi_array[@points_array[i][0]][@points_array[i][1]] = 1 # insert 1 at specified position\n end\n end", "def insert_point(point)\n if @value.nil?\n # Empty tree\n @value = point\n return\n end\n\n # Decide which side of the hyperplane this point goes on\n # (for points lying on the hyperplane, the side is arbitrary).\n # After that, try to put the point there, or if it's already\n # occupied by another tree, try to put it into that tree.\n # TODO: You could use the first-three-dimensions compare here to\n # potentially speed things up\n if point[@axis] >= @value[@axis]\n if @right.nil?\n @right = KDTree.new([point], @dimension, @axis + 1)\n else\n @right.insert_point(point)\n end\n else\n if @left.nil?\n @left = KDTree.new([point], @dimension, @axis + 1)\n else\n @left.insert_point(point)\n end\n end\n end", "def remove_noise_abscisses(iMinDistance)\n case @Function[:FunctionType]\n when FCTTYPE_PIECEWISE_LINEAR\n lNewPoints = [ @Function[:Points][0] ]\n lIdxPoint = 0\n while (lIdxPoint < @Function[:Points].size - 1)\n # Now we skip the next last point among iMinDistance range\n lPointX = @Function[:Points][lIdxPoint][0]\n lIdxOtherPoint = lIdxPoint + 1\n while ((lIdxOtherPoint < @Function[:Points].size) and\n (@Function[:Points][lIdxOtherPoint][0] - lPointX < iMinDistance))\n lIdxOtherPoint += 1\n end\n # Either lIdxOtherPoint is beyond the end, or it points to the first point that is beyond iMinDistance\n # We add the previous point if it is not already ours\n if (lIdxOtherPoint-1 > lIdxPoint)\n lNewPoints << @Function[:Points][lIdxOtherPoint-1]\n # And we continue searching from this new added point\n lIdxPoint = lIdxOtherPoint-1\n else\n # It is our point, continue on to the next one\n lNewPoints << @Function[:Points][lIdxOtherPoint]\n lIdxPoint = lIdxOtherPoint\n end\n end\n @Function[:Points] = lNewPoints\n else\n log_err \"Unknown function type: #{@Function[:FunctionType]}\"\n end\n optimize\n end", "def ordered_insert(value)\n if @coordinates.empty?\n @coordinates << value\n else\n idx = @coordinates.find_index { |x| x[:latitude] > value[:latitude] && x[:longitude] > value[:longitude] }\n if idx.nil?\n @coordinates << value\n else\n temp = @coordinates.slice!(idx, @coordinates.length - idx)\n @coordinates << value\n @coordinates.concat temp\n end\n end\n end", "def update_closest(array, city, new_dist)\n if !array.include?([city, new_dist])\n insert_point = binary_insert(array, new_dist)\n array.insert(insert_point,[city, new_dist])\n end\nend", "def put_point(p)\n process_list.push(p)\n sample_points.push(p) \n grid[grid_coordinates(p)] = p\n end", "def insert_piece(piece,x,y) \n piece.points.each { |point| \n #puts \"Inserting #{point[0]},#{point[1]} to #{x},#{y}\" \n @pole[x+point[0]][y-point[1]] = piece.name\n }\n end", "def evaluate_simplex\n # evaluate the objective function at all non-evaluated simplex points\n @simplex.each_with_index do |v,i|\n @simplex[i].value = f(v.point) if v.value.nan?\n end\n # sort the simplex from best to worst\n @simplex.sort!{ |x1, x2| x1.value <=> x2.value }\n end", "def find_and_replace(list,user,score,k)\n\t\tto_insert=OpenStruct.new(:user=>user,:score=>score)\n\t\tif list.length<k\n\t\t\tlist.push(to_insert)\n\t\telse\n\t\t\tminium=list[0]\n\t\t\tlist.each_index do |i|\n\t\t\t\tif list[i].score<to_insert.score #swap the insert one with current 
element\n\t\t\t\t\ttemp=list[i]\n\t\t\t\t\tlist[i]=to_insert\n\t\t\t\t\tto_insert=temp\n\t\t\t\tend\n\t\t\tend\n\t\tend\n\tend", "def insertion_point=(point)\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
loads structure file csv, returns structure as array
def loadStructure(file) csv_text = File.read(file) return CSV.parse(csv_text, :headers => true) end
[ "def load_csv\n csv = CSV.read(@csv_file)\n @schema = csv.shift[3..-1]\n return csv\n end", "def convert_csv_file_to_object\n begin\n CSV.foreach(@file_name) do |row|\n @csv_object.push(row)\n end \n rescue => exception\n raise FileReadError, exception\n end\n end", "def parse_to_load_file(csv)\n csv.each_with_index do |student, index|\n student = {month: csv[index][0] , name: csv[index][1], city: csv[index][2], hobby: csv[index][3]}\n @students << student\n end\nend", "def get\n arr = []\n\n process(csv_file).each do |record|\n arr << SOA_CSV_RECORD.new(*record.fields)\n end\n\n arr\n end", "def parse_csv(file_path)\n record_list = []\n header = []\n is_header = true\n \n CSV.foreach(file_path) do |row|\n if (is_header)\n\theader = row\n\tis_header = false\n else\n\trecord = Record.new\n\trecord.create(header, row)\n\trecord_list.push(record)\n end\n end\n return record_list\n end", "def mat_from_csv(path)\n arr_of_arrs = CSV.read(path)\n return arr_of_arrs\nend", "def read\n raw = super or return []\n rows = ::CSV.parse(raw.to_s)\n headers = rows.shift or raise Error::File, \"No CSV at #{@path}\"\n rows.map{|row| Utility.array_to_hash(row, headers) }\n end", "def parse_csv\n #pre\n assert @fileName.end_with? \".csv\"\n assert @fileName.is_a? String\n assert FileTest.exist? @fileName\n\n objList = CSV.read(@fileName, converters: :numeric) #converts to numerics\n\n #post\n assert objList[0].is_a? Array\n assert objList[0].each { |a| assert a.is_a? Numeric }\n return objList[0] #grab 0 because we want to omit other rows\n end", "def csv_to_array(csv_name)\n\tCSV.read(csv_name)\nend", "def parse\n csv_file_parsable? unless @csv # make sure we have parsed it even if #valid? wasn't \n begin\n Order.transaction do\n @csv.map(&:to_hash).each do |h|\n order = ImportedOrder.create!(:external_key => h[\"Order #\"], :processed_by => @import.processed_by)\n populate_from_import(order, h)\n @import.imported_orders << order\n end\n end\n rescue ImportedOrder::MissingDataError => e\n import.errors.add(:base, e.message)\n return nil\n end\n end", "def array_from_csv(csv, opts = OPTS)\n CSV.parse(csv, process_csv_serializer_opts(opts)).map do |row|\n row = row.to_hash\n row.delete(nil)\n new(row)\n end\n end", "def get_csv_data_from_file(path)\n text = read_file(path)\n if text == nil\n return nil\n end\n data = Array.new\n text.each_line.each_with_index do |line, i|\n if i > 0\n tmp = line.split(\",\")\n data.push(Struct::Ohlc_data.new(tmp[0].strip, tmp[1].strip.to_f, tmp[2].strip.to_f, tmp[3].strip.to_f, tmp[4].strip.to_f, nil, nil))\n end\n end\n data.sort {|a, b| a[:date] <=> b[:date]}\n return data\nend", "def parse_csv\n require 'csv'\n\n # skip blank rows \n # also skip the row if first three columns are empty? \n # this removes bottom total page_count row.\n # using .all? for first three column row.to_hash.values[0..2].all? \t\n # blank? 
is supported only in Rails, not in Ruby\n @csv = CSV.parse(book_plan, :headers=> true, :skip_blanks=> true).delete_if do |row| \n row.to_hash.values[0..2].all?{|col| col.to_s.blank?}\n end\n\t\t\n @parts = []\n @csv.each_with_index do |row, i|\n if row[PART_INDEX]\n @parts << @current_part unless i == 0\n @current_part = []\n @current_part << row\n else\n @current_part << row\n end\n end\n @parts << @current_part # insert last part\n @parts.each do |part|\n create_part_nodes(part)\n end\n end", "def parse_quotes_csv_file\n csv_data = CSV.read(\"db/quotes_for_generator.csv\")\n #get rid of tthe column names from the file\n csv_data.shift\n\n quote_object_array = []\n csv_data.each do |quote_row_arr|\n quote_object = {\n :content => quote_row_arr[0],\n :author => quote_row_arr[1],\n }\n quote_object_array.push(quote_object) \n end\n quote_object_array.flatten\nend", "def load_csv_data_stream(csv_file, project, site)\n csv_data = CSV.read(csv_file)\n path = File.dirname(csv_file)\n\n errors = validate_csv_data(csv_data)\n\n all_objects = []\n if errors.empty?\n attr_names = csv_data[0].map{|name| name.tableize.singularize.gsub(' ', '_') }\n attr_names = attr_names.map {|name| name.eql?(\"yogo_id\") ? \"yogo_id\" : \"yogo__#{name}\" }\n props = attr_names.map {|name| properties[name] }\n\n csv_data[3..-1].each_index do |idx|\n line = csv_data[idx+3]\n line_data = Hash.new\n if !line.empty? #ignore blank lines\n csv_data[0].each_index do |i|\n prop = props[i]\n\n if prop.type == DataMapper::Types::YogoFile || prop.type == DataMapper::Types::YogoImage\n column_value = File.open(File.join(path, line[i]))\n line_data[attr_names[i]] = column_value unless column_value.nil? || prop.nil?\n else\n line_data[attr_names[i]] = prop.typecast(line[i]) unless line[i].nil? 
|| prop.nil?\n end\n end\n obj = self.new(line_data)\n if obj.valid?\n all_objects << obj\n else\n obj.errors.each_pair do |key,value|\n value.each do |msg|\n errors << \"Line #{idx+3} column #{key.to_s.gsub(\"yogo__\", '')} #{msg.split[2..-1].join}\"\n end\n end\n end\n end\n end\n end\n all_objects.each{|o| o.save } if errors.empty?\n return errors\n end", "def people\n CSV.foreach(@nombre_archivo) do |row|\n @arr_read << Person.new(row[0],row[1],row[2],row[3],row[4])\n end\n @arr_read\n end", "def parse_csv(path)\n puts \"parse csv\"\n FasterCSV.read(path, :headers => true)\n end", "def data\n data = []\n CSV.foreach(csv, quote_char: '\"', col_sep: ';', row_sep: :auto) do |row|\n data << Country.new(row[1], row[2], row[5])\n end\n data\n end", "def loadCSV\n csvFile = File.open(\"app/assets/csv/test.csv\", \"r\") #Open file with readpermissions\n if csvFile #if file was successfully opened \n csvRowArray = IO.readlines(csvFile) # Turn each row into an array element\n rowId=1 #0 is the Header Row, 1 is the first dataset.\n recordsArray = Array.new(csvRowArray.size-1)\n while csvRowArray[rowId] do #for each row that exists \n rowEntry = csvRowArray[rowId]\n rowEntry.gsub!(/\"/,'') # Remove all the '\"'s\n wordArr = rowEntry.split(\",\") #Split the array on ','s into a new array \n newRecord = Record.new\n newRecord.REF_DATE = wordArr[0]\n newRecord.GEO = wordArr[1]\n newRecord.DGUID = wordArr[2]\n newRecord.Sex = wordArr[3]\n newRecord.Age_group = wordArr[4]\n newRecord.Student_response = wordArr[5]\n newRecord.UOM = wordArr[6]\n newRecord.UOM_ID = wordArr[7]\n newRecord.SCALAR_FACTOR = wordArr[8]\n newRecord.SCALAR_ID = wordArr[9]\n newRecord.VECTOR = wordArr[10]\n newRecord.COORDINATE = wordArr[11]\n newRecord.VALUE = wordArr[12]\n newRecord.STATUS = wordArr[13]\n newRecord.SYMBOL = wordArr[14]\n newRecord.TERMINATED = wordArr[15]\n newRecord.DECIMALS = wordArr[16]\n newRecord.save\n puts rowId\n rowId = rowId+1 \n end\n return recordsArray\n else #file not opened\n puts \"Unable to open file\" \n return \n end \n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Records the original rvm environment and sets up a new gemset.
def setup_rvm @@rvm_original_env ||= RVM.current.expanded_name @@env = RVM::Environment.current @@env.gemset_create(app_name) @@env.gemset_use!(app_name) end
[ "def reset_rvm\n @@env.use!(@@rvm_original_env)\n end", "def update_installed_gemsets(rubie)\n env = RVM::Environment.new\n env.use rubie\n\n @installed_gemsets ||= {}\n @installed_gemsets[rubie] = env.gemset_list\n @installed_gemsets[rubie]\nend", "def update_installed_gemsets(rubie)\n original_rubie = @rvm_env.environment_name\n @rvm_env.use rubie\n\n @installed_gemsets ||= {}\n @installed_gemsets[rubie] = @rvm_env.gemset_list\n @rvm_env.use original_rubie if original_rubie != rubie\n @installed_gemsets[rubie]\n end", "def setup_rubies\n rvm_loaded? ? true : raise(RuntimeError, \"RVM library not loaded.\", caller)\n @all_rubies_gemsets ||= all_rubies_gemsets(@root_path)\n @all_rubies_gemsets.keys.each do |rubie|\n if RVM.list_strings.include?(rubie)\n puts \"info: Rubie #{rubie} already installed.\"\n else\n with_rvm_environment_vars do\n install_rubie(rubie)\n end\n end\n RVM.alias_create(current_ruby_info(rubie)[0][:ruby_alias], \"#{rubie}\") unless rubie == current_ruby_info(rubie)[0][:ruby_alias]\n end\n end", "def run_in_gemset(command)\n run \"rvm #{RUBY_GEMSET_NAME} -S #{command}\"\nend", "def check_and_set_environment\n check_env\n set_env\n end", "def gemset_use(gemset, options = {})\n replace_env = options.delete(:replace_env)\n result = rvm(:gemset, :use, gemset, options)\n if result.successful?\n gemset_name = result[:rvm_gemset_name]\n @environment_name = self.class.environment_with_gemset(@environment_name, gemset_name)\n @expanded_name = nil\n self.class.reset_current!\n use_env_from_result! result if replace_env\n true\n end\n end", "def create_gemsets\n each_ruby do |rubie|\n all_gemsets(@root_path)[rubie][:gemsets].each do |hsh|\n ::RVM.gemset.create([hsh[:gemset_name]])\n end\n end\n end", "def update_rvm_gemset( deps )\n\t\ttmp = Tempfile.new( 'gemset' )\n\t\tdeps.keys.each {|dep| deps[dep.name] = deps.delete(dep) }\n\n\t\tRVM_GEMSET.each_line do |line|\n\t\t\tif line =~ /^\\s*(#|$)/\n\t\t\t\ttmp.print( line )\n\t\t\telse\n\t\t\t\tgem, version = line.split( /\\s+/, 2 )\n\n\t\t\t\tif (( newer = deps.delete(gem) ))\n\t\t\t\t\ttmp.puts( gem + ' -v' + newer.to_s )\n\t\t\t\telse\n\t\t\t\t\ttmp.print( line )\n\t\t\t\tend\n\t\t\tend\n\t\tend\n\n\t\tdeps.each do |gem, newer|\n\t\t\tnext unless newer\n\t\t\ttmp.puts( gem + ' -v' + newer.to_s )\n\t\tend\n\n\t\ttmp.close\n\n\t\tFileUtils.cp( tmp.path, RVM_GEMSET, :verbose => true )\n\tend", "def setting_gemset_for_each_ruby(rvm_gemset)\n @rubies = if @rubies.respond_to?(:split)\n @rubies.split(',').collect { |ruby| ruby << \"@#{gemset}\" }.join(',')\n end\n end", "def env_override=(new_data)\n reset\n @env_override = VividMash.new(self, new_data)\n end", "def source_rvm_environment\n rvm_path = config_value_for(:rvm_path, self.class.default_rvm_path, false)\n actual_config = defined_config.merge('rvm_path' => rvm_path)\n config = []\n actual_config.each_pair do |k, v|\n config << \"#{k}=#{escape_argument(v.to_s)}\"\n end\n run_silently \"export #{config.join(\" \")}\"\n run_silently :source, File.join(rvm_path, \"scripts\", \"rvm\")\n end", "def set_environment; self.environment = Rails.env; end", "def with_environment\n old_gem_home = ENV[\"GEM_HOME\"]\n old_gem_path = ENV[\"GEM_PATH\"]\n ENV[\"GEM_HOME\"] = @gem_home\n ENV[\"GEM_PATH\"] = @gem_home\n @logger.debug(\"Set GEM_* to: #{ENV[\"GEM_HOME\"]}\")\n\n # Clear paths so that it reads the new GEM_HOME setting\n Gem.paths = ENV\n\n # Set a custom configuration to avoid loading ~/.gemrc loads and\n # /etc/gemrc and so on.\n old_config = nil\n begin\n old_config = 
Gem.configuration\n rescue Psych::SyntaxError\n # Just ignore this. This means that the \".gemrc\" file has\n # an invalid syntax and can't be loaded. We don't care, because\n # when we set Gem.configuration to nil later, it'll force a reload\n # if it is needed.\n end\n Gem.configuration = NilGemConfig.new\n\n # Clear the sources so that installation uses custom sources\n old_sources = Gem.sources\n Gem.sources = Gem.default_sources\n Vagrant::Bundler::DEFAULT_GEM_SOURCES.each do |source|\n if !Gem.sources.include?(source)\n Gem.sources << source\n end\n end\n\n # Use a silent UI so that we have no output\n Gem::DefaultUserInteraction.use_ui(Gem::SilentUI.new) do\n return yield\n end\n ensure\n # Restore the old GEM_* settings\n ENV[\"GEM_HOME\"] = old_gem_home\n ENV[\"GEM_PATH\"] = old_gem_path\n\n # Reset everything\n Gem.configuration = old_config\n Gem.paths = ENV\n Gem.sources = old_sources.to_a\n end", "def with_rvm_installed\n with_env('HOME' => \"#{__dir__}/fixtures/fake_rvm_home\") do\n yield\n end\n end", "def restore_git_system_env_variables; end", "def update_installed_rubies\n @installed_rubies = RVM.list_strings\n @installed_rubies\nend", "def rvm_gemset\n File.basename(`rvm current`.strip)\n end", "def record_environment\n return if down?\n\n @internal_metadata[:environment] = connection.pool.db_config.env_name\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Reverts to the original rvm environment
def reset_rvm @@env.use!(@@rvm_original_env) end
[ "def reset_current!\n Environment.reset_current!\n end", "def restore_env(env)\n @src, @tree, @block_ial, @stack, @text_type = *env\n end", "def restore\n @backend.restore\n setup_container_name\n setup_family\n end", "def reset(mode='soft')\n command = 'reset'\n vm_command = %Q{#{@base_command} #{command} \"#{@datastore}\" #{mode}}\n log vm_command\n result = system(vm_command)\n result ? log(\"VM has been resetted.\") : log(\"Error! VM could not be reset.\")\n result\n end", "def reset_env\n clear_env\n set_env self.env_defaults\n end", "def setup_rvm\n @@rvm_original_env ||= RVM.current.expanded_name\n\n @@env = RVM::Environment.current\n @@env.gemset_create(app_name)\n @@env.gemset_use!(app_name)\n end", "def reset\n send_to_vm('system_reset')\n end", "def reset_vm()\n @log.debug \"Reseting vm for next test\"\n vmm_command(eb(@config['vmm']['loadteststate']))\n # Give it a half a tic to reset...\n sleep 0.5\n end", "def reset\n @instance = nil\n @local_env = nil\n end", "def safe_restore(old_env)\n with_retry do\n @mutex.synchronize do\n old_env.each { |key, val| val ? ENV[key] = val : ENV.delete(key) }\n end\n end\n end", "def reboot_fusion_vm(options)\n reset_fusion_vm(options)\nend", "def recreateVM(vm)\n # Call the service controller to delete the VM and update the master\n paramfile = $WORKDIR + \"/envconfig.\" + @name\n execString = $SVCBIN + \" -c \" + paramfile + \" -v \" + vm.vmid + \" -x \" + @envid \n childpid=Kernel.fork\n if childpid == nil\n $stdout.reopen($ENVLOGFILE + \".#{@envid}\" ,'a+')\n $stderr.reopen($ENVLOGFILE + \".#{@envid}\",'a+')\n exec(execString)\n end\n jid = $jobManager.register(childpid, @envid, \"RECREATE\", @name, \"RUNNING\", 0)\n return jid \n end", "def restore_memory\n @restore_memory\n end", "def hard_reset\n update(machine: nil, state: 'dirty')\n end", "def restore\n backend_class.instance_set(@saved_backend_instance)\n if ::Specinfra.configuration.backend != @saved_backend_name\n backend_class.host_reset\n ::Specinfra.configuration.backend = @saved_backend_name\n end\n end", "def restore_environment(env)\n env[:sig].each do |signal, handler|\n trap signal, handler\n end\n unless env[:tty].nil?\n `stty #{env[:tty]}`\n end\n @interact = false\n end", "def undo_bundler\n clean_env = nil\n Bundler.with_clean_env do\n clean_env = ENV.to_hash\n end\n ENV.replace(clean_env)\n end", "def restore\r\n @status = @stack.shift if @stack.size > 0\r\n end", "def restore_home\n FileUtils.rm_rf(new_home_directory)\n \tENV['HOME'] = @original_home\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Returns the rbenv ruby version
def rbenv_ruby `rbenv version`.split(' ').first end
[ "def ruby_version\n ENV[\"RUBY_VERSION\"]\n end", "def ruby_version\n if ruby = ENV['RUBY']\n File.basename(ruby)\n else\n RUBY_VERSION\n end\n end", "def determine_rbenv_ruby_version_if_not_given\n if node[:rbenv_passenger][:rbenv_ruby].nil?\n ruby_version = node[:rbenv][:global]\n \n node.set[:rbenv_passenger][:rbenv_ruby] = ruby_version\n Chef::Log.debug(%{Setting node[:rbenv_passenger][:rbenv_ruby] = } + %{\"#{node[:rbenv_passenger][:rbenv_ruby]}\"})\n end\n end", "def rbenv(c, str)\n c.env['RBENV_VERSION'] = c[:ruby]\n\n # Strip out any rbenv version bin path. Assumes rbenv is\n # installed in .rbenv. This is necessary because if we run\n # console-mux via an rbenv shim, that shim first tacks on\n # the bin path to the in-use ruby version prior to exec'ing\n # the actual console-mux... But we don't want that path to\n # follow down to sub processes spawned by console-mux.\n c.env['PATH'] =\n ENV['PATH'].\n split(File::PATH_SEPARATOR).\n reject{|p| p =~ %r{.rbenv/versions}}.\n join(File::PATH_SEPARATOR)\n \n str\n end", "def ruby_version; end", "def ruby_version\n\t\treturn RUBY_VERSION.split( /\\./, 3 ).map( &:to_i ).pack( 'n*' )\n\tend", "def ruby_version\n if ruby = ENV['RUBY_ROOT']\n File.basename(ruby)\n else\n RUBY_VERSION\n end\n end", "def ruby_version\n @ruby_version ||= begin\n version_ = parse(::RUBY_VERSION, :standard)\n if version_.release_type == :final\n version_ = version_.change({:patchlevel => ::RUBY_PATCHLEVEL},\n :patchlevel_required => true, :patchlevel_delim => '-p')\n end\n version_\n end\n end", "def current_ruby_version\n with_rvm? ? full_ruby_version(@ruby_string) : shell(\"ruby -v\")\n end", "def python_version\n return `python --version 2>&1`.split(/([\\d\\.]+)/)[1]\n end", "def ruby_versions\n if jruby?\n [ \"JRuby #{JRUBY_VERSION}\", \"like Ruby #{RUBY_VERSION}\" ]\n else\n [ \"Ruby #{RUBY_VERSION}\" ]\n end\n end", "def rvm_ruby\n @@env.expanded_name.match(/([\\w\\-\\.]+)/)[1]\n end", "def ruby\n @ruby ||= \\\n begin\n rb = runtime['executable']\n pattern = Regexp.new(Regexp.quote(runtime['version']))\n output = StagingPlugin.get_ruby_version(rb)\n if $? == 0 && output.strip =~ pattern\n rb\n elsif \"#{RUBY_VERSION}p#{RUBY_PATCHLEVEL}\" =~ pattern\n current_ruby\n else\n puts \"No suitable runtime found. Needs version matching #{runtime['version']}\"\n exit 1\n end\n end\n end", "def getRubyVersion(fn)\n if fn.length > 1\n puts \".ruby-version: #{File.open(fn).gets.chop}. Version actually running is in purple area above. Can't tell if ruby is selected by #{fn} or TM Preferences. At this time rbenv isn't functioning for TM. 2013.11.23\"\n else\n puts \"fn: #{fn} isn't in this folder. Need to change the script to go until finds one.\"\n end\nend", "def version_from_ruby_version_file\n shell_return = run_shell(\"cat .ruby-version\")\n shell_return.nil? ? nil : shell_return.stdout\n end", "def rbenv_installed?\n @rbenv_installed ||= `which rbenv`.present?\n end", "def ruby_version\n @ruby_version ||= begin\n version = @options['ruby_version'] || RUBY_VERSION\n normalize_version version\n end\n end", "def target_ruby_version; end", "def rbenv_installed?\n !which('rbenv').nil?\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Returns the RVM ruby version
def rvm_ruby @@env.expanded_name.match(/([\w\-\.]+)/)[1] end
[ "def ruby_version\n default = %x{ rvm current}.strip\n items = %x{ rvm ls strings }.split.compact\n\n ruby = menu_with_default \"RVM Ruby version to use for deployment:\", items,default\n ruby = ask \"Enter alternative RVM Ruby string: \" if ruby =~ /Other/\n ruby\n end", "def current_ruby_version\n with_rvm? ? full_ruby_version(@ruby_string) : shell(\"ruby -v\")\n end", "def ruby_version\n ENV[\"RUBY_VERSION\"]\n end", "def current_ruby_default\n RVM.list_default\nend", "def vm\n # RUBY_ENGINE_VERSION returns the VM version, which\n # will differ from RUBY_VERSION for non-mri VMs.\n if defined?(RUBY_ENGINE_VERSION)\n \"#{RUBY_ENGINE}-#{RUBY_ENGINE_VERSION}\"\n else\n # Ruby < 2.3 doesn't support RUBY_ENGINE_VERSION\n \"#{RUBY_ENGINE}-#{RUBY_VERSION}\"\n end\n end", "def vm\n # RUBY_ENGINE_VERSION returns the VM version, which\n # will differ from RUBY_VERSION for non-mri VMs.\n if defined?(RUBY_ENGINE_VERSION)\n \"#{RUBY_ENGINE}-#{RUBY_ENGINE_VERSION}\"\n else\n # Ruby < 2.3 doesn't support RUBY_ENGINE_VERSION\n \"#{RUBY_ENGINE}-#{RUBY_VERSION}\"\n end\n end", "def rvmrc_ruby_name\n extract_rubie(rvmrc)\n end", "def ruby_version\n if ruby = ENV['RUBY']\n File.basename(ruby)\n else\n RUBY_VERSION\n end\n end", "def ruby_version\n\t\treturn RUBY_VERSION.split( /\\./, 3 ).map( &:to_i ).pack( 'n*' )\n\tend", "def ruby_version; end", "def perl_version\n `perl --version 2>&1`.match(/\\(v([\\d\\.]+)\\)/)[1]\n end", "def ruby_version\n @ruby_version ||= begin\n version = @options['ruby_version'] || RUBY_VERSION\n normalize_version version\n end\n end", "def version_from_ruby_version_file\n shell_return = run_shell(\"cat .ruby-version\")\n shell_return.nil? ? nil : shell_return.stdout\n end", "def ruby_version\n if ruby = ENV['RUBY_ROOT']\n File.basename(ruby)\n else\n RUBY_VERSION\n end\n end", "def read_ruby_version vim\n script = %{require \"rbconfig\"; print File.join(RbConfig::CONFIG[\"bindir\"], RbConfig::CONFIG[\"ruby_install_name\"])}\n version = `#{vim} --nofork --cmd 'ruby #{script}' --cmd 'q' 2>&1 >/dev/null | grep -v 'Vim: Warning'`.strip\n version unless version.empty? or version.include?(\"command is not available\")\nend", "def rvm_version?\n choose do |menu|\n menu.prompt = ''\n menu.choice('latest stable') { nil }\n menu.choice('bleeding edge') { '--head' }\n end\n end", "def ruby\n @ruby ||= \\\n begin\n rb = runtime['executable']\n pattern = Regexp.new(Regexp.quote(runtime['version']))\n output = StagingPlugin.get_ruby_version(rb)\n if $? == 0 && output.strip =~ pattern\n rb\n elsif \"#{RUBY_VERSION}p#{RUBY_PATCHLEVEL}\" =~ pattern\n current_ruby\n else\n puts \"No suitable runtime found. Needs version matching #{runtime['version']}\"\n exit 1\n end\n end\n end", "def rvm_gemset\n File.basename(`rvm current`.strip)\n end", "def determine_rvm_ruby_if_not_given\n if node['rvm_passenger']['rvm_ruby'].nil?\n rvm_ruby = node['rvm']['default_ruby']\n rvm_ruby += \"@passenger\" unless rvm_ruby == \"system\"\n\n node.set['rvm_passenger']['rvm_ruby'] = rvm_ruby\n Chef::Log.debug(%{Setting node['rvm_passenger']['rvm_ruby'] = } +\n %{\"#{node['rvm_passenger']['rvm_ruby']}\"})\n end\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Returns true if rbenv is installed.
def rbenv_installed? @rbenv_installed ||= `which rbenv`.present? end
[ "def rbenv_installed?\n !which('rbenv').nil?\n end", "def has_virtualenv_installed(python)\n `#{python} -m virtualenv --help 2>&1`\n if (0 != $?.to_i)\n false\n else\n true\n end\n end", "def ruby_installed?\n !which('ruby').nil?\n end", "def ruby?\n exist? 'Gemfile'\n end", "def ruby_build_installed?\n !which('ruby-build').nil?\n end", "def installed?\n ::File.directory?(ruby_install_path)\n end", "def rubygems?\n defined?(::Gem)\n end", "def python?\n exist? 'requirements.txt'\n end", "def installed?\n ::File.directory?(\"/opt/rubies/ruby-#{version}\") ||\n Dir['/opt/rubies/ruby-*'].any? { |ruby| ruby.include?(version) }\n end", "def rubygems?\n defined? ::Gem\n end", "def installed?\n !IO.popen(\"which #{self}\"){|i| i.read}.empty?\n end", "def env_exists?(ruby_string)\n return true if system_ruby?(ruby_string)\n\n rubie = select_ruby(ruby_string)\n gemset = select_gemset(ruby_string)\n\n if gemset\n gemset_exists?(:ruby => rubie, :gemset => gemset)\n else\n ruby_installed?(rubie)\n end\nend", "def installed?\n formula.any_version_installed?\n end", "def installed?\n MacOS.dev_tools_path == Pathname.new(\"/usr/bin\")\n end", "def php_installed?\n !which('php').nil?\n end", "def env_exists?(ruby_string)\n return true if system_ruby?(ruby_string)\n\n rubie = select_ruby(ruby_string)\n gemset = select_gemset(ruby_string)\n\n if gemset\n gemset_exists?(:ruby => rubie, :gemset => gemset)\n else\n ruby_installed?(rubie)\n end\n end", "def environment?\n dir?('.bundle') || dir?('.virtualenv') || dir?('node_modules')\n end", "def have_brew?\n have_command? :brew\n end", "def installed?\n result = current_shell.exec \"#{@bin_path}/brew info #{@package_name}\"\n\n if current_shell.last_exit_status.zero?\n !result.match %r[Not installed]\n else\n false\n end\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Returns true if RVM is installed.
def rvm_installed? @rvm_installed ||= `which rvm`.present? end
[ "def rvm_installed?\n cmd_test %{-s \"/usr/local/lib/rvm\"}\nend", "def rvm?\n\t\t\tFile.exists?(RvmPow::RVM_BINARY)\n\t\tend", "def installed?\n if File.exists?(dm_cmd)\n return true\n else\n message.warn \"LVM Tools are not installed\"\n return false\n end\n end", "def ruby_installed?(ruby)\n cmd_if %{rvm list strings | grep -q \"#{ruby}\" >/dev/null}, true\nend", "def tools_installed?()\n installed=`rpm -q VMwareTools`\n if $? == 0 \n return true\n end\n return false\nend", "def installed?\n !IO.popen(\"which #{self}\"){|i| i.read}.empty?\n end", "def installed?\n ::File.directory?(\"/opt/rubies/ruby-#{version}\") ||\n Dir['/opt/rubies/ruby-*'].any? { |ruby| ruby.include?(version) }\n end", "def ruby_installed?(rvm_path, rvm_user, ruby_version)\n cmd = \"su #{rvm_user} -l -c 'source #{rvm_path}/scripts/rvm && \\\n rvm list'\"\n ::Chef::Log.debug(\"Running [#{cmd}]\")\n results = shell_out(cmd)\n results.stdout.match(ruby_version)\n end", "def installed?\n current_shell.exec \"rpm -qa | grep #{@package_name}\"\n\n current_shell.last_exit_status.zero?\n end", "def ruby_installed?\n !which('ruby').nil?\n end", "def with_rvm?\n !@ruby_string.nil?\n end", "def installed?\n result = current_shell.exec \"#{@bin_path}/brew info #{@package_name}\"\n\n if current_shell.last_exit_status.zero?\n !result.match %r[Not installed]\n else\n false\n end\n end", "def installed?\n ::File.directory?(ruby_install_path)\n end", "def installed?(utility)\n return false if execute_as_root(\"which #{utility}\").nil?\n true\n end", "def installed?\n output = current_shell.exec \"dpkg --status #{@package_name}\"\n return false unless current_shell.last_exit_status.zero?\n\n !output.match(/not-installed/)\n end", "def proselint_installed?\n `which proselint`.strip.empty? == false\n end", "def proselint_installed?\n `which proselint`.strip.empty?\n end", "def librarian_installed?\n return true if ENV['SPEC_BUNDLE_EXEC']\n `librarian-puppet version`\n $?.exitstatus == 0\n end", "def is_installed?(package)\n begin\n cmd.run \"dpkg -s #{package}\" and return true\n rescue\n return false\n end\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
GET /chase_vehicles GET /chase_vehicles.json
def index @chase_vehicles = ChaseVehicle.all end
[ "def index\n @vehicles = Vehicle.all\n\n render json: @vehicles\n end", "def vehicles\n vehiclesUrl = URI.parse('https://owner-api.teslamotors.com/api/1/vehicles')\n\n response = Net::HTTP.start(vehiclesUrl.host, use_ssl: true, verify_mode: OpenSSL::SSL::VERIFY_NONE) do |http|\n http.get(vehiclesUrl.request_uri, { 'Authorization' => \"Bearer #{@token}\" })\n end\n\n throw \"Can't load vehicles\" unless response.code == '200'\n\n vehicles = JSON.parse(response.body)\n\n return vehicles['response'].map {|x| Vehicle.new(@token, x, @debug)}\n end", "def vehicle\n fetch('conan.vehicles')\n end", "def vehicle\n fetch('venture_bros.vehicle')\n end", "def vehicles\n @vehicles ||= begin\n _, json = get_json(\"/vehicles\")\n json.map { |data| Vehicle.new(self, data) }\n end\n end", "def show\n render json: @vehicle\n end", "def vehicle\n fetch('final_space.vehicles')\n end", "def index\n @vehicles = Vehicle.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @vehicles }\n end\n end", "def vehicles_all\n @work_order_vehicles = WorkOrderVehicle.by_id\n render json: serialized_work_order_vehicles(@work_order_vehicles)\n end", "def index\n @load_vehicle = LoadVehicle.all\n respond_to do |format|\n format.json { render json: @load_vehicle }\n end\n end", "def show\n @vehicle = Vehicle.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @vehicle }\n end\n end", "def index\n @vehicle_types = VehicleType.all\n\n render json: @vehicle_types\n end", "def vehicleList()\n @vehicleManager.vehicleList ;\n end", "def index\n @recovered_vehicles = RecoveredVehicle.all\n end", "def index\n @has_vehicles = HasVehicle.all\n end", "def get_models_for_make_id_year\n render json: vehicle_service.get_models_for_make_id_year(params[:make_id], params[:make_year])\n end", "def index\n @vehicle_services = VehicleService.all\n end", "def index\n @passenger_vehicles = PassengerVehicle.all\n end", "def show\n vehicle=Vehicle.where(uid: params[:id]).first\n render :json => {\"vehicle\"=>vehicle }\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
DELETE /chase_vehicles/1 DELETE /chase_vehicles/1.json
def destroy @chase_vehicle.destroy respond_to do |format| format.html { redirect_to :back } format.json { head :no_content } end end
[ "def destroy\n @vehicle.destroy\n render json: { status: true }\n end", "def destroy\n @vehicle = Vehicle.find(params[:id])\n @vehicle.destroy\n\n respond_to do |format|\n format.html { redirect_to root_path }\n format.json { head :no_content }\n end\n end", "def destroy\n @vehicle = Vehicle.find(params[:id])\n @vehicle.destroy\n\n respond_to do |format|\n format.html { redirect_to vehicles_url }\n format.json { head :no_content }\n end\n end", "def destroy\n destroy_resource @vehicle, vehicles_url\n end", "def destroy\n @has_vehicle.destroy\n respond_to do |format|\n format.html { redirect_to has_vehicles_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @type_vehicle = TypeVehicle.find(params[:id])\n @type_vehicle.destroy\n\n respond_to do |format|\n format.html { redirect_to type_vehicles_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @new_vehicle.destroy\n respond_to do |format|\n format.html { redirect_to new_vehicles_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @load_vehicle.destroy\n respond_to do |format|\n format.html { redirect_to load_vehicle_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @vehicle_model.destroy\n respond_to do |format|\n format.html { redirect_to vehicle_models_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @daily_data_vehicle = DailyDataVehicle.find(params[:id])\n @daily_data_vehicle.destroy\n\n respond_to do |format|\n format.html { redirect_to daily_data_vehicles_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @vehicle_model = VehicleModel.find(params[:id])\n @vehicle_model.destroy\n\n respond_to do |format|\n format.html { redirect_to vehicle_models_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @vehicle = Vehicle.find(params[:id])\n @vehicle.destroy\n\n respond_to do |format|\n format.html { redirect_to(vehicles_url) }\n format.xml { head :ok }\n end\n end", "def destroy\n @vehicle_make = VehicleMake.find(params[:id])\n @vehicle_make.destroy\n\n respond_to do |format|\n format.html { redirect_to vehicle_makes_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @applied_vehicle_detail = AppliedVehicleDetail.find(params[:id])\n @applied_vehicle_detail.destroy\n\n respond_to do |format|\n format.html { redirect_to applied_vehicle_details_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @vehicle_fuel = VehicleFuel.find(params[:id])\n @vehicle_fuel.destroy\n\n respond_to do |format|\n format.html { redirect_to vehicle_fuels_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @sub_vehicle = SubVehicle.find(params[:id])\n @sub_vehicle.destroy\n\n respond_to do |format|\n format.html { redirect_to(sub_vehicles_url) }\n format.xml { head :ok }\n end\n end", "def destroy\n @vehicle_road_tax.destroy\n respond_to do |format|\n format.html { redirect_to vehicle_road_taxes_url }\n format.json { head :no_content }\n end\n end", "def delete_aos_version(args = {}) \n delete(\"/aosversions.json/#{args[:aosVersionId]}\", args)\nend", "def destroy\n @vehicule = Vehicule.find(params[:id])\n @vehicule.destroy\n\n respond_to do |format|\n format.html { redirect_to vehicules_url }\n format.json { head :no_content }\n end\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Add a tag with the given key and data value to all collections matching the query condition. If append is true, this method will append the value to any existing data (if not already present), rather than overwriting it. The optional block gets passed to Array#uniq to determine whether a piece of data is already present in the tag. Newer versions of data will overwrite older ones.
def add_tag(key, value, condition, token, append=false, only_granules=true, &block) query = tag_condition_to_query(condition) # https://bugs.earthdata.nasa.gov/browse/CMR-2855 will fix the need for some of this logic # https://bugs.earthdata.nasa.gov/browse/CMR-2609 as well if value.present? || only_granules query_params = {include_tags: key, include_has_granules: true} response = json_query_collections(condition, token, query_params) return response unless response.success? && response.body['feed']['entry'].present? entries = response.body['feed']['entry'] entries = entries.select {|entry| entry['has_granules']} if only_granules assoc_data = nil if append data = Array.wrap(value) assoc_data = entries.map do |entry| tags = entry['tags'] data = Array.wrap(tags[key]['data']) + data if tags && tags[key] # Ensure no duplicate values and that newer values overwrite older ones data = data.reverse.uniq(&block).reverse {'concept-id' => entry['id'], 'data' => data} end elsif value.present? assoc_data = entries.map { |entry| {'concept-id' => entry['id'], 'data' => value} } else assoc_data = entries.map { |entry| {'concept-id' => entry['id']} } end if assoc_data.present? response = post("/search/tags/#{key}/associations", assoc_data.to_json, token_header(token)) end else response = post("/search/tags/#{key}/associations/by_query", query.to_json, token_header(token)) end response end
[ "def append(key, value); end", "def append(value, hash, array)\n key = array[1]\n hash[value[:key]] = { flags: value[:flags], exptime: value[:exptime], value: hash[key][:value] + value[:value], cas_unique: value[:cas_unique] }\n value[:reply] != 'false' ? (self.result = \"\\r\\nSTORED\") : (self.result = '')\n end", "def append(key, value)\n perform(:append, key, value.to_s)\n end", "def append(key, value)\n value.then do |resolved_value|\n @client.perform(:append, key, resolved_value.to_s)\n end\n end", "def user_append_keyword(attribs, dir_info)\n attribs = user_record_name_alternatives(attribs)\n check_critical_attribute( attribs, :record_name )\n\n attribs[:value] = attribs[:value] || attribs[:keywords]\n attribs[:value] = attribs[:value] || attribs[:keyword]\n\n check_critical_attribute( attribs, :value, :keyword )\n attribs = tidy_attribs(attribs)\n\n command = {action: 'append', scope: 'Users', attribute: 'Keywords'}\n user_attrs = attribs.merge(command)\n\n answer = dscl( user_attrs, dir_info )\n attribs[:value] = nil\n return answer\n end", "def append(new_data)\n end", "def append!\n self.operation = :append\n end", "def append_attribute(key_, value_)\n attribute(key_, value_, :append)\n end", "def append_to_association(association_map, key, datum_to_append, opts = {})\n if opts[:coord] # we will be interacting with the associations of a part of a collection if coord is specified\n association_map.putrc(opts[:coord][0], opts[:coord][1], key, []) if association_map.getrc(opts[:coord][0], opts[:coord][1], key).nil?\n association_map.getrc(opts[:coord][0], opts[:coord][1], key) << datum_to_append\n else\n association_map.put(key, []) if association_map.get(key).nil?\n association_map.get(key) << datum_to_append\n end\n end", "def addUniques(uqs); uqs.each { |uq| addUnique uq } end", "def add_tag_recursive(xml, data)\n data.each do |key, value|\n if data[key].is_a?(Hash)\n xml.tag!(key) do\n add_tag_recursive(xml, data[key])\n end\n else\n xml.tag!(key, value)\n end\n end\n end", "def addUnique(key, value)\n curVal = @hash[key]\n if curVal then\n curVal += value if not curVal.include?(value)\n @hash[key] = curVal\n else\n @hash[key] = value\n end\n end", "def add_tags(*tags)\n @hash_tags.concat(tags.flatten)\n end", "def with_tag_data( newhash={} )\n\t\traise LocalJumpError, \"no block given\" unless block_given?\n\t\t# self.log.debug \"Overriding tag state with: %p\" % [ newhash ]\n\n\t\tbegin\n\t\t\t@tag_data.push( @tag_data.last.merge(newhash) )\n\t\t\tyield( self )\n\t\tensure\n\t\t\t@tag_data.pop\n\t\tend\n\tend", "def add(hash)\n hash = HashWithIndifferentAccess.new(hash.merge(primary_key => generate_unique_id(hash)))\n add_timestamps_to_hash(hash) if timestamps?\n interface.add(data_collection, hash).first\n end", "def append( arg=nil )\n @append_mode = true\n @condition_proc = ( arg ? arg[ :condition ] : Proc.new { |old,new| true } )\n Datamith::Runner::appended[ self.class.new_table ] ||= Hash.new\n end", "def concat_any_elements(key, elements)\n key_d = CartonDb::Datum.for_plain(key)\n segment = segment_containing(key_d)\n segment.write_key_d_elements key_d, elements\n end", "def append_data(data); end", "def merge_value(hash, key, value)\n return unless hash\n\n values = hash[key] ||= []\n if key == '@list'\n values << value\n elsif list?(value)\n values << value\n elsif !values.include?(value)\n values << value\n end\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
The number of citizen not engaged in color pass in args
def citizens_ready_in(color) citizens.where(:color => color, :engaged => false).count end
[ "def amount_of_color(color)\n colorizr_vector.amount_of_color(color)\n end", "def diversity\n color_count.keys\n end", "def nc\n Ncurses::COLOR_PAIR(@id)\n end", "def assign_game_color count\n case count\n when 0\n return \"transparent\"\n when 6\n return $app_red\n else\n return $app_blue\n end\n end", "def amount_of_color(color)\n @vector[map_to_bin(color[0], color[1], color[2])]\n end", "def numColor(number)\n if(number > 0)\n return 'green'\n elsif(number < 0)\n return 'red'\n else\n return ''\n end\n end", "def threatened_pieces_number(color)\n all_pieces = find_pieces(color)\n number = 0\n enemy_color = (color == :white ? :black : :white)\n squares_threatened = threatening_squares(enemy_color)\n all_pieces.each do |piece|\n if squares_threatened.include?(piece.pos)\n number+=1\n end\n end\n number\n end", "def count_with_red_ribbon( count, step = 10, align = :right )\n count_with_image( count, step, 'rosette.png', align )\n end", "def element_non_green_count\n @record_data_set.where(color: NON_GREEN_COLORS).group(:column_name).order('count_all DESC').limit(10).count.to_a\n end", "def count_occurrences\n array = self.convert_color_to_array\n occurrences = {}\n array.sort.uniq.each { |color| occurrences[color] = array.count(color)}\n occurrences\n end", "def color_from_vote_count(positive, neutral, negative)\n \n #Rails.logger.error '**********BEGIN********'\n #Rails.logger.error 'positive = %{u}' % { :u => positive }\n #Rails.logger.error 'neutral= %{n}' % { :n => neutral }\n #Rails.logger.error 'negative= %{d}' % { :d => negative }\n \n #set red as the default\n color = 'cb0223' #red\n \n #neutral wins if greater or equal to negative\n if neutral >= negative\n color = 'F88017'\n end\n \n #positive wins if great than neutral and greater or equal to negative\n if positive > neutral and positive >= negative\n color = '23b62a'\n end \n \n #Rails.logger.error 'color= %{c}' % { :c => color }\n #Rails.logger.error '**********END********'\n \n return color\n end", "def as_red\n @red += 1\n end", "def color_page_count\n return @color_page_count\n end", "def colorCounter(str)\n cntr = 0\n colors = ['blue','green','gray','red','black','purple']\n\n str.split.each do |word|\n colors.each do |color| \n if word.downcase == color\n cntr +=1\n break\n end\n end\n end\n return cntr\n end", "def determine_color\n colors = {'Red' => 0, 'Green' => 0, 'Yellow' => 0, 'Blue' => 0}\n @hand.each do |card|\n colors[card.suit] += 1\n end\n return colors.key(colors.values.max)\n end", "def color_i\n if (failures + errors) > 0\n 0\n elsif skips > 0\n 1\n else\n 2\n end\n end", "def vary_colors; end", "def scan_for_colors; end", "def counter(*args)\n identifier(\"counter(#{args.map {|a| a.to_s(options)}.join(',')})\")\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Return the IP associated with this record if it can be deduced from the record name
def ip ip = nil unless valid? return nil end begin case name when /\.in-addr\.arpa$/ name_without_suffix = name.sub(/\.in-addr\.arpa$/, '') quads = name_without_suffix.split('.') if quads.size == 4 quads.reverse! ip = quads.join('.') end when /\.ip6\.arpa$/ name_without_suffix = name.sub(/\.ip6\.arpa$/, '') nibbles = name_without_suffix.split('.') nibbles.each do |nibble| if nibble.empty? raise DnsRecord::EmptyNibbleError end end if nibbles.size == 32 n = nibbles.reverse! ip = \ n[0..3].join('') + ":" + n[4..7].join('') + ":" + n[8..11].join('') + ":" + n[12..15].join('') + ":" + n[16..19].join('') + ":" + n[20..23].join('') + ":" + n[24..27].join('') + ":" + n[28..31].join('') ip = NetAddr::CIDR.create(ip).ip(:Short => true) end end rescue DnsRecord::EmptyNibbleError ip = nil end ip end
[ "def info_detected_address\n self.class.info(name)[:ip]\n end", "def ipaddress\n block = /\\d{,2}|1\\d{2}|2[0-4]\\d|25[0-5]/\n re = /\\A#{block}\\.#{block}\\.#{block}\\.#{block}\\z/\n if info[1]['host'] && re =~ info[1]['host']\n info[1]['host']\n else\n address(info[1]['http_address']).split(':')[0] \n end\n end", "def ptr(ip)\n # use cache if ip was already resolved\n return @cache[ip] if @cache[ip]\n\n begin\n name = @hypedns.getname(ip).to_s\n @cache[ip] = name\n return name\n rescue Resolv::ResolvError, SocketError\n return nil\n end\n end", "def kvm_ip(name)\n addr = ip_by_mac(node_mac(name))\n addr.empty? ? ip_by_mount(name) : addr\nend", "def reversed_ip\n return nil if reverse_name.nil?\n\n @reverse_ip ||= @dns.getaddress(reverse_name)\n @reverse_ip.to_s\n end", "def ip_address\n begin\n return Resolv.getaddress(self.address)\n rescue Resolv::ResolvError\n return nil\n end\n end", "def public_ip_v4_address\n addr = nil\n loop do\n addr = ip_v4_address\n break unless reserved_net_checker[addr]\n end\n addr\n end", "def ipaddress(family = 'ipv4')\n return address if address.present? && IPAddr.new(address).send(\"#{family}?\")\n resource.ipaddresses.detect { |ip| IPAddr.new(ip).send(\"#{family}?\") }\n end", "def describe_as_ip\n parts = strip.split('.')\n bytes = parts.zip(parts.map(&:to_i)).map {|(str,val)|\n val if ((1..255) === val) || (val == 0 && str == '0')\n }.squash\n\n if bytes.length != 4\n false\n elsif bytes.starts_with? 0 # Source hosts on \"this\" network\n :reserved\n elsif bytes.starts_with? 127 # Loopback network; RFC1700\n :loopback\n elsif bytes.starts_with? 10 # Class-A private; RFC1918\n :private\n elsif bytes.starts_with?(172) && ((16..31) === bytes[1]) # Class-B private; RFC1918\n :private\n elsif bytes.starts_with? 169, 254 # Link-local range; RFC3330/3927\n bytes[2].in?(0, 255) ? :reserved : :self_assigned\n elsif bytes.starts_with? 192, 0, 2 # TEST-NET - used as example.com IP\n :reserved\n elsif bytes.starts_with? 192, 88, 99 # 6-to-4 relay anycast; RFC3068\n :reserved\n elsif bytes.starts_with? 192, 168 # Class-C private; RFC1918\n :private\n elsif bytes.starts_with? 198, 18 # Benchmarking; RFC2544\n :reserved\n else\n :public\n end\n end", "def parse_ip\n @request[FHOST] || BLANK_STR\n end", "def ip_address_record(env)\n data_type = env[:machine].provider_config.private_only ? \"primaryBackendNetworkComponent\" : \"primaryNetworkComponent\"\n data_type = \"primaryBackendNetworkComponent\" if env[:machine].provider_config.force_private_ip\n mask = \"#{data_type}.primaryIpAddressRecord.id,#{data_type}.primaryIpAddressRecord.ipAddress\"\n record = sl_warden { env[:sl_machine].object_mask(\"mask[#{mask}]\").getObject }\n return {\n :address => record[data_type][\"primaryIpAddressRecord\"][\"ipAddress\"],\n :id => record[data_type][\"primaryIpAddressRecord\"][\"id\"]\n }\n end", "def private_ip_address\n private_ip_addresses.first\n end", "def public_ip_address\n public_ip_addresses.first\n end", "def lookup_ip_for(hostname)\n ip = `nslookup #{hostname} | tail -n 2 | head -n 1 | cut -d ' ' -f 2`.chomp\n ip != '' ? 
ip : nil\n end", "def get_ip_address(name, eth)\n network[:interfaces][eth][:addresses].each do |key, info|\n if info[\"family\"] == \"inet\"\n rackspace[name] = key\n break # break when we found an address\n end\n end\n end", "def ip?\n return (proto == 'ip')\n end", "def query(fqdn)\n return unless fqdn\n return unless name = fqdn2name(fqdn)\n return unless record = Record.active.where(name: name).first\n record.ip\n end", "def ip()\n \"172.16.#{@project.id}.#{@index + 2}\"\n end", "def name\n if ipv4?\n \"[#{ip_address}]\"\n elsif ipv6?\n \"[IPv6:#{ip_address}]\"\n elsif @config[:host_encoding] && @config[:host_encoding] == :unicode\n ::SimpleIDN.to_unicode(host_name)\n else\n dns_name\n end\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
PATCH/PUT /result_statistic_sections/1 PATCH/PUT /result_statistic_sections/1.json
def update respond_to do |format| if @result_statistic_section.update(result_statistic_section_params) format.html { redirect_to edit_result_statistic_section_path(@result_statistic_section), notice: t('success') } format.json { render :show, status: :ok, location: @result_statistic_section } format.js do @eefpst1 = @result_statistic_section .population .extractions_extraction_forms_projects_sections_type1 @extraction = @result_statistic_section.extraction @project = @result_statistic_section.project @extraction_forms_projects = @project.extraction_forms_projects @eefpst1s = ExtractionsExtractionFormsProjectsSectionsType1 .by_section_name_and_extraction_id_and_extraction_forms_project_id('Outcomes', @extraction.id, @extraction_forms_projects.first.id) end else format.html { render :edit } format.json { render json: @result_statistic_section.errors, status: :unprocessable_entity } end end end
[ "def update\n respond_to do |format|\n if @result_statistic_section.update(result_statistic_section_params)\n format.html do |_format|\n if params[:result_statistic_section].has_key? :extraction_ids\n redirect_to consolidate_result_statistic_section_path(@result_statistic_section,\n extraction_ids: params[:result_statistic_section][:extraction_ids]),\n notice: t('success')\n else\n redirect_to edit_result_statistic_section_path(@result_statistic_section),\n notice: t('success')\n end\n end\n format.json { render :show, status: :ok, location: @result_statistic_section }\n format.js do\n @result_statistic_section.measures.each do |measure|\n @result_statistic_section.related_result_statistic_sections.each do |rss|\n rss.measures << measure unless rss.measures.include?(measure)\n end\n end\n end\n else\n format.html { render :edit }\n format.json { render json: @result_statistic_section.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @result_statistic_section.update(result_statistic_section_params)\n format.html { redirect_to edit_result_statistic_section_path(@result_statistic_section),\n notice: t('success') }\n format.json { render :show, status: :ok, location: @result_statistic_section }\n else\n format.html { render :edit }\n format.json { render json: @result_statistic_section.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @analysis_result = AnalysisResult.find(params[:id])\n\n respond_to do |format|\n if @analysis_result.update_attributes(params[:analysis_result])\n format.html { redirect_to @analysis_result, notice: 'Analysis result was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @analysis_result.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @result_suite.update(result_suite_params)\n format.html { redirect_to @result_suite, notice: 'Result suite was successfully updated.' }\n format.json { render :show, status: :ok, location: @result_suite }\n else\n format.html { render :edit }\n format.json { render json: @result_suite.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n params.permit!\n @sampleresult = Sampleresult.find(params[:id])\n\n respond_to do |format|\n if @sampleresult.update_attributes(params[:sampleresult])\n format.html { redirect_to(@sampleresult, :notice => 'Sampleresult was successfully updated.') }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @sampleresult.errors, :status => :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @result_total.update(result_total_params)\n format.html { redirect_to @result_total, notice: 'Result total was successfully updated.' }\n format.json { render :show, status: :ok, location: @result_total }\n else\n format.html { render :edit }\n format.json { render json: @result_total.errors, status: :unprocessable_entity }\n end\n end\n end", "def update \n if @section.update(section_params)\n render json: @section, status: :ok\n else\n render json: {errors: @section.errors}, status: :unprocessable_entity\n end \n end", "def update\n respond_to do |format|\n if @segment_result.update(segment_result_params)\n format.html { redirect_to @segment_result, notice: 'Segment result was successfully updated.' 
}\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @segment_result.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @survey_result = SurveyResult.find(params[:id])\n\n respond_to do |format|\n if @survey_result.update_attributes(params[:survey_result])\n format.html { redirect_to @survey_result, notice: 'Survey result was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @survey_result.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @skid_row_statistic = SkidRowStatistic.find(params[:id])\n\n respond_to do |format|\n if @skid_row_statistic.update_attributes(params[:skid_row_statistic])\n format.html { redirect_to @skid_row_statistic, notice: 'Skid row statistic was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @skid_row_statistic.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @test_case_result.update(test_case_result_params)\n format.html { redirect_to @test_case_result, notice: 'Test case result was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @test_case_result.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @ref_consult_summary = RefConsultSummary.find(params[:id])\n\n respond_to do |format|\n if @ref_consult_summary.update_attributes(params[:ref_consult_summary])\n format.html { redirect_to @ref_consult_summary, notice: 'Ref consult summary was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @ref_consult_summary.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @researchsection = Researchsection.find(params[:id])\n\n respond_to do |format|\n if @researchsection.update_attributes(params[:researchsection])\n format.html { redirect_to @researchsection, notice: 'Researchsection was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @researchsection.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @test_suite_result.update(test_suite_result_params)\n format.html { redirect_to @test_suite_result, notice: 'Test suite result was successfully updated.' }\n format.json { render :show, status: :ok, location: @test_suite_result }\n else\n format.html { render :edit }\n format.json { render json: @test_suite_result.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @survey_result.update(survey_result_params)\n format.html { redirect_to @survey_result, notice: 'Survey result was successfully updated.' }\n format.json { render :show, status: :ok, location: @survey_result }\n else\n format.html { render :edit }\n format.json { render json: @survey_result.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @section_test = SectionTest.find(params[:id])\n\n respond_to do |format|\n if @section_test.update_attributes(params[:section_test])\n format.html { redirect_to @section_test, notice: 'Section test was successfully updated.' 
}\n format.json { head :ok }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @section_test.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @survey_section.update(survey_section_params)\n format.html { redirect_to @survey_section, notice: 'Survey section was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @survey_section.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @set_section.update(set_section_params)\n format.html { redirect_to set_sections_url, notice: 'Set section was successfully updated.' }\n format.json { render :index, status: :ok, location: @set_sections }\n else\n format.html { render :edit }\n format.json { render json: @set_section.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @result1 = Result1.find(params[:id])\n\n respond_to do |format|\n if @result1.update_attributes(params[:result1])\n format.html { redirect_to @result1, notice: 'Result1 was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @result1.errors, status: :unprocessable_entity }\n end\n end\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
POST /bugreports POST /bugreports.json
def create @bugreport = Bugreport.new(bugreport_params) @bugreport.reporter = current_user respond_to do |format| if @bugreport.save format.html { redirect_to root_url, notice: 'Bugreport was successfully created.' } format.json { render action: 'show', status: :created, location: @bugreport } else format.html { render action: 'new' } format.json { render json: @bugreport.errors, status: :unprocessable_entity } end end end
[ "def create\n @bugreport = Bugreport.new(bugreport_params)\n\n respond_to do |format|\n if @bugreport.save\n format.html { redirect_to @bugreport, notice: 'Bugreport was successfully created.' }\n format.json { render :show, status: :created, location: @bugreport }\n else\n format.html { render :new }\n format.json { render json: @bugreport.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @bugreport = Bugreport.new(params[:bugreport])\n\n respond_to do |format|\n if @bugreport.save\n format.html { redirect_to @bugreport, notice: 'Bugreport was successfully created.' }\n format.json { render json: @bugreport, status: :created, location: @bugreport }\n else\n format.html { render action: \"new\" }\n format.json { render json: @bugreport.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @bug_report = BugReport.new(params[:bug_report])\n\n respond_to do |format|\n if @bug_report.save\n format.html { redirect_to @bug_report, notice: 'Bug report was successfully created.' }\n format.json { render json: @bug_report, status: :created, location: @bug_report }\n else\n format.html { render action: \"new\" }\n format.json { render json: @bug_report.errors, status: :unprocessable_entity }\n end\n end\n end", "def bugs_report\n @project = Project.find(params[:id])\n @data = @project.get_bugs_report\n \n respond_to do |format|\n format.html #bugs_report.html.erb\n format.json \n end\n end", "def create\n p \"/////////////////////////////////////\"\n @bug = Bug.new(:bug_title => params[:bugsname], :issue_log => params[:comments], :project_id => params[:project_id],\n :due_date => params[:dueDate], :priority => params[:priority], :status => params[:status], :severity => params[:severity])\n\n\n respond_to do |format|\n if @bug.save\n p 'successsssssssss'\n format.html { redirect_to @bug }\n format.json { render json: @bug }\n else\n p 'failllllllll'\n # format.html { render :new }\n # format.json { render json: @bug.errors, status: :unprocessable_entity }\n end\n end\n end", "def new\n @bug_report = BugReport.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @bug_report }\n end\n end", "def new\n @bugreport = Bugreport.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @bugreport }\n end\n end", "def create\n @bugreport = Bugreport.new(params[:bugreport])\n\n respond_to do |format|\n if @bugreport.save\n flash[:notice] = 'Bugreport was successfully created.'\n format.html { redirect_to(@bugreport) }\n format.xml { render :xml => @bugreport, :status => :created, :location => @bugreport }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @bugreport.errors, :status => :unprocessable_entity }\n end\n end\n end", "def create\n @bug = @project.bugs.new(bug_params.merge!(user_status: current_user))\n respond_to do |format|\n if @bug.save\n send_slack_notification ( {create: true} )\n format.html { redirect_to [@project, @bug], notice: 'Bug was successfully created.' }\n format.json { render :show, status: :created, location: @bug }\n else\n format.html { render :new }\n format.json { render json: @bug.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @bug = Bug.new(params[:bug])\n\n respond_to do |format|\n if @bug.save\n format.html { redirect_to @bug, notice: 'Bug was successfully created.' 
}\n format.json { render json: @bug, status: :created, location: @bug }\n else\n format.html { render action: \"new\" }\n format.json { render json: @bug.errors, status: :unprocessable_entity }\n end\n end\n end", "def index\n @bugreports = Bugreport.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @bugreports }\n end\n end", "def index \n render :json => Project.find(11).bug_tracker.bugs\n end", "def create\n @client_bug = ClientBug.new(client_bug_params)\n\n respond_to do |format|\n if @client_bug.save\n format.html { redirect_to @client_bug, notice: 'Client bug was successfully created.' }\n format.json { render json: @client_bug, status: :created }\n else\n format.html { render action: 'new' }\n format.json { render json: @client_bug.errors, status: :unprocessable_entity }\n end\n end\n end", "def bug\n bug = Bug.where(id: params[:bugId])\n render :json => bug.to_json\n end", "def ios_reports\n create_issue!\n\n render_airbrake_response\n end", "def add_report_template(args = {}) \n post(\"/reports.json/template\", args)\nend", "def create\n @bug = Bug.new(params[:bug])\n \n respond_to do |format|\n if @bug.save\n flash[:notice] = 'Bug was successfully created.'\n format.html { redirect_to(bugs_path) }\n format.xml { render :xml => @bug, :status => :created, :location => @bug }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @bug.errors, :status => :unprocessable_entity }\n end\n end\n end", "def create\n @report = Report.new(report_params)\n\n if @report.save\n render json: @report, status: :created, location: @report\n else\n render json: @report.errors, status: :unprocessable_entity\n end\n end", "def create(projectId, bug)\r\n\t\t\t\turl = getBaseURL+\"projects/\"+String(projectId)+\"/bugs/\"\t\t\r\n\t\t\t\tresponse = ZohoHTTPClient.post(url, getQueryMap, bug.toParamMAP)\t\t\r\n\t\t\t\treturn $bugParser.getBug(response)\r\n\t\t\tend" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
PATCH/PUT /bugreports/1 PATCH/PUT /bugreports/1.json
def update respond_to do |format| if @bugreport.update(bugreport_params) format.html { redirect_to @bugreport, notice: 'Bugreport was successfully updated.' } format.json { head :no_content } else format.html { render action: 'edit' } format.json { render json: @bugreport.errors, status: :unprocessable_entity } end end end
[ "def update\n @bugreport = Bugreport.find(params[:id])\n\n respond_to do |format|\n if @bugreport.update_attributes(params[:bugreport])\n format.html { redirect_to @bugreport, notice: 'Bugreport was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @bugreport.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @bug_report = BugReport.find(params[:id])\n\n respond_to do |format|\n if @bug_report.update_attributes(params[:bug_report])\n format.html { redirect_to @bug_report, notice: 'Bug report was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @bug_report.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @bug_report.update(bug_report_params)\n format.html { redirect_to @bug_report, notice: 'Bug report was successfully updated.' }\n format.json { render :show, status: :ok, location: @bug_report }\n else\n format.html { render :edit }\n format.json { render json: @bug_report.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n\n @bug = @project.bugs.find(params[:id])\n\n respond_to do |format|\n if @bug.update(bug_params)\n format.html { redirect_to [@bug.project, @bug], notice: 'Bug was successfully updated.' }\n format.json { render :show, status: :ok, location: @bug }\n else\n format.html { render :edit }\n format.json { render json: @bug.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n\n if project_bug_params[:status] ==\"started\"\n @project_bug.assigned_id = current_user.id\n\n end\n if @project_bug.update(project_bug_params)\n format.html { redirect_to @project_bug, notice: \"Project bug was successfully updated.\" }\n format.json { render :show, status: :ok, location: @project_bug }\n else\n format.html { render :edit, status: :unprocessable_entity }\n format.json { render json: @project_bug.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @bug = Bug.find(params[:id])\n\n respond_to do |format|\n if @bug.update_attributes(params[:bug])\n format.html { redirect_to @bug, notice: 'Bug was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @bug.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @client_bug.update(client_bug_params)\n format.html { redirect_to @client_bug, notice: 'Client bug was successfully updated.' 
}\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @client_bug.errors, status: :unprocessable_entity }\n end\n end\n end", "def update_report_template(args = {}) \n put(\"/reports.json/template/#{args[:templateId]}\", args)\nend", "def bugs_report\n @project = Project.find(params[:id])\n @data = @project.get_bugs_report\n \n respond_to do |format|\n format.html #bugs_report.html.erb\n format.json \n end\n end", "def api_patch(path, data = {})\n api_request(:patch, path, :data => data)\n end", "def update_development_info_jira (jira_issues, name, type)\n\tif type == \"branch\"\n\t\tfield = JIRA_FIELD_BRANCH\n\telsif type == \"pull_request\"\n\t\tfield = JIRA_FIELD_PULL_REQUEST\n\tend\n\ti = 0;\n\twhile (i < jira_issues.length) do\n\t\tjira_issue = jira_issues[i].join\n\n\t\tdata = {\n\t\t\t\"fields\" => {\n\t\t\t\tfield => name\n\t\t\t}\n\t\t}.to_json\n\n\t\theaders = {\n\t :\"Authorization\" => \"Basic #{JIRA_TOKEN}\",\n\t :\"Content-Type\" => \"application/json\"\n\t }\n\n\t if jira_issue =~ /(?:|^)(JQWE-[0-9]+|PQ-[0-9]+|JTQ-[0-9]+|JRQ-[0-9]+|JCEQ-[0-9]+|JITR.+-[0-9]+|TOOLSONE-[0-9]+)(?=|$)/i\n\t\t\turl = JACKTHREADS_JIRA_URL + jira_issue\n\t\telse\n\t\t\turl = THRILLIST_JIRA_URL + jira_issue\n\t\tend\n\n\t\tresponse = RestClient.put( url, data, headers )\n\n\t\ti+=1\n\tend\n\n\t#\"customfield_10000\" = branch\n\t#\"customfield_10123\" = pull_request\nend", "def update\n @bug = Bug.find(params[:id])\n\n respond_to do |format|\n if @bug.update_attributes(params[:bug])\n format.html { redirect_to(@bug, :notice => 'Bug was successfully updated.') }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @bug.errors, :status => :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @bugtraq.update(bugtraq_params)\n format.html { redirect_to @bugtraq, notice: 'Bugtraq was successfully updated.' 
}\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @bugtraq.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @api_report = Report.find(params[:id])\n if @api_report.update(params[:api_report])\n render json: @api_report, status: :success\n else\n render json: @api_report.errors, status: :unprocessable_entity\n end\n end", "def update\n @api_v1_report_comment = Api::V1::ReportComment.find(params[:id])\n\n if @api_v1_report_comment.user_id == @current_user.id && @api_v1_report_comment.update(api_v1_report_comment_params)\n head :no_content\n else\n render json: @api_v1_report_comment.errors, status: :unprocessable_entity\n end\n end", "def update\n @api_v1_report = Api::V1::Report.find(params[:id])\n\n if @api_v1_report.user_id == @current_user.id && @api_v1_report.update(api_v1_report_params)\n head :no_content\n else\n render json: @api_v1_report.errors, status: :unprocessable_entity\n end\n end", "def patch *args\n make_request :patch, *args\n end", "def update\n @bug = Bug.find(params[:id])\n\n \n respond_to do |format|\n if @bug.update_attributes(params[:bug])\n notice = \"Bug marked as solved\" if params[:bug][:solved] == \"1\"\n format.html { redirect_to_index :controller => \"admin\", :notice => notice }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @bug.errors, :status => :unprocessable_entity }\n end\n end\n end", "def update\n @bug_post = BugPost.find(params[:id])\n respond_to do |format|\n if @bug_post.update_attributes(params[:bug_post])\n format.html { redirect_to [@bug_post.bug_list.project, @bug_post.bug_list, @bug_post], notice: 'Bug post was successfully updated.' }\n format.json { head :ok }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @bug_post.errors, status: :unprocessable_entity }\n end\n end\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
DELETE /bugreports/1 DELETE /bugreports/1.json
def destroy @bugreport.destroy respond_to do |format| format.html { redirect_to bugreports_url } format.json { head :no_content } end end
[ "def destroy\n @bugreport = Bugreport.find(params[:id])\n @bugreport.destroy\n\n respond_to do |format|\n format.html { redirect_to bugreports_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @bug_report = BugReport.find(params[:id])\n @bug_report.destroy\n\n respond_to do |format|\n format.html { redirect_to bug_reports_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @report.destroy!\n render json: {status: :ok}\n end", "def destroy\n @client_bug.destroy\n respond_to do |format|\n format.html { redirect_to client_bugs_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @api_report = Report.find(params[:id])\n @api_report.destroy\n\n head :no_content\n end", "def destroy\n @bug_history = BugHistory.find(params[:id])\n @bug_history.destroy\n\n respond_to do |format|\n format.html { redirect_to(bug_histories_url) }\n format.xml { head :ok }\n end\n end", "def destroy\n @report.destroy\n\n respond_to do |format|\n format.html { redirect_to reports_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @status_report = StatusReport.find(params[:id])\n @status_report.destroy\n\n respond_to do |format|\n format.html { redirect_to status_reports_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @bug = Project.find(params[:project_id]).bug_tracker.bugs.find(params[:id])\n \n if @bug.destroy\n render :json => {:status => :ok}\n else\n render :json => {:error => @bug.errors.full_messages, :status => :bad_request}\n end\n end", "def destroy\n @notification_report = NotificationReport.find(params[:id])\n @notification_report.destroy\n\n respond_to do |format|\n format.html { redirect_to notification_reports_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @report.destroy\n respond_to do |format|\n format.html { redirect_to reports_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @bug.destroy\n\n respond_to do |format|\n format.html { redirect_to project_environment_bugs_url(@project, @environment), flash: {success: t('controllers.bugs.destroy.deleted', number: number_with_delimiter(@bug.number))} }\n end\n end", "def destroy\n @failure_report.destroy\n respond_to do |format|\n format.html { redirect_to failure_reports_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @bug = Bug.find(params[:id])\n @bug.bug_histories.each do |bh|\n bh.destroy\n end\n TestcaseBugXref.delete_all(\"bug_id=\"+params[:id])\n filename = \"#{RAILS_ROOT}/public/files/\"+File.join('attachments', 'bugs', @bug.id.to_s)\n FileUtils.rm_rf filename\n @bug.attachments.each do |a|\n a.destroy\n end\n @bug.destroy\n\n respond_to do |format|\n format.html { redirect_to(bugs_url) }\n format.xml { head :ok }\n end\n end", "def destroy\n @bug = Bug.find(params[:id])\n @bug.destroy\n\n respond_to do |format|\n format.html { redirect_to bugs_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @bug_status = BugStatus.find(params[:id])\n @bug_status.destroy\n\n respond_to do |format|\n format.html { redirect_to(bug_statuses_url) }\n format.xml { head :ok }\n end\n end", "def destroy\r\n @test_report = TestReport.find(params[:id])\r\n @test_report.destroy\r\n\r\n respond_to do |format|\r\n format.html { redirect_to test_reports_url }\r\n format.json { head :no_content }\r\n end\r\n end", "def destroy\n @debug.destroy\n respond_to do |format|\n format.html { redirect_to debugs_url }\n format.json { head :no_content }\n end\n end", "def destroy\n 
@maintenance_report.destroy\n respond_to do |format|\n format.html { redirect_to maintenance_reports_url }\n format.json { head :no_content }\n end\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
2. Metoda display_details() afiseaza numele si prenumele
def display_details() puts "Numele persoanei: #@nume" puts "Prenumele persoanei: #@prenume" end
[ "def inspect_details\n\t\t\treturn ''\n\t\tend", "def display_details()\r\n\t\tprintf(\"\\n**************************************************\")\r\n\t\tprintf(\"\\n***** MONTHLY PAY SLIP DETAILS *****\")\r\n\t\tprintf(\"\\n**************************************************\")\r\n\t\tprintf(\"\\nEmployee Name : %s\",@name)\r\n\t\t# Amounts are depicted with 2 decimal places.\r\n\t\tprintf(\"\\nGross Monthly Salary : $ %.2f\",@grossMonthlyIncome)\r\n\t\tprintf(\"\\nMonthly Tax : $ %.2f\",@monthlyIncomeTax)\r\n\t\tprintf(\"\\nNet Monthly Salary : $ %.2f\",@netMonthlyIncome)\r\n\t\tprintf(\"\\n**************************************************\")\r\n end", "def show_details(db, options = {}) # Pass in the db, parent vm is in @vm\n association = options[:association]\n conditions = options[:conditions]\n # generate the grid/tile/list url to come back here when gtl buttons are pressed\n @gtl_url = \"/#{@db}/#{@listicon.pluralize}/#{@record.id}?\"\n @showtype = \"details\"\n @display = \"main\"\n @no_checkboxes = @no_checkboxes.nil? || @no_checkboxes\n @showlinks = true\n\n @view, @pages = get_view(db,\n :parent => @record,\n :association => association,\n :conditions => conditions,\n :dbname => \"#{@db}item\") # Get the records into a view & paginator\n\n if @explorer # In explorer?\n @refresh_partial = \"vm_common/#{@showtype}\"\n replace_right_cell\n else\n if pagination_request?\n replace_gtl_main_div\n elsif request.xml_http_request?\n # reload toolbars - AJAX request\n c_tb = build_toolbar(center_toolbar_filename)\n render :update do |page|\n page << javascript_prologue\n page.replace(\"flash_msg_div\", :partial => \"layouts/flash_msg\")\n page.replace_html(\"main_div\", :partial => \"shared/views/ems_common/show\") # Replace main div area contents\n page << javascript_pf_toolbar_reload('center_tb', c_tb)\n page.replace_html(\"paging_div\",\n :partial => 'layouts/pagingcontrols',\n :locals => {:pages => @pages,\n :action_url => @lastaction,\n :db => @view.db,\n :headers => @view.headers})\n end\n elsif controller_name == \"ems_cloud\"\n render :template => \"shared/views/ems_common/show\"\n else\n render :action => \"show\"\n end\n end\n end", "def print_detail(item_number)\n addon_details = {:Phone => @seller.phone, :'Listing #' => @id}\n puts '',\n self.summary_detail_row(item_number),\n @item.details_to_string(addon_details)\n end", "def detail_for record, &block\n concat(render_default_css, block.binding) unless @_showhide_css_done\n div_for(record, 'detail_for', :style => 'display: none;', :class => 'detail', &block)\n @_showhide_css_done = true # prevents to print the CSS multiple times\n nil\n end", "def additional_details\n\n end", "def print_details\n #variables\n name_length = @full_name.length\n \n #format\n puts(\"\\n\")\n puts(@full_name)\n puts (\"-\") * name_length\n puts(\"Date of Birth: \" + @dob.strftime(\"%d/%B/%Y\"))\n puts(\"\\n\")\n puts(\"Email Addresses: \")\n @emails.each {|x| puts \"- #{x}\"}\n puts(\"\\n\")\n puts(\"Phone Number: \")\n @phone_numbers.each {|i| puts \"- #{i}\"}\n puts(\"\\n\")\n return nil\n \n end", "def set_show_detail\n @show_detail = true\n end", "def details\n @details\n end", "def inspect_details\n ''\n end", "def get_item_detail_info(detail_page)\n item_detail_info = {}\n \n #get detail info by approciate syn_fieldsetItem\n item_detail_info[:status] = get_status(detail_page)\n item_detail_info[:responsible_officer_name] = get_responsible_officer_name(detail_page)\n item_detail_info[:responsible_officer_property] = 
get_responsible_officer_property(detail_page)\n \n return item_detail_info\nend", "def print_details\n puts \"\"\n puts \"#{@fullname}\"\n puts \"---------\" ' '\n puts \"Date of Birth:#{@dob}\"\n \n puts \"\"\n puts \"Email addresses:\"\n @emails.each do |e|\n puts \"- \" + e.to_s\n end \n \n puts \"\"\n puts \"Phone Numbers:\"\n @phone_numbers.each do |n|\n puts \"- \" + n.to_s\n end\n end", "def show_details\n\t\t\treturn @selected\n\t\tend", "def display_personal_info_and_disclaimer\n display_personal_information()\n display_disclaimer()\n end", "def label_details; end", "def details=(value)\n @details = value\n end", "def toggle_details \n criteria = session[:toggle_details] ? \"Expanded\" : \"List\"\n link_to_function( \"#{criteria} view\", \"toggle_details()\", :id => 'toggle_details' )\n\tend", "def fetch_details\n\t\tself.send(\"fetch_details_from_#{self.provider.downcase}\")\n\tend", "def details\n raise NotImplementedError, \"Define DETAILS method in child class.\"\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Takes a host to and initializes `task` attribute as an empty array.
def initialize(host=nil) @host = host @tasks = [] end
[ "def initialize # Initialize method that is similar to a Constructor in Java\n @all_tasks = [] # Method includes an array that stores all tasks\n end", "def initialize\n @task_list = []\n end", "def tasksOnHost(filter, host)\n java_import 'java.net.URL'\n java_import 'java.net.SocketException'\n java_import 'java.io.InputStreamReader'\n java_import 'org.apache.hbase.thirdparty.com.google.gson.JsonParser'\n\n infoport = @admin.getClusterMetrics.getLiveServerMetrics.get(host).getInfoServerPort.to_s\n\n begin\n schema = \"http://\"\n url = schema + host.hostname + ':' + infoport + '/rs-status?format=json&filter=' + filter\n json = URL.new(url).openStream\n rescue SocketException => e\n # Let's try with https when SocketException occur\n schema = \"https://\"\n url = schema + host.hostname + ':' + infoport + '/rs-status?format=json&filter=' + filter\n json = URL.new(url).openStream\n end\n\n parser = JsonParser.new\n\n # read and parse JSON\n begin\n tasks_array_list = parser.parse(InputStreamReader.new(json, 'UTF-8')).getAsJsonArray\n ensure\n json.close\n end\n # convert to an array of TaskMonitor::Task instances\n tasks = []\n tasks_array_list.each do |t|\n tasks.unshift Task.new(t.getAsJsonObject, host)\n end\n\n tasks\n end", "def serial_execute hosts, task, args\n hosts.each do |host|\n task2 = task.clone\n task2.target_host = host\n saved_hosts_env = ENV[\"HOSTS\"]\n begin\n ENV[\"HOSTS\"] = host\n Thread.current[:task] = task2\n case block.arity\n when 1 then block.call task2\n else block.call task2, args\n end\n Thread.current[:task] = nil\n ensure\n ENV[\"HOSTS\"] = saved_hosts_env\n end\n end\n end", "def initialize_task\n @task = Task.new\n end", "def tasks=(value)\n @tasks = value\n end", "def tasks\n @tasks ||= Harvest::API::Tasks.new(credentials)\n end", "def initialize_group( task )\n @task = task\n end", "def tasks\n @tasks ||= Thor::CoreExt::OrderedHash.new\n end", "def initialize(task)\n super()\n @task= task \n end", "def get_taskarray\n @task_array\n end", "def tasks=( array )\n @tasks = array\n update_internal_task_lists()\n end", "def create_host!(host)\n @_hosts_created << host.name\n end", "def tasks() []; end", "def tasks\n @tasks ||= Evoke.tasks\n end", "def task_names\n @tasks ? @tasks.split(':') : []\n end", "def push_taskarray(task)\n @task_array << task\n end", "def jeweler_tasks=(_arg0); end", "def hosts=(hosts)\n @host = nil\n @hosts = hosts\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
This method will be called by the Logging framework when it first initializes. Here we require the redis appender code.
def initialize_redis require File.expand_path('../appenders/redis', __dir__) end
[ "def setup\n self.logger.reopen(File.open(File.join(Lokii::Config.root, Lokii::Config.log), \"a\")) if daemon? && self.logger\n self.logger ||= ::Logger.new(File.join(Lokii::Config.root, Lokii::Config.log))\n end", "def setup_redis_log\n as :root do\n create_file! '/etc/rsyslog.d/99-redis.conf', <<-RSYSLOG\n:programname, isequal, \"redis\" #{REDIS_LOG}\n RSYSLOG\n\n create_file! '/etc/logrotate.d/redis', <<-LOGROTATE\n#{REDIS_LOG} {\n weekly\n missingok\n rotate 20\n compress\n delaycompress\n notifempty\n postrotate\n reload rsyslog >/dev/null 2>&1 || true\n endscript\n}\n LOGROTATE\n end\n\n props[:REDIS_LOG] = REDIS_LOG\n end", "def log_startup\n log_environment\n log_dispatcher\n log_app_name\n end", "def configure_log\n\n end", "def initialize\n Logging.setup(Logger::INFO)\n end", "def log_startup\n log_environment\n log_dispatcher\n log_app_name\n end", "def on_app_initializing(_event)\n info \"Initializing Karafka framework #{::Process.pid}\"\n end", "def initialize( )\n ::Logging.init unless ::Logging.initialized?\n\n @name = 'root'\n @appenders = []\n @additive = false\n @caller_tracing = false\n @level = 0\n ::Logging::Logger.define_log_methods(self)\n end", "def initialize\n @logProvider = DefaultLogger.new\n end", "def init(*opts)\n reset!\n @logger = logger_for(BeanStalk::Worker::Config[:log_location])\n if @logger.respond_to?(:formatter=)\n if BeanStalk::Worker::Config[:log_formatter].eql?(:json)\n @logger.formatter = Mixlib::Log::JSONFormatter.new()\n else\n @logger.formatter = Mixlib::Log::Formatter.new()\n end\n end\n @logger.level = Logger.const_get(\n BeanStalk::Worker::Config[:log_level].to_s.upcase)\n @logger\n end", "def on_app_initializing(_event)\n info 'Initializing Karafka framework'\n end", "def initialize\n @logger = LogManager.instance\n end", "def enable_logging\n initialize_logger\n end", "def configure_logging\n @logger = Logging.logger[self]\n end", "def init\n Config.load_yaml\n Log.init\n reset\n end", "def initialize(log_device)\n @logger = Logging.logger[ProcessManager::Config.config[:program_name]]\n @logger.add_appenders(\n Logging.appenders.rolling_file('rolling_file_appender',\n :filename => log_device,\n :age => 'daily',\n :keep => 7,\n :layout => Logging.layouts.pattern(:pattern => '%d %-5l [%c(%p)]: %m\\n')\n )\n )\n if ProcessManager::Config.config[:verbose]\n self.level = 'debug'\n else\n self.level = 'info'\n end\n end", "def configure \n#--{{{\n init_logging\n configurator = Configurator::new self\n configurator.configure\n#--}}}\n end", "def initialize(*args)\n base_initialize(*args)\n init_log!\n self\n end", "def before_run\n logger.info 'Master starting'\n @redis = Antelopes::Redis.new(size: 5).connection\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
helper method for self.highest_pledge to abstract away the process to get highest pledge amount
def highest_pledge_amount pledges.reduce(0) do |highest_pledge_amount, pledge| highest_pledge_amount = pledge.amount > highest_pledge_amount ? pledge.amount : highest_pledge_amount end end
[ "def highest_pledge()\n self.pledges().max_by() { | pledge | pledge.amount() }\n end", "def user_highest_pledge\n self.pledges.empty? ? 0 : self.pledges.map {|pledge| pledge.amount}.max \n end", "def biggest_investment\n max = 0\n funding_rounds.each do |round| \n if round.investment > max\n max = round.investment\n end\n end\n max\n \n end", "def biggest_investment\n funding_rounds.max_by {|funding_round| funding_round.investment}\n end", "def highest_bid\n list_of_bids = bids.order(amount: :desc)\n if list_of_bids.blank? \n return 0\n end\n list_of_bids[0].amount\n end", "def most_money(rideshare)\n highest_earned = 0.0\n high_earner = \"\"\n # call method from #2\n money_made(rideshare).map do |driver, cost|\n if cost > highest_earned\n highest_earned = cost\n high_earner = driver\n end\n end\n return high_earner\nend", "def get_highest_paid(drivers)\n highest_paid = nil\n highest_income = 0\n drivers.each do |driver, rides| \n current_income = get_income(rides)\n if highest_income < current_income\n highest_income = current_income\n highest_paid = driver\n end\n end\n return highest_paid\nend", "def driver_with_most_money(rides)\n driver_with_max_money = total_money_by_driver(rides).max_by {|k, v| v}\n return driver_with_max_money\nend", "def max_buy\r\n max = $game_party.max_item_number(@item) - $game_party.item_number(@item)\r\n buying_price == 0 ? max : [max, money / buying_price].min\r\n end", "def max_reward_price\n @max_reward_price_cache ||= if self.offers_reward?\n result = Money.new(0, self.default_currency)\n self.rewards.visible.each {|reward| result = Money.max(result, reward.price)} # # .convert_to(self.default_currency)\n result\n end\n end", "def biggest_investment\n self.funding_rounds.max_by do |round|\n round.startup.uniq \n end\n end", "def franchise_with_highest_profit\n company = get_company\n no_franchises(company)\n big_earner = company.franchises.max_by { |franchise| franchise.profit }\n puts \"Franchise #{big_earner.id} has the highest profit, having earned $#{big_earner.profit}.\"\n menu\n end", "def get_highest_price\n highest_bid = self.get_highest_bid\n if highest_bid.nil?\n 0\n else\n highest_bid.price\n end\n end", "def output_driver_with_most_money(earned_per_each_driver)\n max_earned = earned_per_each_driver.max_by do |current_driver|\n current_driver[:driver_earned]\n end\n\n puts \"\\nDriver #{max_earned[:driver_id]} earned the most money $#{max_earned[:driver_earned]}.\"\nend", "def highest_volume_price\n self.product.master.volume_prices.where(:currency => order.currency).maximum(:amount)\n end", "def highest_price\n # CARYN SAYS:the way you describe this above is incorrect! Reread the README\n # It should be the integer price for the most expensive listing of this recipe \n Recipe.all.max { |recipe_a, recipe_b| recipe_a.average_price <=> recipe_b.average_price }\n end", "def max_bid\n if bids.count == 0\n return 0\n else\n return self.bids.max_by(&:amount).amount\n end\n end", "def max_buy\n max = $game_party.max_item_number(@item) - $game_party.item_number(@item) \n max_crafts = HudellBazaar::get_max_crafts(@item)\n max = [max, max_crafts].min\n\n buying_price == 0 ? max : [max, money / buying_price].min\n end", "def highest_investment\n count = Hash.new(0)\n self.transactions.each do |transaction|\n count[transaction.user] += transaction.num_of_shares\n end\n count.max_by {|k,v| v}\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Build an instance of BrandedCallInstance
def get_instance(payload) BrandedCallInstance.new(@version, payload, ) end
[ "def new_api_call_builder\n @api_call.new_builder\n end", "def get_instance(payload)\n return CallInstance.new(\n @version,\n payload,\n )\n end", "def get_instance(payload)\n PhoneCallInstance.new(@version, payload, )\n end", "def get_instance(payload)\n CallInstance.new(@version, payload)\n end", "def get_instance(payload)\n ArchivedCallInstance.new(@version, payload)\n end", "def get_instance(payload)\n CurrentCallInstance.new(@version, payload, )\n end", "def initialize(calls = [])\n @calls = calls\n end", "def make_call call, player = nil\n assert_call(call.class)\n # Calls must be distinct.\n raise InvalidCallError, \"#{call.inspect} is invalid\" unless self.valid_call?(call)\n\n self.calls << call\n if self.complete? and not self.passed_out?\n self.contract = Contract.new(self)\n end\n true\n end", "def get_instance(payload)\n BrandedChannelInstance.new(\n @version,\n payload,\n business_sid: @solution[:business_sid],\n brand_sid: @solution[:brand_sid],\n )\n end", "def get_instance(payload)\n CallSummaryInstance.new(@version, payload, call_sid: @solution[:call_sid])\n end", "def get_or_create_call\n @call = Call.find_by(uid: params['CallSid'])\n @call = Call.create(\n uid: params['CallSid'],\n from: params['Caller'],\n ) if @call.nil?\n end", "def get_instance(payload)\n CallSummaryInstance.new(@version, payload, )\n end", "def build_contract\n self.class.contract_klass.new(context.record)\n end", "def get_instance(payload)\n AuthTypeCallsInstance.new(\n @version,\n payload,\n account_sid: @solution[:account_sid],\n domain_sid: @solution[:domain_sid],\n )\n end", "def add(call, call_id = nil, &block)\n unless call.kind_of?(Google::APIClient::Reference)\n call = Google::APIClient::Reference.new(call)\n end\n call_id ||= new_id\n if @calls.assoc(call_id)\n raise BatchError,\n 'A call with this ID already exists: %s' % call_id\n end\n callback = block_given? ? block : @global_callback\n @calls << [call_id, call, callback] \n return self\n end", "def add(call, call_id = nil, &block)\n unless call.kind_of?(Google::APIClient::Reference)\n call = Google::APIClient::Reference.new(call)\n end\n if call_id.nil?\n call_id = new_id\n end\n if @calls.include?(call_id)\n raise BatchError,\n 'A call with this ID already exists: %s' % call_id\n end\n @calls[call_id] = call\n @order << call_id\n if block_given?\n @callbacks[call_id] = block\n elsif @global_callback\n @callbacks[call_id] = @global_callback\n end\n return self\n end", "def makecall\n \n client = Twilio::REST::Client.new(TWILIO_CONFIG['sid'], TWILIO_CONFIG['token'])\n @call = client.account.calls.create(\n :from => TWILIO_CONFIG['from'], # From your Twilio number\n :to => '+19493228496', # To any number\n # Fetch instructions from this URL when the call connects\n :url => 'http://snipit.herokuapp.com/voicein'\n )\n end", "def initialize(&builder)\n @school = School.new\n @contact = ContactDetails.new\n instance_eval(&builder)\n end", "def make(to, from, url, optional_params = {})\n opts = { :To => to, :From => from, :Url => url }.merge(optional_params)\n response = Network.post(['Calls'], opts)\n Call.new(response)\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
check if the left child of is greater than parent
def left_child_greater?(array, index) array[left_child(index)] && (left_child(index) > array[index]) end
[ "def check_left\n\t\tunless @left_child.nil?\n\t\t\treturn @left_child.value\n\t\tend\n\tend", "def is_left_child?\n return false if is_root?\n self == parent.left_child\n end", "def is_parent_greater_than_child(index, parent)\n @tree[parent].rating > @tree[index].rating\n end", "def isLeftChild?\r\n return nil if isRoot?\r\n self == parent.leftChild\r\n end", "def left_outer?\n @page <= @left\n end", "def one_shift_greater_than_parent?(node, actual_indent)\n parent_indent = node_indent(node_indent_parent(node)).length\n expected_indent = parent_indent + @indent_width\n expected_indent == actual_indent\n end", "def child\n if (@left.nil? && @right.nil?) || (!@left.nil? && !@right.nil?)\n puts 'not one childed'\n elsif @left.nil?\n @right\n elsif @right.nil?\n @left\n end\n end", "def child_compare(parent)\n\t\tunless parent.nil?\n\t\t\tif parent.left.nil? && parent.right.nil?\n\t\t\t\tnil\n\t\t\telsif parent.right.nil?\n\t\t\t\tif parent.value > parent.left.value\n\t\t\t\t\tswap(parent.left, parent)\n\t\t\t\tend\n\t\t\telse\t\t\n\t\t\t\tif parent.value > parent.left.value || parent.value > parent.right.value\n\t\t\t\t\tif parent.left.value < parent.right.value\n\t\t\t\t\t\tswap(parent.left, parent)\n\t\t\t\t\t\tchild_compare(parent.left)\n\t\t\t\t\telse\n\t\t\t\t\t\tswap(parent.right, parent)\n\t\t\t\t\t\tchild_compare(parent.right)\n\t\t\t\t\tend\n\t\t\t\tend\n\t\t\tend\n\t\tend\n\tend", "def asgn_left?\n OP_ASSIGN.include?(parent_type) && parent.node.children.first.equal?(node)\n end", "def right_child?(node_index)\n\t\tnode_index[-1] == '1'\n\tend", "def is_descendant_of?(other)\n left > other.left && left <= other.right\n end", "def fully_left_of(rect)\n @bottom_right.x < rect.top_left.x\n end", "def child_of?(parent); end", "def is_child? (node_label)\n # Get the position in adjacent interval of the label given\n position = get_node_pos(node_label)\n # Check if the position is between left and right bounds\n return (position >= left_tree and position <= right_tree)\n end", "def which_child\n return nil if parent.nil?\n if coordA < parent.coordA\n if coordB > parent.coordB\n 0\n else\n 2\n end\n else\n if coordB > parent.coordB\n 1\n else\n 3\n end\n end\n end", "def left_child\n return nil if self.is_leaf?\n self.children[0]\n end", "def child_compare(parent)\n\t\tunless parent.nil?\n\t\t\tif parent.left.nil? && parent.right.nil?\n\t\t\t\tnil\n\t\t\telsif parent.right.nil?\n\t\t\t\tif parent.rating > parent.left.rating\n\t\t\t\t\tswap(parent.left, parent)\n\t\t\t\tend\n\t\t\telse\t\t\n\t\t\t\tif parent.rating > parent.left.rating || parent.rating > parent.right.rating\n\t\t\t\t\tif parent.left.rating < parent.right.rating\n\t\t\t\t\t\tswap(parent.left, parent)\n\t\t\t\t\t\tchild_compare(parent.left)\n\t\t\t\t\telse\n\t\t\t\t\t\tswap(parent.right, parent)\n\t\t\t\t\t\tchild_compare(parent.right)\n\t\t\t\t\tend\n\t\t\t\tend\n\t\t\tend\n\t\tend\n\tend", "def is_ancestor_of?(other)\n other.left > left && other.left <= right\n end", "def left_outer?\n @slice <= @options[:left]\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Sets params for the admin library id from the inherited AdminLibraries class.
def set_library @library = AdminLibrary.friendly.find(params[:admin_library_id]) end
[ "def init_params(params)\n super\n end", "def create_admin_library \n library = AdminLibrary.new(name: \"#{self.name} Archive\", admin_id: self.id)\n library.save\n end", "def model_attributes(_form_params)\n super.tap do |params|\n params[:admin_set_id] = admin_set_id if params[:admin_set_id].blank?\n end\n end", "def libraries_id_array\n staff_member_library_ids = []\n self.library.each do |l|\n staff_member_library_ids << l.id\n end\n staff_member_library_ids\n end", "def edit_book_library_id(select)\n\tshow_all_libraries\n\tprint \"\\nWhich library would you like to call home for this book \"\n\tlibrary_id = gets.chomp.to_i\n\tlibrary_id = verify_library_exists(library_id)\n\tb = Book.find(select)\n\tb.update_attributes(library_id: library_id)\nend", "def set_client_id(opts)\n opts = check_params(opts,[:client_ids])\n super(opts)\n end", "def initialize(distributor_options={})\n @id = distributor_options[\"id\"]\n @company = distributor_options[\"company\"]\n end", "def solr_doc_params(id=nil)\n #Rails.logger.debug(\"Debugging in the solr_doc_params field!#{id}\")\n if params[\"DocId\"]\n id ||= params[\"DocId\"]\n else \n id ||= params[:id]\n end\n puts \"SolrhelperGem solr_doc_params\"\n # add our document id to the document_unique_id_param query parameter\n p = blacklight_config.default_document_solr_params.merge({\n # this assumes the request handler will map the unique id param\n # to the unique key field using either solr local params, the \n # real-time get handler, etc.\n #:id => id # this assumes the document request handler will map the 'id' param to the unique key field\n # URL Escaping does not in fact work - at least not with the whole string with vitroIndividual:.. etc.\n blacklight_config.document_unique_id_param => id\n \n })\n \n p[:qt] ||= blacklight_config.document_solr_request_handler\n #If, instead of document handler, you want to use the regular\n # search handler, the following will work\n #p = blacklight_config.default_document_solr_params.merge({\n #:id => id # this assumes the document request handler will map the 'id' param to the unique key field\n # URL Escaping does not in fact work - at least not with the whole string with vitroIndividual:.. etc.\n # :q => \"id:\" + id\n # })\n #Also, the p[:qt] above would be commented out \n \n p\n end", "def component_initialize\n # Only create and supply a random ID this when a :heading is present\n return unless heading\n new_id = options[:id].nil? ? 
random_id : options[:id]\n options[:id] = new_id\n end", "def wrapper_options\n return super unless root_widget?\n super.merge :id => component.object_id\n end", "def set_unit_id(opts)\n opts = check_params(opts,[:unit_ids])\n super(opts)\n end", "def set_module_id\n p \"==========\" \n self.module = MODULE.index(self.module)\n end", "def self_and_subclass_param_ids\n subclass_param_ids.insert(0, self_param_id)\n end", "def set_params_id\n unless params[:id]\n if current_user\n params[:id] = current_user.id\n else\n require_user\n end\n end\n end", "def set_configuration_identifier\n super\n end", "def permissions_solr_doc_params(id=nil)\n id ||= params[:id]\n # just to be consistent with the other solr param methods:\n {\n :qt => :permissions,\n :id => id # this assumes the document request handler will map the 'id' param to the unique key field\n }\n end", "def set(*params)\n\t\tparams = params[0] #extract the named-param hash from the array\n\t\tparams.each do |key, val|\n\t\t\tcase key\n\t\t\t\twhen :libs then\n\t\t\t\t\tlibsyms = ensureArray(val)\n\t\t\t\t\tcustomize(:cxxcmp => lambda {|cxxcmp| libsyms.each {|libsym| cxxcmp.addIncludeDirs(ExtlibRegistry::incdirsForLib(libsym))}}) #add a customization for all targets in our dir(s)\n\t\t\t\telse raise \"Raker::set(): unrecognized param #{key}\" \n\t\t\tend\n\t\tend\n\tend", "def permissions_solr_doc_params(id=nil)\n id ||= params[:id]\n # just to be consistent with the other solr param methods:\n {\n :qt => :permissions,\n :id => id # this assumes the document request handler will map the 'id' param to the unique key field\n }\n end", "def common_params\n {\n :apiid => @apiid,\n :apipass => @apipass,\n :rtype => 'json',\n :domainid => @domain_id\n }\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Provides params for a new library upload record.
def upload_params params.require(:library_upload).permit(:title, :description, :tags, :image, :file) end
[ "def upload_params\n {\n extension: params.require(:extension),\n tarball: params.require(:tarball)\n }\n end", "def create_file_params(ios)\n # multi-doc uploading capabilities, each doc needs to be it's own param\n file_params = {}\n ios.each_with_index do |io,index|\n file_params.merge!(\"file#{index + 1}\" => io)\n end\n file_params\n end", "def s3_upload_params\n Dydra::Client.rpc.call('dydra.repository.upload.params', path)\n end", "def upload_params #:doc:\n params.require(:upload).permit(:binary, :file)\n end", "def get_upload\n study_file_params.to_h['upload']\n end", "def set_upload_attributes\n set_upload_attributes_with_block do |type, direct_upload_url_data, direct_upload_head|\n case(type)\n when :pdf\n self.pdf_file_name = direct_upload_url_data[:filename]\n self.pdf_file_size = direct_upload_head.content_length\n self.pdf_content_type = direct_upload_head.content_type\n self.pdf_updated_at = direct_upload_head.last_modified\n when :doc\n self.doc_file_name = direct_upload_url_data[:filename]\n self.doc_file_size = direct_upload_head.content_length\n self.doc_content_type = direct_upload_head.content_type\n self.doc_updated_at = direct_upload_head.last_modified\n end\n end\n end", "def uploader_options; end", "def create_multipart_upload\n self.s3_object||= Upload.get_s3_object(self.object_key)\n self.multipart_upload = s3_object.multipart_upload(\n content_type: params[:type],\n server_side_encryption: S3BrowserMultipart::Engine.\n config.s3_config[:server_side_encryption]\n )\n self.upload_id = self.multipart_upload.upload_id\n Rails.logger.warn \"Created multipart_upload_id: #{self.upload_id} object_key: #{self.object_key}\"\n end", "def sample_params\n params.require(:sample).permit(:filename, :malz, :hash, :tags_list)\n end", "def uploader_options=(_arg0); end", "def initialize(upload)\n @filename = upload[\"filename\"]\n @url = upload[\"url\"]\n @params = {}\n upload[\"param\"].each do |param|\n @params[param[\"name\"]] = param[\"content\"]\n end\n end", "def to_jq_upload\n {\n \"id\" => read_attribute(:id),\n \"title\" => read_attribute(:title),\n \"description\" => read_attribute(:description),\n \"name\" => read_attribute(:file),\n \"city_id\" => read_attribute(:city_id),\n \"size\" => file.size,\n \"url\" => file.url,\n \"medium_url\" => file.medium.url,\n \"thumbnail_url\" => file.thumb.url,\n \"rollover_url\" => file.rollover.url,\n \"delete_url\" => picture_path(:id),\n \"delete_type\" => \"DELETE\" \n }\n end", "def media_file_params\n params.require(:media_files).permit(:media_type, :ref, :image_file)\n end", "def create\n upload_item params[params[:item_type]], session['username'], params[:alternative_name], params[:tag_name], params[:item_type], params[:details]\n end", "def upload_params(prefix, opts={})\n return if prefix.blank?\n\n expires = (opts[:expires] || 30.minutes).from_now.utc.strftime('%Y-%m-%dT%H:%M:%S.000Z')\n bucket = AmazonHelper.bucket\n acl = (opts[:public] == true) ? 
'public-read' : 'private'\n max_filesize = opts[:max_filesize] || 200.megabyte\n\n policy = Base64.encode64(\n \"{'expiration': '#{expires}',\n 'conditions': [\n {'bucket': '#{bucket}'},\n ['starts-with', '$key', '#{prefix}'],\n {'acl': '#{acl}'},\n {'success_action_status': '201'},\n ['content-length-range', 0, #{max_filesize}]\n ]\n }\").gsub(/\\n|\\r/, '')\n\n signature = Base64.encode64(\n OpenSSL::HMAC.digest(\n OpenSSL::Digest::Digest.new('sha1'),\n secret_key,\n policy)\n ).gsub(\"\\n\",\"\")\n\n {\n :key => \"#{prefix}/${filename}\",\n :acl => acl,\n :policy => policy,\n :signature => signature,\n :AWSAccessKeyId => access_key,\n :success_action_status => 201\n }\n end", "def upload_create(project_id, params)\n path = sprintf(\"/api/v2/projects/%s/uploads\", project_id)\n data_hash = {}\n post_body = nil\n \n if params.present?\n unless params.kind_of?(PhraseApp::RequestParams::UploadParams)\n raise PhraseApp::ParamsHelpers::ParamsError.new(\"Expects params to be kind_of PhraseApp::RequestParams::UploadParams\")\n end\n end\n if params.autotranslate != nil\n data_hash[\"autotranslate\"] = (params.autotranslate == true)\n end\n\n if params.branch != nil\n data_hash[\"branch\"] = params.branch\n end\n\n if params.convert_emoji != nil\n data_hash[\"convert_emoji\"] = (params.convert_emoji == true)\n end\n\n if params.file != nil\n post_body = []\n post_body << \"--#{PhraseApp::MULTIPART_BOUNDARY}\\r\\n\"\n post_body << \"Content-Disposition: form-data; name=\\\"file\\\"; filename=\\\"#{File.basename(params.file )}\\\"\\r\\n\"\n post_body << \"Content-Type: text/plain\\r\\n\"\n post_body << \"\\r\\n\"\n post_body << File.read(params.file)\n post_body << \"\\r\\n\"\n end\n\n if params.file_encoding != nil\n data_hash[\"file_encoding\"] = params.file_encoding\n end\n\n if params.file_format != nil\n data_hash[\"file_format\"] = params.file_format\n end\n\n if params.format_options != nil\n params.format_options.each do |key, value|\n data_hash[\"format_options\"][key] = value\n end\n end\n\n if params.locale_id != nil\n data_hash[\"locale_id\"] = params.locale_id\n end\n\n if params.locale_mapping != nil\n params.locale_mapping.each do |key, value|\n data_hash[\"locale_mapping\"][key] = value\n end\n end\n\n if params.mark_reviewed != nil\n data_hash[\"mark_reviewed\"] = (params.mark_reviewed == true)\n end\n\n if params.skip_unverification != nil\n data_hash[\"skip_unverification\"] = (params.skip_unverification == true)\n end\n\n if params.skip_upload_tags != nil\n data_hash[\"skip_upload_tags\"] = (params.skip_upload_tags == true)\n end\n\n if params.tags != nil\n data_hash[\"tags\"] = params.tags\n end\n\n if params.update_descriptions != nil\n data_hash[\"update_descriptions\"] = (params.update_descriptions == true)\n end\n\n if params.update_translations != nil\n data_hash[\"update_translations\"] = (params.update_translations == true)\n end\n\n \n \n reqHelper = PhraseApp::ParamsHelpers::BodyTypeHelper.new(data_hash, post_body)\n rc, err = PhraseApp.send_request(@credentials, \"POST\", path, reqHelper.ctype, reqHelper.body, 201)\n if err != nil\n return nil, err\n end\n \n return PhraseApp::ResponseObjects::Upload.new(JSON.load(rc.body)), err\n end", "def upload_id; end", "def create\n @vendor_upload = VendorUpload.new(vendor_id: @vendor.id, vendor_upload_type_id: vendor_upload_params['vendor_upload_type_id'], expiration_date: vendor_upload_params['expiration_date'], name: vendor_upload_params['name'], notes: vendor_upload_params['notes'])\n\n\n respond_to do |format|\n if 
@vendor_upload.save\n p @vendor_upload.uploads.create(upload: vendor_upload_params['upload'])\n format.html { redirect_to vendor_url(@vendor_upload.id), notice: 'Vendor upload was successfully created.' }\n format.json { render :show, status: :created, location: @vendor_upload }\n else\n format.html { render :new }\n format.json { render json: @vendor_upload.errors, status: :unprocessable_entity }\n end\n end\n end", "def recording_allowed_params\n\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Counts the number of quote characters in a line, excluding escaped quotes.
def count_quote_chars(line, quote_char) return 0 if line.nil? || quote_char.nil? count = 0 previous_char = '' line.each_char do |char| count += 1 if char == quote_char && previous_char != '\\' previous_char = char end count end
[ "def quotes_word_count\n @quotes.reduce(0) { | quote, num| quote + num.word_count }\n end", "def count_lines string\n string.to_s.scan(/$/).length\n end", "def count_occurances_delimiter(line)\n delimiter_count.keys.each do |key|\n #Count the occurances of delimiter in a line\n total_count_delimiter = line.substr_count(key)\n #count the occurances of delimiter between quotes inside a line to disregard them\n quoted_delimiter_count = getting_contents_of_quoted_values(line).substr_count(key)\n delimiter_count[key] += total_count_delimiter - quoted_delimiter_count\n end\n end", "def line_length(line)\n line.chomp.gsub(/\\e\\[[\\d;]*m/, '').length\n end", "def line_length(line)\n line.chomp.gsub(/\\e\\[[\\d;]*m/, '').length\n end", "def count\n csv_content.count(\"\\n\") - 1\n end", "def countQuotes\n\tFile.foreach('Quotes.csv') {}\n\t$numberofQuotes = $.\n\tputs \"There are currently: #{$numberofQuotes} quotes in the CSV\"\nend", "def quotes_count\n @quotes.count\n end", "def count\n line_count\n end", "def line_count(file)\n # line count\n line_count = 0\n File.open(file) { |f| line_count = f.read.count(\"\\n\") }\n line_count\nend", "def quotes_word_count\n total_words_in_quotes = 0\n for post in quotes\n total_words_in_quotes += post.word_count\n end\n return total_words_in_quotes\n end", "def count_lines(*args)\n args.inject(0) do |sum, el|\n sum + el.count(\"\\n\")\n end\n end", "def line_length(line); end", "def count_escrowed_raw\n return @count_escrowed_raw\n end", "def count_lines identifier\n count = 0\n\n File.open(identifier.to_cache_path) do |file|\n # Process file block-by-block to keep memory usage low.\n while block = file.read(4096)\n count += block.count(\"\\n\")\n end\n end\n\n return count\nend", "def actual_length( string_with_escapes )\n string_with_escapes.to_s.gsub(/\\e\\[\\d{1,2}m/, \"\").length\n end", "def count_lines(file)\n n = 0\n while file.gets\n n += 1\n end\n n\n end", "def paragraph_count(all_lines_from_file)\n all_lines_from_file.split(/\\n\\n/).length\nend", "def char_count\n tokens.join.size\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Ruby equivalent of the C-extension for parse_line. Parses a single line: either a CSV header or a body line. Quoting rules compared to RFC 4180 are somewhat relaxed: we are not assuming that quotes inside a field need to be doubled, and we are not assuming that all fields need to be quoted (0 is even). Works with multi-char col_sep. If header_size is given, only up to header_size fields are parsed; we use header_size for parsing the body lines to make sure we always match the number of headers in case there are trailing col_sep characters in the line. Our convention is that empty fields are returned as empty strings, not as nil. The purpose of the max_size parameter is to handle a corner case where CSV lines contain more fields than the header, in which case the remaining fields in the line are ignored.
def parse_csv_line_ruby(line, options, header_size = nil) return [] if line.nil? line_size = line.size col_sep = options[:col_sep] col_sep_size = col_sep.size quote = options[:quote_char] quote_count = 0 elements = [] start = 0 i = 0 previous_char = '' while i < line_size if line[i...i+col_sep_size] == col_sep && quote_count.even? break if !header_size.nil? && elements.size >= header_size elements << cleanup_quotes(line[start...i], quote) previous_char = line[i] i += col_sep.size start = i else quote_count += 1 if line[i] == quote && previous_char != '\\' previous_char = line[i] i += 1 end end elements << cleanup_quotes(line[start..-1], quote) if header_size.nil? || elements.size < header_size [elements, elements.size] end
[ "def line_parse(validated_line)\n return unless validated_line\n row = validated_line.split(',')\n return unless row.any?\n if @headers.empty?\n @headers = row\n else\n @data_hash.merge!(row_to_hsh(row))\n @valid_rows << @data_hash\n end\n end", "def parse_csv_line(text_line)\n columns = text_line.split(\",\")\n values = columns.each { |x| values << x.strip }\nend", "def parse_headers(row = nil)\n if @headers.nil? # header row\n @headers = case @use_headers # save headers\n # Array of headers\n when Array then @use_headers\n # HBCSV header String\n when String\n self.class.parse_line( @use_headers,\n col_sep: @col_sep,\n row_sep: @row_sep,\n quote_char: @quote_char )\n # first row is headers\n else row\n end\n\n # prepare converted and unconverted copies\n row = @headers if row.nil?\n @headers = convert_fields(@headers, true)\n @headers.each { |h| h.freeze if h.is_a? String }\n\n if @return_headers # return headers\n return self.class::Row.new(@headers, row, true)\n elsif not [Array, String].include? @use_headers.class # skip to field row\n return shift\n end\n end\n\n self.class::Row.new(@headers, convert_fields(row)) # field row\n end", "def parse_tsv_line_by_line( expected_row_size, tsv )\n parsed_data = []\n\n data_by_line = tsv.split(\"\\n\")\n data_by_line.each do |line|\n elements = []\n\n if CSV.const_defined? :Reader\n # Ruby < 1.9 CSV code\n elements = CSV::parse_line( line, \"\\t\" ) || []\n else\n # Ruby >= 1.9 CSV code\n begin\n elements = CSV::parse_line( line, { :col_sep => \"\\t\" } ) || []\n rescue CSV::MalformedCSVError => e\n elements = []\n end\n end\n\n if elements.size == 0\n # This is a bad line (causing the above Exception), try and use split to recover.\n elements = line.split(\"\\t\")\n if line =~ /\\t$/\n # If the last attribute resturn is empty add a nil\n # value to the array as it would have been missed\n # by the split function!\n elements.push(nil)\n end\n\n # Substitute blank strings for nils\n elements.map! do |elem|\n if elem === \"\"\n nil\n else\n elem\n end\n end\n\n # Add a safety clause...\n if elements.size === expected_row_size\n parsed_data.push(elements)\n end\n else\n parsed_data.push(elements)\n end\n end\n\n return parsed_data\n end", "def validate_csv_line( line, record_type )\n schema = record_schema( EdiHelper.current_out_flow, record_type )\n values = line.chomp.split(@field_delimiter)\n schema.each_with_index do |field, index|\n if values[index].nil? 
|| '' == values[index] || '\"\"' == values[index]\n raise EdiValidationError, \"value required for field: #{field.to_s}\" unless field['required'] && 'false' == field['required']\n end\n end\n end", "def test_s_parseAndCreate\n colSize = 8\n csvStr = \"foo,!!!foo!!!,!foo,bar!,!!!!!!,!!,,!\\r!,!\\r\\n!\\nNaHi,!!!Na!!!,!Na,Hi!,!\\r.\\n!,!\\r\\n\\n!,!!!!,!\\n!,!\\r\\n!\".gsub!('!', '\"')\n csvStrTerminated = csvStr + \"\\n\"\n\n myStr = csvStr.dup\n res1 = []; res2 = []\n idx = 0\n col, idx = CSV::parse_row(myStr, 0, res1)\n col, idx = CSV::parse_row(myStr, idx, res2)\n\n buf = ''\n col = CSV::generate_row(res1, colSize, buf)\n col = CSV::generate_row(res2, colSize, buf)\n assert_equal(csvStrTerminated, buf)\n\n parsed = []\n CSV::Reader.parse(csvStrTerminated) do |row|\n parsed << row\n end\n\n buf = ''\n CSV::Writer.generate(buf) do |writer|\n parsed.each do |row|\n\twriter.add_row(row)\n end\n end\n assert_equal(csvStrTerminated, buf)\n\n buf = ''\n CSV::Writer.generate(buf) do |writer|\n parsed.each do |row|\n\twriter << row\n end\n end\n assert_equal(csvStrTerminated, buf)\n end", "def enough_fields?(line)\n ncols = line.split(SPLIT_PATTERN).length\n return true if fmt.include?('T') && ncols >= fmt.length\n return true if ncols == fmt.length\n\n raise(WavefrontCli::Exception::UnparseableInput,\n format('Expected %<expected>s fields, got %<got>s',\n expected: fmt.length,\n got: ncols))\n end", "def parse_blank_line; end", "def parse(input)\n rownum = 0\n @header = nil if first_line_is_header\n lines = input.each_line.to_a\n lines.each do |line|\n line = line.encode('utf-8')\n rownum += 1\n\n next if rownum <= skip_initial_rows\n next if rownum > lines.size - skip_trailing_rows\n\n values = line.chomp.split(separator)\n\n if first_line_is_header and @header.nil?\n @header = values\n next\n end\n\n begin\n @entries << make_entry(values)\n rescue RuntimeError => e\n raise ParseStreamError.new(\"line #{rownum}: #{e.message}\", e, rownum, line)\n end\n end\n end", "def parse_line_for_csv_record(page, start_line, comuna)\r\n page_as_csv = \"\"\r\n last_record = \"\"\r\n (start_line...page.length).each {|i|\r\n print \"\\nline #{i} of #{page.length}\"\r\n if (page[i] =~ /\\s/) != 0\r\n if Comunas.include?(comuna)\r\n if !Comunas[comuna].any? { |c| page[i].strip =~ /#{Regexp.quote(c)}\\s\\s\\s+./} & ((page[i+1] =~ /\\n/) == 0)\r\n name = last_record[0...last_record.index(';')]\r\n remain = last_record[last_record.index(';')...last_record.length]\r\n new_record = name + ' ' + page[i].strip + remain\r\n page_as_csv.gsub!(last_record, new_record)\r\n elsif (!Comunas[comuna].any? { |c| page[i].strip =~ /#{Regexp.quote(c)}\\s\\s\\s+./} | !(Comunas[comuna].any? {|c| page[i].rindex(c).nil? ? false : page[i].index(/(\\d+\\.)?(\\d+\\.)?\\d+-\\S+/) < page[i].rindex(c) }))\r\n string = page[i].strip\r\n counter = i + 1\r\n until ((Comunas[comuna].any? { |c| string =~ /#{Regexp.quote(c)}\\s\\s\\s+./} ) & !(string =~ /(\\d+\\.)?(\\d+\\.)?\\d+-\\S+/).nil? & Comunas[comuna].any? {|c| string.rindex(c).nil? ? false : string.index(/(\\d+\\.)?(\\d+\\.)?\\d+-\\S+/) < string.rindex(c) } ) | (counter == page.length)\r\n if page[counter][0] != \"\\n\"\r\n string += ' ' + page[counter].strip\r\n end\r\n counter += 1\r\n end\r\n last_record = string_to_csv(string, Comunas[comuna])\r\n page_as_csv += last_record\r\n elsif (Comunas[comuna].any? 
{ |c| page[i].include?(c)} ) & !(page[i] =~ /(\\d+\\.)?(\\d+\\.)?\\d+-\\S+/).nil?\r\n last_record = string_to_csv(page[i], Comunas[comuna])\r\n page_as_csv += last_record\r\n end\r\n else\r\n if !page[i].include?(comuna) & ((page[i+1] =~/\\n/) == 0)\r\n name = last_record[0...last_record.index(';')]\r\n remain = last_record[last_record.index(';')...last_record.length]\r\n new_record = name + ' ' + page[i].strip + remain\r\n page_as_csv.gsub!(last_record, new_record)\r\n elsif ((page[i] =~ /#{Regexp.quote(comuna)}\\s\\s\\s+./).nil? | ( page[i].rindex(comuna).nil? ? false : page[i].index(/(\\d+\\.)?(\\d+\\.)?\\d+-\\S+/) > page[i].rindex(comuna)))\r\n string = page[i].strip\r\n counter = i + 1\r\n until (!(string =~ /#{Regexp.quote(comuna)}\\s\\s\\s+./).nil? & !(string =~ /(\\d+\\.)?(\\d+\\.)?\\d+-\\S+/).nil? & (string.rindex(comuna).nil? ? false : string.index(/(\\d+\\.)?(\\d+\\.)?\\d+-\\S+/) < string.rindex(comuna)) ) | (counter == page.length)\r\n if page[counter][0] != \"\\n\"\r\n string += ' ' + page[counter].strip\r\n end\r\n counter += 1\r\n end\r\n last_record = string_to_csv(string, comuna)\r\n page_as_csv += last_record\r\n elsif (page[i].include?(comuna)) & !(page[i] =~ /(\\d+\\.)?(\\d+\\.)?\\d+-\\S+/).nil?\r\n last_record = string_to_csv(page[i], comuna)\r\n page_as_csv += last_record\r\n end\r\n end\r\n end\r\n }\r\n page_as_csv\r\nend", "def process_header_line(line)\n # override in subclass if you like\n end", "def pbGetCsvRecord(rec,lineno,schema)\n record = []\n repeat = false\n start = 0\n if schema[1][0,1]==\"*\"\n repeat = true\n start = 1\n end\n begin\n for i in start...schema[1].length\n chr = schema[1][i,1]\n case chr\n when \"u\"\n record.push(csvPosInt!(rec,lineno))\n when \"v\"\n field = csvPosInt!(rec,lineno)\n raise _INTL(\"Field '{1}' must be greater than 0\\r\\n{2}\",field,FileLineData.linereport) if field==0\n record.push(field)\n when \"i\"\n record.push(csvInt!(rec,lineno))\n when \"U\", \"I\"\n field = csvfield!(rec)\n if field==\"\"\n record.push(nil)\n elsif !field[/^\\d+$/]\n raise _INTL(\"Field '{1}' must be 0 or greater\\r\\n{2}\",field,FileLineData.linereport)\n else\n record.push(field.to_i)\n end\n when \"x\"\n field = csvfield!(rec) \n if !field[/^[A-Fa-f0-9]+$/]\n raise _INTL(\"Field '{1}' is not a hexadecimal number\\r\\n{2}\",field,FileLineData.linereport)\n end\n record.push(field.hex)\n when \"s\"\n record.push(csvfield!(rec))\n when \"S\"\n field = csvfield!(rec)\n record.push((field==\"\") ? nil : field)\n when \"n\" # Name\n field = csvfield!(rec)\n if !field[/^(?![0-9])\\w+$/]\n raise _INTL(\"Field '{1}' must contain only letters, digits, and\\r\\nunderscores and can't begin with a number.\\r\\n{2}\",field,FileLineData.linereport)\n end\n record.push(field)\n when \"N\" # Optional name\n field = csvfield!(rec)\n if field==\"\"\n record.push(nil)\n else\n if !field[/^(?![0-9])\\w+$/]\n raise _INTL(\"Field '{1}' must contain only letters, digits, and\\r\\nunderscores and can't begin with a number.\\r\\n{2}\",field,FileLineData.linereport)\n end\n record.push(field)\n end\n when \"b\"\n record.push(csvBoolean!(rec,lineno))\n when \"e\"\n record.push(csvEnumField!(rec,schema[2+i-start],\"\",FileLineData.linereport))\n end\n end\n break if repeat && rec==\"\"\n end while repeat\n return (schema[1].length==1) ? 
record[0] : record\nend", "def parse_csv(path)\n puts \"parse csv\"\n FasterCSV.read(path, :headers => true)\n end", "def split_bjobs_output_line(line, num_columns:, jobname_column_idx:)\n values = line.strip.split\n\n if(values.count > num_columns)\n # if the line has more fields than the number of columns, that means one\n # field value has spaces, so it was erroneously split into\n # multiple fields; we assume that is the jobname field, and we will\n # collapse the fields into a single field\n #\n # FIXME: assumes jobname_column_idx is not first or last item\n j = jobname_column_idx\n\n # e.g. if 15 fields and jobname is 7th field\n # values = values[0..5] + [values[6..-9].join(\" \")] + values[-8..-1]\n values = values[0..(j-1)] + [values[j..(j-num_columns)].join(\" \")] + values[(j+1-num_columns)..-1]\n end\n\n values\n end", "def parse_header(raw)\n header = Hash.new([].freeze)\n field = nil\n raw.each_line{|line|\n case line\n when /^([A-Za-z0-9!\\#$%&'*+\\-.^_`|~]+):\\s*(.*?)\\s*\\z/om\n field, value = $1, $2\n field.downcase!\n header[field] = [] unless header.has_key?(field)\n header[field] << value\n when /^\\s+(.*?)\\s*\\z/om\n value = $1\n unless field\n raise HTTPStatus::BadRequest, \"bad header '#{line}'.\"\n end\n header[field][-1] << \" \" << value\n else\n raise HTTPStatus::BadRequest, \"bad header '#{line}'.\"\n end\n }\n header.each{|key, values|\n values.each(&:strip!)\n }\n header\n end", "def test_getCSV_emptyValue\n \n #empty field at last slot\n emptyAtEnd = get_CSV_line(\"Samantha,25,\")\n assert_equal [\"Samantha\",\"25\",], emptyAtEnd\n \n #empty field in middle slot\n emptyAtMid = get_CSV_line(\"Samantha,,92\")\n assert_equal [\"Samantha\",\"\",\"92\"], emptyAtMid\n \n #empty field in first slot (beginning)\n emptyAtBeg = get_CSV_line(\",25,92\")\n assert_equal [\"\",\"25\",\"92\"], emptyAtBeg\n\n #empty field in all slots\n emptyAtAll = get_CSV_line(\",,\") \n assert_equal [], emptyAtAll\n \n end", "def parse_csv_file_content(content)\n deep_compact(CSV.parse(content, col_sep: \"\\t\"))\nend", "def proccess_lines(input)\n CSV.foreach(input, headers: true, header_converters: :symbol) do |row|\n line_parser = LineParser.new(row)\n\n if line_parser.valid?\n output_file << line_parser.to_csv\n elsif !line_parser.empty?\n error_file << line_parser.to_csv\n end\n end\n end", "def split_header; end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
If file has headers, then guesses column separator from headers. Otherwise guesses column separator from contents. Raises exception if none is found.
def guess_column_separator(filehandle, options) skip_lines(filehandle, options) delimiters = [',', "\t", ';', ':', '|'] line = nil has_header = options[:headers_in_file] candidates = Hash.new(0) count = has_header ? 1 : 5 count.times do line = readline_with_counts(filehandle, options) delimiters.each do |d| candidates[d] += line.scan(d).count end rescue EOFError # short files break end rewind(filehandle) if candidates.values.max == 0 # if the header only contains return ',' if line.chomp(options[:row_sep]) =~ /^\w+$/ raise SmarterCSV::NoColSepDetected end candidates.key(candidates.values.max) end
[ "def detect_col_sep\n available_col_seps.each do |col_sep|\n begin\n SmarterCSV.process filename, chunk_size: 1, headers_in_file: false, user_provided_headers: [:timestamp, :value], col_sep: col_sep do |chunk|\n data = chunk.first\n break if data[:timestamp].to_s.include?(';')\n timestamp = UTC.parse(data[:timestamp])\n value = parse_float(data[:value])\n return col_sep if timestamp.present? && value.present?\n break\n end\n rescue\n end\n end\n nil\n end", "def inferred_separator\n SUPPORTED_SEPARATORS.each do |sep|\n return sep if data.scan(sep).length > 0\n end\n\n raise UnknownFormat.new(@path)\n end", "def delimiter\n self.options[:col_sep]\n end", "def determine_encodings!(safe_path, col_sep = nil)\n # delimiter encoding => # FasterCSV encoding string\n supported_encodings = {\n 'UTF-8' => 'bom|utf-8',\n 'Windows-1252' => 'windows-1252:utf-8'\n }\n\n successful_options = nil\n supported_encodings.each do |delimiter_encoding, csv_encoding|\n begin\n options = {\n :col_sep => (col_sep || ',').force_encoding(delimiter_encoding),\n :encoding => csv_encoding\n }\n\n row_num = 0\n # Iterate through the file; if we reach the end, this encoding worked:\n CSVLibrary.foreach(safe_path, options) { |_line| row_num += 1 }\n rescue ArgumentError => e\n next if e.message =~ /invalid byte sequence/ # This encoding didn't work\n raise(e)\n rescue CSVLibrary::MalformedCSVError => e\n description = (col_sep ? col_sep.inspect + ' delimited' : 'CSV')\n\n raise(CSVLibrary::MalformedCSVError, \"Invalid #{description} format \" \\\n \"on row #{row_num + 1} of #{::File.basename(safe_path)}. Original: #{e.message}\")\n end\n\n # We got this far => encoding choice worked:\n successful_options = options\n break\n end\n\n # We tried them all, and none worked:\n unless successful_options\n fail \"None of the encodings #{supported_encodings.values.inspect} were successful!\"\n end\n\n successful_options\n end", "def determine_line_ending(file) \n\t\tdelimiter = nil\n\t\tsaw_cr = false\n\t\tfile.each_byte do |c| \n\t\t\tif c == ?\\n \n\t\t\t\tdelimiter = nil # Let CSV take care of it\n\t\t\t\tbreak\n\t\t\telsif c == ?\\r then\n\t\t\t\tsaw_cr = true\n\t\t\telsif saw_cr then\n\t\t\t\tif c == ?\\n then\n\t\t\t\t\tdelimiter = nil # Let CSV take care of it\n\t\t\t\t\tbreak\n\t\t\t\telse\n\t\t\t\t\tdelimiter = ?\\r\n\t\t\t\t\tbreak\n\t\t\t\tend\n\t\t\tend\n\t\tend\n\n\t\tfile.rewind\n\t\treturn delimiter\n\tend", "def header\n csv.first.split(sep) # returns first element of array, and then it divides string into substrings based on a delimiter, returning an array of these substrings.\n end", "def column_headers(header_style = :grid)\n headers = []\n if header_style == :file\n row = row_dimensions\n (0...column_count).each do |col_num|\n unless @suppress_cols && @suppress_cols[col_num]\n cell = (0...column_dimension_count).map do |col_dim|\n column_members(col_num)[col_dim]\n end\n row << cell.join(':')\n end\n end\n headers << row\n elsif header_style == :grid\n row_spacers = Array.new(row_dimension_count)\n (0...column_dimension_count).each do |col_dim|\n row = row_spacers.dup\n (0...column_count).each do |col_num|\n unless @suppress_cols && @suppress_cols[col_num]\n row << column_members(col_num)[col_dim]\n end\n end\n headers << row\n end\n end\n headers\n end", "def separator_with_most_columns\n counts = count_columns_for_separators\n return if counts.empty?\n\n top_separators = counts.max_by(&:first).last\n top_separators.first if top_separators.length == 1\n end", "def getCSVContent\r\n begin\r\n # use of 
.lines[2..-1] to remove the header lines of the file\r\n # gsub to replace double quotes by two double quotes (csv formating)\r\n csv_text = File.read(\"../../data/raw/#{@file_name}\", encoding: \"#{@file_encoding}:utf-8\").lines[2..-1].join.gsub(/\\\\\"/,'\"\"')\r\n\r\n rescue\r\n puts \"Wrong file name or encoding\"\r\n return -1\r\n end\r\n #convert csv_text into a csv array with headers\r\n @csv_array = CSV.parse(csv_text, headers: true, header_converters: lambda do |h|\r\n #rename column name\r\n @replacements.each{|replacement| h.gsub!(replacement[0], replacement[1]) }\r\n return h\r\n end\r\n )\r\n end", "def read_column_order_from(fpath)\n result = ColumnOrder.new\n begin\n prev_line = nil\n fpath.open {|inf| inf.each_line do |line|\n if prev_line && /^-+(?:\\|-+)*$/ =~ line.gsub(' ', '')\n result.read_order_from_headers(prev_line)\n break\n end\n prev_line = line\n end}\n rescue Errno::ENOENT\n # No problem\n end\n return result\n end", "def determine_encodings!(safe_path)\n # delimiter encoding => # FasterCSV encoding string\n supported_encodings = {\n 'UTF-8' => 'bom|utf-8',\n 'Windows-1252' => 'windows-1252:utf-8'\n }\n\n successful_options = nil\n supported_encodings.each do |delimiter_encoding, csv_encoding|\n begin\n col_sep = @options['col_sep']\n options = {\n :col_sep => (col_sep || ',').force_encoding(delimiter_encoding),\n :encoding => csv_encoding\n }\n\n row_num = 0\n # Iterate through the file; if we reach the end, this encoding worked:\n CSVLibrary.foreach(safe_path, options) { |_line| row_num += 1 }\n rescue ArgumentError => e\n next if e.message =~ /invalid byte sequence/ # This encoding didn't work\n raise(e)\n rescue CSVLibrary::MalformedCSVError => e\n description = (col_sep ? col_sep.inspect + ' delimited' : 'CSV')\n\n raise(CSVLibrary::MalformedCSVError, \"Invalid #{description} format \" \\\n \"on row #{row_num + 1} of #{::File.basename(safe_path)}. Original: #{e.message}\")\n end\n\n # We got this far => encoding choice worked:\n successful_options = options\n break\n end\n\n # We tried them all, and none worked:\n unless successful_options\n fail \"None of the encodings #{supported_encodings.values.inspect} were successful!\"\n end\n\n successful_options\n end", "def cell_separator(value)\n possibly_header = value.is_a? Symbol\n\n if @headers.empty?\n unless possibly_header\n raise SyntaxError, 'The table must start with a header row of symbols'\n end\n add_header value\n\n elsif cells_empty?\n if possibly_header\n add_header value\n else\n append_cell value\n end\n else\n append_cell value\n end\n\n self\n end", "def skip_headers\n start_of_file? ? (@headers = read_row) : false\n end", "def headers?\n !self.data.strip.each.first.split(column_seperator).any? 
{ |column| Date.parse(column) rescue false }\n end", "def identify_delimiter(filename_or_sample)\n #filename_or_sample input can be either a File or an Array or a string - Return delimiter for File or an Array of strings (if found)\n if filename_or_sample.class == String\n if File::exists?(filename_or_sample)\n current_line_number = 0\n File.foreach(filename_or_sample) do |line|\n count_occurances_delimiter(line)\n current_line_number += 1\n if current_line_number > 3\n break\n end\n end\n else\n # count_occurances_delimiter(filename_or_sample)\n return FileNotFound.new\n end\n return_plausible_delimiter\n elsif filename_or_sample.class == Array\n filename_or_sample.each do |line|\n count_occurances_delimiter(line)\n end\n return_plausible_delimiter\n else\n InvalidInput.new\n end\n end", "def load_scenario_csv_schema_headers\n # rubocop: disable Security/Open\n scenario_csv_schema = open(File.expand_path('../default_reports/schema/scenario_csv_columns.txt', File.dirname(__FILE__)))\n # rubocop: enable Security/Open\n\n scenario_csv_schema_headers = []\n File.readlines(scenario_csv_schema).each do |line|\n l = line.delete(\"\\n\")\n a = l.delete(\"\\t\")\n r = a.delete(\"\\r\")\n scenario_csv_schema_headers << r\n end\n return scenario_csv_schema_headers\n end", "def check4sep (cell, sep, sheetname, row, column)\n if !(cell.nil?) and cell.to_s.count(sep)>0\n puts %|separator found in sheet #{sheetname} : (#{row}, #{column}) : \"#{cell}\"|\n end\nend", "def header_char_for( filename )\n\t\tcase File.extname( filename )\n\t\twhen '.md' then return '#'\n\t\twhen '.rdoc' then return '='\n\t\twhen ''\n\t\t\tif filename == 'Rakefile'\n\t\t\t\treturn '#'\n\t\t\tend\n\t\tend\n\n\t\traise \"Don't know what header character is appropriate for %s\" % [ filename ]\n\tend", "def headerFetcher(filename)\n headers = []\n f = File.open(filename,'r')\n f.each_with_index do |l, i|\n break if i > 0\n headers = l.split(\"\\t\")\n headers[-1] = headers[-1][0..-2]\n end\n f.close\n return headers\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
GET /litigantes GET /litigantes.json
def index @litigantes = Litigante.all end
[ "def show\n @litra = Litra.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @litra }\n end\n end", "def index\n @lesuurs = Lesuur.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @lesuurs }\n end\n end", "def show\n @lieu = Lieu.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @lieu }\n end\n end", "def show\n @leito = Leito.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @leito }\n end\n end", "def index\n @lemurs = Lemur.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @lemurs }\n end\n end", "def index\n @litters = Litter.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @litters }\n end\n end", "def index\n @lieus = Lieu.paginate(page: params[:page], per_page: 20)\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @lieus }\n end\n end", "def show\n @lote = Lote.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @lote }\n end\n end", "def show\n @lei = Lei.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @lei }\n end\n end", "def show\n @lancamento = Lancamento.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @lancamento }\n end\n end", "def index\n @lokasi_atms = LokasiAtm.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @lokasi_atms }\n end\n end", "def index\n @lacs = Lac.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @lacs }\n end\n end", "def show\n @livre = Livre.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @livre }\n end\n end", "def index\n @tuantis = Tuanti.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @tuantis }\n end\n end", "def index\n @lophs = Loph.all\n respond_to do |format|\n format.html\n format.json { render json: @lophs}\n end\n end", "def index\n @conseilles = Conseille.all\n respond_to do |format|\n format.html\n format.json { render json: @conseilles}\n end\n end", "def index\n @villas = Villa.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @villas }\n end\n end", "def index\n @tenures = Tenure.all\n render json: @tenures\n end", "def show\n @loteo = Loteo.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @loteo }\n end\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
POST /litigantes POST /litigantes.json
def create @litigante = Litigante.new(litigante_params) respond_to do |format| if @litigante.save format.html { redirect_to @litigante, notice: 'Litigante was successfully created.' } format.json { render :show, status: :created, location: @litigante } else format.html { render :new } format.json { render json: @litigante.errors, status: :unprocessable_entity } end end end
[ "def create\n @status = [[\"Nouveau\"], [\"en cours de traitement\"], [\"traité\"]]\n @litige_params = user_litige_params\n if admin_signed_in?\n @litige_params = litige_params\n end\n @litige_params = {identifiant: Litige.idenfifiant_generator( 2, 3)}.merge(@litige_params)\n @litige = Litige.new(@litige_params)\n respond_to do |format|\n if @litige.save\n format.html { redirect_to @litige, notice: 'Litige was successfully created.' }\n format.json { render :show, status: :created, location: @litige }\n else\n format.html { render :new, locals:{status: @status} }\n format.json { render json: @litige.errors, status: :unprocessable_entity }\n end\n end\n \n end", "def create\n @leito = Leito.new(params[:leito])\n\n respond_to do |format|\n if @leito.save\n format.html { redirect_to @leito, notice: 'Leito was successfully created.' }\n format.json { render json: @leito, status: :created, location: @leito }\n else\n format.html { render action: \"new\" }\n format.json { render json: @leito.errors, status: :unprocessable_entity }\n end\n end\n end", "def create_allele(a)\n allele_data = request(\n :url => \"alleles.json\",\n :method => \"post\",\n :payload => { :allele => a }.to_json\n )\n allele = JSON.parse(allele_data)[\"allele\"]\n return allele\nend", "def create\n @loteriat = Loteriat.new(loteriat_params)\n\n respond_to do |format|\n if @loteriat.save\n format.html { redirect_to @loteriat, notice: 'Loteriat was successfully created.' }\n format.json { render :show, status: :created, location: @loteriat }\n else\n format.html { render :new }\n format.json { render json: @loteriat.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @lei = Lei.new(params[:lei])\n\n respond_to do |format|\n if @lei.save\n format.html { redirect_to leis_path, notice: \"Lei criada com sucesso.\" }\n format.json { render json: @lei, status: :created, location: @lei }\n else\n format.html { render action: \"new\" }\n format.json { render json: @lei.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @luta = Luta.new(luta_params)\n\n respond_to do |format|\n if @luta.save\n format.html { redirect_to @luta, notice: 'Luta was successfully created.' }\n format.json { render :show, status: :created, location: @luta }\n else\n format.html { render :new }\n format.json { render json: @luta.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @terreno = Terreno.all\n @loteamento = Loteamento.new(loteamento_params)\n\n respond_to do |format|\n if @loteamento.save\n format.html { redirect_to @loteamento, notice: 'Loteamento was successfully created.' 
}\n format.json { render :show, status: :created, location: @loteamento }\n else\n format.html { render :new }\n format.json { render json: @loteamento.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @lancamento = Lancamento.new(lancamento_params)\n\n respond_to do |format|\n if @lancamento.save\n format.html { redirect_to lancamentos_path, notice: @@titulo + t('msg.salva') }\n format.json { render :show, status: :created, location: @lancamento }\n else\n format.html { redirect_to lancamentos_path, alert: @lancamento.errors.messages }\n format.json { render json: @lancamento.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @estudiante = Estudiante.new(params[:estudiante])\n\n if @estudiante.save\n render json: @estudiante, status: :created, location: @estudiante\n else\n render json: @estudiante.errors, status: :unprocessable_entity\n end\n end", "def create\n @lei = Lei.new(params[:lei])\n\n respond_to do |format|\n if @lei.save\n format.html { redirect_to @lei, notice: 'Lei was successfully created.' }\n format.json { render json: @lei, status: :created, location: @lei }\n else\n format.html { render action: \"new\" }\n format.json { render json: @lei.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @lancamento = Lancamento.new(params[:lancamento])\n\n respond_to do |format|\n if @lancamento.save\n format.html { redirect_to @lancamento, notice: 'Lancamento was successfully created.' }\n format.json { render json: @lancamento, status: :created, location: @lancamento }\n else\n format.html { render action: \"new\" }\n format.json { render json: @lancamento.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @lutador = Lutador.new(lutador_params)\n\n respond_to do |format|\n if @lutador.save\n format.html { redirect_to @lutador, notice: 'Lutador was successfully created.' }\n format.json { render :show, status: :created, location: @lutador }\n else\n format.html { render :new }\n format.json { render json: @lutador.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @tela = Tela.new(tela_params)\n\n respond_to do |format|\n if @tela.save\n format.html { redirect_to @tela, notice: 'Tela was successfully created.' }\n format.json { render :show, status: :created, location: @tela }\n else\n format.html { render :new }\n format.json { render json: @tela.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @lent = Lent.new(lent_params)\n\n respond_to do |format|\n if @lent.save\n format.html { redirect_to @lent, notice: 'Новость добавлена.' }\n format.json { render :show, status: :created, location: @lent }\n else\n format.html { render :new }\n format.json { render json: @lent.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @tecnico = Tecnico.new(params[:tecnico])\n\n respond_to do |format|\n if @tecnico.save\n format.html { redirect_to @tecnico, :notice => 'Tecnico was successfully created.' }\n format.json { render :json => @tecnico, :status => :created, :location => @tecnico }\n else\n format.html { render :action => \"new\" }\n format.json { render :json => @tecnico.errors, :status => :unprocessable_entity }\n end\n end\n end", "def create\n @etnia = Etnia.new(params[:etnia])\n\n respond_to do |format|\n if @etnia.save\n format.html { redirect_to @etnia, notice: 'Etnia was successfully created.' 
}\n format.json { render json: @etnia, status: :created, location: @etnia }\n else\n format.html { render action: \"new\" }\n format.json { render json: @etnia.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @lote = Lote.new(params[:lote])\n\n respond_to do |format|\n if @lote.save\n format.html { redirect_to @lote, notice: 'Lote was successfully created.' }\n format.json { render json: @lote, status: :created, location: @lote }\n else\n format.html { render action: \"new\" }\n format.json { render json: @lote.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @lutein = Lutein.new(lutein_params)\n\n respond_to do |format|\n if @lutein.save\n format.html { redirect_to @lutein, notice: 'Lutein was successfully created.' }\n format.json { render :show, status: :created, location: @lutein }\n else\n format.html { render :new }\n format.json { render json: @lutein.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @lider_treinamento = LiderTreinamento.new(lider_treinamento_params)\n\n respond_to do |format|\n if @lider_treinamento.save\n format.html { redirect_to @lider_treinamento, notice: 'Lider treinamento was successfully created.' }\n format.json { render :show, status: :created, location: @lider_treinamento }\n else\n format.html { render :new }\n format.json { render json: @lider_treinamento.errors, status: :unprocessable_entity }\n end\n end\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
PATCH/PUT /litigantes/1 PATCH/PUT /litigantes/1.json
def update respond_to do |format| if @litigante.update(litigante_params) format.html { redirect_to @litigante, notice: 'Litigante was successfully updated.' } format.json { render :show, status: :ok, location: @litigante } else format.html { render :edit } format.json { render json: @litigante.errors, status: :unprocessable_entity } end end end
[ "def api_patch(path, data = {})\n api_request(:patch, path, :data => data)\n end", "def patch(path, params: {}, headers: {})\n request_json :patch, path, params, headers\n end", "def patch *args\n make_request :patch, *args\n end", "def update\n @leito = Leito.find(params[:id])\n\n respond_to do |format|\n if @leito.update_attributes(params[:leito])\n format.html { redirect_to @leito, notice: 'Leito was successfully updated.' }\n format.json { head :ok }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @leito.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @pelicula = Pelicula.find(params[:id])\n @pelicula.update(update_params)\n render json: @pelicula, status: :ok\n end", "def update\n @core_termo_lacre = Core::TermoLacre.find(params[:id])\n\n respond_to do |format|\n if @core_termo_lacre.update_attributes(params[:core_termo_lacre])\n format.html { redirect_to @core_termo_lacre, notice: 'Termo lacre was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @core_termo_lacre.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @loteriat.update(loteriat_params)\n format.html { redirect_to @loteriat, notice: 'Loteriat was successfully updated.' }\n format.json { render :show, status: :ok, location: @loteriat }\n else\n format.html { render :edit }\n format.json { render json: @loteriat.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @livro = Livro.find(params[:id])\n\n respond_to do |format|\n if @livro.update_attributes(params[:livro])\n format.html { redirect_to @livro, :notice => 'Livro was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render :action => \"edit\" }\n format.json { render :json => @livro.errors, :status => :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @lit.update(lit_params)\n format.html { redirect_to @lit, notice: 'Lit was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @lit.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @oferta = Oferta.find(params[:id])\n\n respond_to do |format|\n if @oferta.update_attributes(params[:oferta])\n format.html { redirect_to [:admin, @oferta], :notice => 'Exemplo was successfully updated.' }\n format.json { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.json { render :json => @oferta.errors, :status => :unprocessable_entity }\n end\n end\n end", "def rest_edit(path, options={}, &blk)\n callback = Proc.new { |*args|\n @object = yield(*args) or pass\n rest_params.each { |k, v| @object.send :\"#{k}=\", v unless k == 'id' }\n\n return 400, @object.errors.to_json unless @object.valid?\n\n @object.save\n rest_respond @object\n }\n\n # Make it work with `Backbone.emulateHTTP` on.\n put path, &callback\n post path, &callback\n end", "def update\n if signed_in?\n @litra = Litra.find(params[:id])\n\n respond_to do |format|\n if @litra.update_attributes(params[:litra])\n format.html { redirect_to @litra, notice: 'Litra was successfully updated.' 
}\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @litra.errors, status: :unprocessable_entity }\n end\n end\n end\nend", "def update\n @veiculo = Veiculo.find(params[:id])\n\n respond_to do |format|\n if @veiculo.update_attributes(params[:veiculo])\n format.html { redirect_to @veiculo, :notice => 'Veiculo was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render :action => \"edit\" }\n format.json { render :json => @veiculo.errors, :status => :unprocessable_entity }\n end\n end\n end", "def update\n @kolegij = Kolegij.find(params[:id])\n\n respond_to do |format|\n if @kolegij.update_attributes(params[:kolegij])\n format.html { redirect_to @kolegij, notice: 'Kolegij was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @kolegij.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @lancamento = Lancamento.find(params[:id])\n\n respond_to do |format|\n if @lancamento.update_attributes(params[:lancamento])\n format.html { redirect_to @lancamento, notice: 'Lancamento was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @lancamento.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @tela.update(tela_params)\n format.html { redirect_to @tela, notice: 'Tela was successfully updated.' }\n format.json { render :show, status: :ok, location: @tela }\n else\n format.html { render :edit }\n format.json { render json: @tela.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @vitola.update(vitola_params)\n format.html { redirect_to @vitola, notice: 'Vitola was successfully updated.' }\n format.json { render :show, status: :ok, location: @vitola }\n else\n format.html { render :edit }\n format.json { render json: @vitola.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @lectivo = Lectivo.find(params[:id])\n\n respond_to do |format|\n if @lectivo.update_attributes(params[:lectivo])\n format.html { redirect_to @lectivo, notice: 'Lectivo was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @lectivo.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @livre.update!(livre_params)\n format.html { redirect_to @livre, notice: 'Le livre a été modifié avec succès..' }\n format.json { render :show, status: :ok, location: @livre }\n else\n format.html { render :edit }\n format.json { render json: @livre.errors, status: :unprocessable_entity }\n end\n end\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Destroys the like tied to this user on this likeable model
def unliked_by(user) self.send(self.class.like_label.tableize.to_sym).find_by_user_id(user.id).destroy rescue false end
[ "def destroy\n @user_like = UserLike.find(params[:id])\n @user_like.destroy\n\n respond_to do |format|\n format.html { redirect_to(scaffold_user_likes_url) }\n format.xml { head :ok }\n end\n end", "def destroy\n @user_like = UserLike.find(params[:id])\n @user_like.destroy\n\n respond_to do |format|\n format.html { redirect_to user_likes_url }\n format.json { head :no_content }\n end\n end", "def destroy\n\t\tlike = Like.where(secret: Secret.find(params[:id]), user: User.find(session[:user_id]))\n\t\tlike.destroy_all if User.find(session[:user_id]) == current_user\n\t\tredirect_to :back\n\tend", "def unlike!(liking_user)\n this_like = Like.find(:all, :conditions => { :user_id => liking_user.id,\n :post_id => self.id } )\n if (!this_like.nil?)\n if (liking_user != self.user)\n self.user.unlike!\n end\n self.like_count = self.like_count - 1\n self.likes.delete(this_like)\n song.unlike!\n self.save\n logger.debug \"Post :: User #{liking_user.username} unliked post \" <<\n \"#{self.attributes.inspect}\"\n end\n end", "def remove_movie_from_likes\n @user = User.find(params[:user_id])\n binding.pry\n @user.likes.delete(params[:id])\n @user.save\n redirect_to user_path(@user)\n end", "def destroy\n @like_special_part_user_user.destroy\n respond_to do |format|\n format.html { redirect_to like_special_part_user_users_url }\n format.json { head :no_content }\n end\n end", "def unlike!(articulo, like_id)\n articulo.like = articulo.like-1\n articulo.save\n like = self.likes.find_by_id(like_id)\n like.destroy!\n end", "def destroy\n @storylike.destroy\n end", "def like_removed(user_id, like)\n like_modified(user_id, like)\n end", "def remove_likers(likeable)\n self.where(:likeable_type => likeable.class.name.classify).\n where(:likeable_id => likeable.id).destroy_all\n end", "def destroy\n @like = @comment.likes.find(params[:id])\n @like.destroy\n respond_to do |format|\n format.html { redirect_to proposal_comment_likes_path(@proposal, @comment), notice: 'Like was successfully destroyed.' }\n format.json { head :no_content }\n end\n end", "def destroy\n @post_like = PostLike.find(params[:id])\n @post_like.destroy\n\n respond_to do |format|\n format.html { redirect_to post_likes_url }\n format.json { head :no_content }\n end\n end", "def unfavorite\n turing_machine = TuringMachine.find(params[:id])\n user = User.find(session[:user_id])\n if user.like_ids.include? turing_machine.id\n turing_machine.decrease_users_count\n user.likes.delete turing_machine\n end\n \n respond_to do |format|\n format.html { redirect_to :back }\n format.json { render :json => { 'favorited' => false,'href' => favorite_turing_machine_path(turing_machine), 'users_count' => turing_machine.users_count } }\n end \n end", "def unlike!(photo)\n like = self.likes.find_by_photo_id(photo.id)\n like.destroy!\n end", "def destroy\n @like_system_dislike = LikeSystem::Dislike.find(params[:id])\n @like_system_dislike.destroy\n\n respond_to do |format|\n format.html { redirect_to like_system_dislikes_url }\n format.json { head :ok }\n end\n end", "def destroy\n @likes_comments_by_user.destroy\n respond_to do |format|\n format.html { redirect_to likes_comments_by_users_url, notice: 'Likes comments by user was successfully destroyed.' }\n format.json { head :no_content }\n end\n end", "def destroy\n @hitcher_like.destroy\n respond_to do |format|\n format.html { redirect_to hitcher_likes_url, notice: 'Hitcher like was successfully destroyed.' 
}\n format.json { head :no_content }\n end\n end", "def destroy\n user = current_user\n item_id = Sneaker.find(params[:id])\n Wishlist.where(sneaker_id: params[:id]).destroy_all\n item_id.destroy\n redirect_to user_path(user)\n end", "def destroy\n @api_v1_like.destroy\n respond_to do |format|\n format.html { redirect_to api_v1_likes_url, notice: 'Like was successfully destroyed.' }\n format.json { head :no_content }\n end\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
matches `[PERCENTAGE] of [NUM]` and `[PERCENTAGE] (off|on) [NUM]`
def parse_percentage_expression
  regex = /(?<percentage>-?[\d.]+)% (?<operator>(off?|on)) (?<expr>.*$)/
  match = @expression.match(regex)
  if match
    operator = match.named_captures["operator"]
    percentage = match.named_captures["percentage"]
    expr = match.named_captures["expr"]
    percentage_expr = "#{expr} * #{percentage.to_f/100}"
    case operator
    when 'of'
      @expression = percentage_expr
    when 'off'
      @expression = "#{expr} - (#{percentage_expr})"
    when 'on'
      @expression = "#{expr} + (#{percentage_expr})"
    end
  end
end
[ "def percent? = unit == 'percent'", "def percent!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in(__method__, 17)\n\n type = PERCENT\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 359:10: '\\\\%'\n match(?\\%)\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out(__method__, 17)\n\n end", "def percent!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 29 )\n\n type = PERCENT\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 163:11: '%'\n match( 0x25 )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 29 )\n\n end", "def percent!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 24 )\n\n type = PERCENT\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 162:11: '%'\n match( 0x25 )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 24 )\n\n end", "def percent_of(expected); end", "def match_percentage?(instance)\n percentage = percentage_flags.where(\"flaggable_type = ?\", instance.class.to_s).first.try(:percentage)\n instance.id % 10 < (percentage || 0) / 10\n end", "def analyze_tolerance tolerance, capacitance\n #of form '-20%, +80%' (if 2 matches) or '±2.5%' (if 1 match)\n tolerance_percentage = BigDecimal.new(0)\n matched = false\n tolerance.scan(/(\\d+\\.?\\d*)%/) do |match|\n matched = true\n new_percentage = BigDecimal.new(match[0])\n if new_percentage > tolerance_percentage\n tolerance_percentage = new_percentage\n end\n end\n if matched\n return (tolerance_percentage / 100)\n end\n #of form '±0.05pF', need to do percentage based math with capacitance\n absolute = analyze_capacitance tolerance\n if absolute\n return (absolute / capacitance)\n end\n return nil\n end", "def match_percent_criteria_met?\n (@user[:match_percentage] >= min_match_percentage || (@user[:match_percentage] == 0 && @user[:enemy_percentage] == 0))\n end", "def discount_is_percentage\n return false unless @invoice[:discount]\n\n !!@invoice[:discount][:coupon][:percent_off]\n end", "def validate_percentage\n (0..100).include?(percentage) ? 
true : raise(InvalidDiscountPercentage)\n end", "def validate_format_percentage(name, value)\n DataTypeValidator.validate name, Float, value, ->(arg) { arg >= 0.0 && arg <= 1.0 }\n end", "def amount_or_percentage\n return if !!amount ^ !!percentage\n\n errors.add(:base, I18n.t('activerecord.errors.models.coupon.amount_or_percentage'))\n end", "def allow_percent_symbols(*args)\n # This assumes you are storing percents as whole numbers 50.0 = 50% \n # rather than .5 = 50% \n add_equal_method PERCENTAGE_CHARS, args\n end", "def percent_off=(val, no_dirty = false)\n attribute_set(:percent_off, val, no_dirty)\n attribute_set(:amount_off, \"\", no_dirty)\n end", "def matches\n @matches = @question.match /(-?\\d+) (plus|minus|divided by|multiplied by) (-?\\d+)/\n # puts \"@matches = #{@matches}\"\n end", "def valid_percent_of_small\n 10.percent_of(20)\n end", "def pos_percentage(pos_symbol)\n (send(pos_symbol.to_s + '_count').to_f / word_count * 100).round(2)\n end", "def percentage_tag(module_matched, total)\r\n\t\tpercentage_to_display = percentage module_matched, total\r\n\t\t\"#{percentage_to_display.round}%\"\r\n\tend", "def percent\n advanced(0)\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
reformat any mathematical functions from lowercase to uppercase, e.g. avg(1,2) -> AVG(1,2)
def reformat_math_functions
  funcs = %w(min max sum avg count round rounddown roundup sin cos tan)
  regex = /\b(?<func>#{funcs.join('|')})\((?<expr>[^()]+)\)/
  match = @expression.match(regex)
  if match
    func = match.named_captures["func"]
    expr = match.named_captures["expr"]
    @expression = "#{func.upcase}(#{expr})"
  end
end
[ "def tolower\n set_function_and_argument(:tolower, nil)\n end", "def zebulansNightmare(functionName)\n new_name = functionName.split(\"_\").map!(&:capitalize).join(\"\")\n if new_name[0]\n new_name[0] = new_name[0].downcase\n end\n new_name\nend", "def alphabetize\n\nend", "def upcase!() end", "def fb_to_ar_case(column_name)\n column_name =~ /[[:lower:]]/ ? column_name : column_name.downcase\n end", "def canonicalize_algorithm(algo) #:nodoc:\n algo.to_s.downcase.gsub(/_/,'-')\n end", "def fb_to_ar_case(column_name)\n column_name =~ /[[:lower:]]/ ? column_name : column_name.downcase\n end", "def make_caps(string)\n string.upcase\nend", "def upcase() end", "def do_magic(str)\n str.gsub!(/[^A-Z]/, '').to_s #delete small leters and uprinted sumbols\n str.downcase.to_s #make all leters small\nend", "def solve s\n s.chars.map{|letter| letter.upcase == letter ? 'upper' : 'lower'}.count('upper') > 0.5*s.length ? s.upcase : s.downcase\nend", "def name_upcase\n name.upcase\n end", "def proofread(s)\n s.downcase.gsub(\"ie\",\"ei\").capitalize.gsub(/\\. ([a-z])/, &:upcase)\nend", "def shout(input)\n \"#{input.upcase}\"\nend", "def shout(phrase)\n phrase.upcase\nend", "def lettercaseRatio str\n n_up = 0\n str.split(\"\").each {|i| if i.upcase == i then n_up += 1 end}\n por_up = n_up / str.length.to_f * 100.0\n printf(\"lowercase: %.2f uppercase: %.2f\\n\",100.0 - por_up, por_up)\nend", "def transform_text text, transform\n case transform\n when :uppercase, 'uppercase'\n uppercase_pcdata text\n when :lowercase, 'lowercase'\n lowercase_mb text\n else\n text\n end\n end", "def pre_normalize(text)\n text = text.to_s.downcase\n preprocess = Chronic.translate([:pre_normalize, :preprocess])\n if preprocess.is_a? Proc\n text = preprocess.call(text)\n else\n preprocess.each do |proc|\n text = proc.call(text)\n end\n end\n Chronic.translate([:pre_normalize, :pre_numerize]).each do |sub|\n text.gsub!(*sub)\n end\n text = Chronic::Numerizer.numerize(text)\n Chronic.translate([:pre_normalize, :pos_numerize]).each do |sub|\n text.gsub!(*sub)\n end\n text\n end", "def capitalize!() end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
comments resemble C-style, single-line statements `//[...]`; when a line is commented out, the processed expression will be blank
def detect_comments
  if @input =~ %r{^\s*[/]{2}}
    @mode = :comment
    @expression = ''
  end
end
[ "def comment (comment)\n return \"/** #{comment} */\\n\" \nend", "def single_line_comment\n # //\n if @codes[@pos] == 0x2f and @codes[@pos + 1] == 0x2f\n @pos += 2\n pos0 = @pos\n while (code = @codes[@pos]) and !line_terminator?(code)\n @pos += 1\n end\n return ECMA262::SingleLineComment.new(@codes[pos0...@pos].pack(\"U*\"))\n else\n nil\n end\n end", "def multi_line_comment\n # /*\n if @codes[@pos] == 0x2f and @codes[@pos + 1] == 0x2a\n @pos += 2\n pos0 = @pos\n # */\n while (code = @codes[@pos] != 0x2a) or @codes[@pos + 1] != 0x2f\n raise ParseError.new(\"no `*/' at end of comment\", self) if code.nil?\n @pos += 1\n end\n @pos +=2\n return ECMA262::MultiLineComment.new(@codes[pos0...(@pos-2)].pack(\"U*\"))\n else\n nil\n end\n end", "def comment!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in(__method__, 95)\n\n type = COMMENT\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 533:9: '/*' ( options {greedy=false; } : . )* '*/'\n match(\"/*\")\n # at line 533:14: ( options {greedy=false; } : . )*\n while true # decision 30\n alt_30 = 2\n look_30_0 = @input.peek(1)\n\n if (look_30_0 == ?*) \n look_30_1 = @input.peek(2)\n\n if (look_30_1 == ?/) \n alt_30 = 2\n elsif (look_30_1.between?(0x0000, ?.) || look_30_1.between?(?0, 0xFFFF)) \n alt_30 = 1\n\n end\n elsif (look_30_0.between?(0x0000, ?)) || look_30_0.between?(?+, 0xFFFF)) \n alt_30 = 1\n\n end\n case alt_30\n when 1\n # at line 533:42: .\n match_any\n\n else\n break # out of loop for decision 30\n end\n end # loop for decision 30\n match(\"*/\")\n # --> action\n channel=HIDDEN;\n # <-- action\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out(__method__, 95)\n\n end", "def comments\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 31 )\n\n\n begin\n # at line 132:11: DIV WORD DIV\n match( DIV, TOKENS_FOLLOWING_DIV_IN_comments_1406 )\n match( WORD, TOKENS_FOLLOWING_WORD_IN_comments_1408 )\n match( DIV, TOKENS_FOLLOWING_DIV_IN_comments_1410 )\n\n rescue ANTLR3::Error::RecognitionError => re\n report_error(re)\n recover(re)\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 31 )\n\n\n end\n\n return \n end", "def comment\n transform_comments(\" / \") { |c| first_sentence(c) }\n end", "def line_comments_option; end", "def multiline_comment!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 50 )\n\n\n\n type = MULTILINE_COMMENT\n channel = ANTLR3::DEFAULT_CHANNEL\n # - - - - label initialization - - - -\n\n\n # - - - - main rule block - - - -\n # at line 229:21: '/*' ( . )* '*/'\n match( \"/*\" )\n\n # at line 229:26: ( . 
)*\n while true # decision 7\n alt_7 = 2\n look_7_0 = @input.peek( 1 )\n\n if ( look_7_0 == 0x2a )\n look_7_1 = @input.peek( 2 )\n\n if ( look_7_1 == 0x2f )\n alt_7 = 2\n elsif ( look_7_1.between?( 0x0, 0x2e ) || look_7_1.between?( 0x30, 0xffff ) )\n alt_7 = 1\n\n end\n elsif ( look_7_0.between?( 0x0, 0x29 ) || look_7_0.between?( 0x2b, 0xffff ) )\n alt_7 = 1\n\n end\n case alt_7\n when 1\n # at line 229:26: .\n match_any\n\n else\n break # out of loop for decision 7\n end\n end # loop for decision 7\n\n\n match( \"*/\" )\n\n\n # --> action\n channel = HIDDEN;\n # <-- action\n\n\n\n @state.type = type\n @state.channel = channel\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 50 )\n\n\n end", "def consume_line_comments\n comment_lines = []\n # optimized code for shortest execution path\n while !(next_line = get_line).nil?\n if next_line.match(REGEXP[:comment])\n comment_lines << next_line\n else\n unshift_line next_line\n break\n end\n end\n\n comment_lines\n end", "def comment=(*) end", "def comment_ruby_syntax(line)\n if line == \"end\"\n \"#\" + line\n elsif /do$/ =~ line\n line.insert /do$/ =~ line, \"#\"\n else\n line\n end\nend", "def comment(text = '')\n puts \"Warning: Authors should avoid putting two or more adjacent hyphens inside comments.\" if text =~ /--/\n\n conditional = text =~ /\\[if .*\\]/\n\n rawtext \"<!--\"\n rawtext text\n rawtext \">\" if conditional\n\n if block_given?\n rawtext \"\\n\"\n yield\n rawtext \"\\n\"\n end\n\n rawtext \"<![endif]\" if conditional\n rawtext \"-->\\n\"\n end", "def allow_comments?; end", "def comments_before_line(line); end", "def parse_comment_lines\n ruby_omit = false\n rdoc_omit = false\n lines = [\n [0, \"Load #{File.basename(file)} script.\\n\"],\n [0, \"\\n\"],\n [0, \" require '#{file}'\\n\"]\n ]\n index = 1\n File.readlines(file).each do |l|\n case l\n when /^=begin(?!\\s+qed)/\n ruby_omit = true\n when /^=end/\n ruby_omit = false\n when /^\\s*\\#\\-\\-\\s*$/\n rdoc_omit = true\n when /^\\s*\\#\\+\\+\\s*$/\n rdoc_omit = false\n ##when /^\\s*\\#\\ \\-\\-/ # not needed just double comment\n ## # -- skip internal comments\n when /^\\s*##/\n ## skip internal comments\n when /^\\s*\\#/\n lines << [index, l.lstrip.sub(/^\\#\\ ?/, '')] unless (ruby_omit or rdoc_omit)\n else\n lines << [index, \"\\n\"] unless lines.last[1] == \"\\n\" unless (ruby_omit or rdoc_omit)\n end\n index += 1\n end\n lines\n end", "def handle_comment(node)\n if node.type == :erb && node.children.size == 4 &&\n node.children[0]&.type == :indicator && node.children[0].children[0] == '#' &&\n node.children[2]&.type == :code\n\n # Do not continue parsing this node\n comment = node.children[2]\n @comments << ::Parser::Source::Comment.new(comment.location.expression)\n return\n end\n\n node\n end", "def visit_comment(node); end", "def ml_comment!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in(__method__, 40)\n\n type = ML_COMMENT\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 478:4: '/*' ( . )* '*/'\n match(\"/*\")\n # --> action\n if @input.peek(1) == ?* then type = DOC_COMMENT else channel = HIDDEN end \n # <-- action\n # at line 478:88: ( . )*\n loop do #loop 4\n alt_4 = 2\n look_4_0 = @input.peek(1)\n\n if (look_4_0 == ?*) \n look_4_1 = @input.peek(2)\n\n if (look_4_1 == ?/) \n alt_4 = 2\n elsif (look_4_1.between?(0x0000, ?.) 
|| look_4_1.between?(?0, 0xFFFF)) \n alt_4 = 1\n\n end\n elsif (look_4_0.between?(0x0000, ?)) || look_4_0.between?(?+, 0xFFFF)) \n alt_4 = 1\n\n end\n case alt_4\n when 1\n # at line 478:88: .\n match_any\n\n else\n break #loop 4\n end\n end\n match(\"*/\")\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out(__method__, 40)\n\n end", "def comment(message)\n self << \"/* #{message} */\"\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
if input matches `[VAR] = [EXPRESSION]` extract variable name and expression
def process_variable_assignment
  regex = %r{(?<name>\w+)( = )(?<expression>.*$)}
  match = @input.match(regex)
  if match
    @name = match.named_captures["name"]
    @expression = match.named_captures["expression"]
  end
end
[ "def extractVariable( *args )\r\n rval = scanVariable( *args ) or return nil\r\n return rval[:match]\r\n end", "def parse_variable_value\n variable = gets.chomp\n return variable.to_i, false if variable.is_number?\n\n is_an_expression = variable.split(' ').length > 1\n if is_an_expression\n # find the variable\n exp_var = NIL\n variable.split(' ').each do |v|\n if @variables.key?(v)\n if !exp_var.nil? && exp_var != @variables[v]\n raise Exception('Cannot have two different variables in the expression!')\n end\n exp_var = @variables[v]\n end\n end\n\n return VariableExpression.new(exp_var, variable), true\n end\n\n unless @variables.key?(variable)\n puts @variables\n @variables.key?(variable)\n raise Exception.new(\"Variable #{variable} does not exist!\")\n end\n return @variables[variable], true\n end", "def expand_variables\n # self.tokens = self\n\n # var_regex = /^[\\w]+$/\n # var_regex = /([\\s\\b])[\\w]+([\\s\\b])/\n\n # expanded = []\n # self.tokens.each do |token|\n # expanded << expand_token(token)\n # end\n # @expression = expanded.join(' ')\n\n @expression = self.tokens.map do |token|\n expand_token(token)\n end.join(' ')\n\n # @expression = @expression.split(' ').map do |token|\n # if !valid_var_name?(token)\n # return token\n # elsif is_var?(token)\n # return get_var(token)\n # else\n # @mode = :invalid\n # @errors << {message: \"invalid variable name\", info: token}\n # return token\n # end\n # end.join(' ')\n end", "def findVars (regexp_string, vars, output)\n r = Regexp.new( regexp_string )\n match_data = r.match( output )\n output = Hash.new\n if (!match_data.nil?)\n matches = match_data.to_a\n matches = matches[1,matches.length]\n matches.each_with_index do |data, index|\n str, var = vars[index]\n saveVar( str, var, data )\n end\n else\n return false\n #no match data\n end\n return true\n end", "def variable_name_expr; end", "def process_expression(expression)\n begin\n parts = expression.split\n\n # Assignment operation\n if parts[0] == \"let\"\n if parts[2] != \"=\"\n puts \"usage: let var_name = 'expression'\"\n return\n end\n var = parts[1]\n sub_expression = parts[3, parts.count - 3]\n value = evaluate(sub_expression)\n if value.nil?\n puts \"You cannot assign this expression to #{var}: #{expression}\"\n else\n @store[var] = value\n puts \"#{var} = #{value}\"\n end\n else\n value = evaluate(parts)\n if value.nil?\n puts \"Your expression could not be processed: #{expression}\" \n else\n puts value\n end\n end\n rescue\n puts \"Your expression could not be processed: #{expression}\"\n end \n end", "def extract_variables(formula)\n f = formula.split('')\n f.map! {|t| is_variable?(t) ? 
evaluate_variable(t) : t}\n f.join('')\n end", "def variable_expression\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 179 )\n return_value = VariableExpressionReturnValue.new\n\n # $rule.start = the first token seen before matching\n return_value.start = @input.look\n variable_expression_start_index = @input.index\n\n root_0 = nil\n\n success = false # flag used for memoization\n\n begin\n # rule memoization\n if @state.backtracking > 0 and already_parsed_rule?( __method__ )\n success = true\n return return_value\n end\n root_0 = @adaptor.create_flat_list\n\n\n # at line 967:2: \n # - - - - - - - rule clean up - - - - - - - -\n return_value.stop = @input.look( -1 )\n\n if @state.backtracking == 0\n\n return_value.tree = @adaptor.rule_post_processing( root_0 )\n @adaptor.set_token_boundaries( return_value.tree, return_value.start, return_value.stop )\n\n end\n success = true\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 179 )\n memoize( __method__, variable_expression_start_index, success ) if @state.backtracking > 0\n\n end\n \n return return_value\n end", "def extract_variables_from_string(string)\n string.split(/ /).map do |defintion|\n match = defintion.match STRING_REG_EX\n next nil unless match\n\n { match[1] => match[2] }\n end.reject(&:nil?)\n end", "def parseVarExpr\n #puts \"parseVarExpr #{token}\"\n nextToken # eat the var.\n varNames = [] # of arrays of 2\n # At least one variable name is required.\n return mismatch(\"identifier after var\") if token != :identifier\n while (1) \n name = @lexer.current_identifier\n nextToken # eat identifier.\n # Read the optional initializer.\n init = nil\n if token == '='\n nextToken # eat the '='.\n return nil unless init = parseExpression\n end\n varNames << [name, init]\n break if (token != ',') # End of var list, exit loop.\n nextToken # eat the ','.\n return mismatch(\"identifier list after var\") if token != :identifier\n end\n # At this point, we have to have 'in'.\n return mismatch(\"'in' keyword after 'var'\") if token != :tok_in\n nextToken # eat 'in'.\n return nil unless body = parseExpression\n return VarExprAST.new(varNames, body)\n end", "def parse_expr\n if @s.scan(/(?=\\w+\\ *=)/)\n parse_assign\n else\n parse_paren or parse_lit or parse_capture or parse_var_ref or parse_global_var_ref or parse_create_node or parse_create_list or parse_call\n end\n end", "def variable_names\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 67 )\n return_value = VariableNamesReturnValue.new\n\n # $rule.start = the first token seen before matching\n return_value.start = @input.look\n variable_names_start_index = @input.index\n\n root_0 = nil\n __COMMA516__ = nil\n variable_name515 = nil\n variable_name517 = nil\n\n tree_for_COMMA516 = nil\n\n success = false # flag used for memoization\n\n begin\n # rule memoization\n if @state.backtracking > 0 and already_parsed_rule?( __method__ )\n success = true\n return return_value\n end\n root_0 = @adaptor.create_flat_list\n\n\n # at line 463:4: variable_name ( COMMA variable_name )*\n @state.following.push( TOKENS_FOLLOWING_variable_name_IN_variable_names_2981 )\n variable_name515 = variable_name\n @state.following.pop\n if @state.backtracking == 0\n @adaptor.add_child( root_0, variable_name515.tree )\n end\n # at line 463:18: ( COMMA variable_name )*\n while true # decision 126\n alt_126 = 2\n look_126_0 = @input.peek( 1 )\n\n if ( look_126_0 == COMMA )\n alt_126 = 1\n\n 
end\n case alt_126\n when 1\n # at line 463:20: COMMA variable_name\n __COMMA516__ = match( COMMA, TOKENS_FOLLOWING_COMMA_IN_variable_names_2985 )\n if @state.backtracking == 0\n\n tree_for_COMMA516 = @adaptor.create_with_payload( __COMMA516__ )\n @adaptor.add_child( root_0, tree_for_COMMA516 )\n\n end\n @state.following.push( TOKENS_FOLLOWING_variable_name_IN_variable_names_2987 )\n variable_name517 = variable_name\n @state.following.pop\n if @state.backtracking == 0\n @adaptor.add_child( root_0, variable_name517.tree )\n end\n\n else\n break # out of loop for decision 126\n end\n end # loop for decision 126\n # - - - - - - - rule clean up - - - - - - - -\n return_value.stop = @input.look( -1 )\n\n if @state.backtracking == 0\n\n return_value.tree = @adaptor.rule_post_processing( root_0 )\n @adaptor.set_token_boundaries( return_value.tree, return_value.start, return_value.stop )\n\n end\n success = true\n\n rescue ANTLR3::Error::RecognitionError => re\n report_error(re)\n recover(re)\n return_value.tree = @adaptor.create_error_node( @input, return_value.start, @input.look(-1), re )\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 67 )\n memoize( __method__, variable_names_start_index, success ) if @state.backtracking > 0\n\n end\n \n return return_value\n end", "def do_expression(expr, vars)\n parse_expression(expr).each do |k|\n expr = expr.gsub(\"$(#{k})\", vars[k])\n end\n expr\nend", "def lookup_variable(name, expression)\n distance = @locals[expression]\n return @environment.get_at(distance, name) unless distance.nil?\n return @globals.get(name.lexeme)\n end", "def var_values(var, term)\n term if var == name\n end", "def parse_assign\n if var_name = @s.scan(/[a-z]\\w*/)\n @s.skip(/ *= */)\n expr = expect :parse_expr\n Assign.new Token.new(\"name.var\", var_name), expr\n end\n end", "def substitute_variable var\n subbed = @regexp.source.gsub(/\\:#{var.name}(\\s*)/, \"(#{var.pattern.source})\\\\1\")\n subbed.strip! if subbed\n Regexp.new subbed\n end", "def _get_var(vars, name, single_match: :force)\n dot_split_regexp = /([^.]+)(?:\\.|$)/\n\n var_value = name.to_s\n .scan(dot_split_regexp)\n .map { |match_group| match_group[0] }\n .reject(&:empty?)\n .inject(vars) do |current_objects, name_component|\n\n # Intermediate `current_objects` are lists because every step returns\n # a list.\n # If a step is not-final (i.e. 
intermediate), we need to be sure there\n # is only one element.\n if current_objects.is_a?(Array)\n if current_objects.length == 1\n current_objects = current_objects.first\n else\n error(\"Variabile name diverges: multiple intermediate paths are \" +\n \"taken (`[#{current_objects}]`).\")\n end\n end\n\n # Fill `next_objects`.\n next_objects = if current_objects.has_key?(name_component)\n # Exact match.\n Array[current_objects[name_component]]\n elsif current_objects.has_key?(name_component.to_sym)\n # Exact match.\n Array[current_objects[name_component.to_sym]]\n else\n # Check if there are elements with key matching\n # `name_component` as regexp.\n # Return `nil` if nothing is found.\n current_objects.select do |k, v|\n k.to_s =~ Regexp.new(/^#{name_component}$/)\n end.values\n end\n\n # Adjust `next_objects`, according to `single_match` argument.\n next_objects = if single_match\n if single_match == :force && next_objects.length != 1\n error(\"Expected a single match for variable \" +\n \"`#{name}`, but instead got \" +\n \"`#{next_objects.length}`\")\n end\n next_objects.first\n else\n next_objects\n end\n\n if next_objects.nil? ||\n (next_objects.is_a?(Array) && next_objects.empty?)\n break nil\n end\n next_objects\n end\n\n # Filter found variable value.\n Fizzy::Filter.apply(var_value)\n end", "def variable_name\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 68 )\n return_value = VariableNameReturnValue.new\n\n # $rule.start = the first token seen before matching\n return_value.start = @input.look\n variable_name_start_index = @input.index\n\n root_0 = nil\n identifier518 = nil\n\n\n success = false # flag used for memoization\n\n begin\n # rule memoization\n if @state.backtracking > 0 and already_parsed_rule?( __method__ )\n success = true\n return return_value\n end\n root_0 = @adaptor.create_flat_list\n\n\n # at line 466:4: identifier\n @state.following.push( TOKENS_FOLLOWING_identifier_IN_variable_name_3000 )\n identifier518 = identifier\n @state.following.pop\n if @state.backtracking == 0\n @adaptor.add_child( root_0, identifier518.tree )\n end\n # - - - - - - - rule clean up - - - - - - - -\n return_value.stop = @input.look( -1 )\n\n if @state.backtracking == 0\n\n return_value.tree = @adaptor.rule_post_processing( root_0 )\n @adaptor.set_token_boundaries( return_value.tree, return_value.start, return_value.stop )\n\n end\n success = true\n\n rescue ANTLR3::Error::RecognitionError => re\n report_error(re)\n recover(re)\n return_value.tree = @adaptor.create_error_node( @input, return_value.start, @input.look(-1), re )\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 68 )\n memoize( __method__, variable_name_start_index, success ) if @state.backtracking > 0\n\n end\n \n return return_value\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
currently, variables are only expanded when they are surrounded by whitespace or sit at the start or end of the line
def expand_variables
  # self.tokens = self

  # var_regex = /^[\w]+$/
  # var_regex = /([\s\b])[\w]+([\s\b])/

  # expanded = []
  # self.tokens.each do |token|
  #   expanded << expand_token(token)
  # end
  # @expression = expanded.join(' ')

  @expression = self.tokens.map do |token|
    expand_token(token)
  end.join(' ')

  # @expression = @expression.split(' ').map do |token|
  #   if !valid_var_name?(token)
  #     return token
  #   elsif is_var?(token)
  #     return get_var(token)
  #   else
  #     @mode = :invalid
  #     @errors << {message: "invalid variable name", info: token}
  #     return token
  #   end
  # end.join(' ')
end
[ "def local_variables() end", "def capture_variables(line)\n noncomment, _ = line.split(\"#\", 2)\n noncomment.scan(/ENV(?:\\[|\\.fetch\\()['\"]([^'\"]+)['\"]/).flatten\n end", "def compile_expand!\n instance_eval \"def expand(vars); \\\"#{expand_code_fragment}\\\"; end\", __FILE__, __LINE__\n end", "def interpolate_variables(raw_command)\n raw_command.scan(/[^\\\\]\\$[-_a-zA-Z]+\\$/).each do |match|\n match = match[0..0] == \"$\" ? match : match[1..(match.size - 1)]\n match.strip!\n raw_command.gsub!(match, matched_variable(match))\n end\n raw_command.gsub(\"\\\\$\", \"$\")\n end", "def substitute_vars(args)\n\n #split breaks a string into an array of component pieces which makes it easier to perform substitutions\n #in the present situation the component pieces are variables and non-variables.\n #whitespace is preserved in this split process\n def split(str)\n return [] if str.nil? or str.empty?\n #The function originally would split out quoted strings which would not be scanned\n# if result=/\\\\[\"']/.match(str) # split out escaped quotes\n# return split(result.pre_match) + [result[0]] + split(result.post_match)\n# end\n# if result=/(([\"'])[^\"']+\\2)/.match(str) #split out legitimately quoted strings\n# return split(result.pre_match) + [result[0]] + split(result.post_match)\n# end\n# if result=/[\"']/.match(str) #split out dangling quotes\n# return split(result.pre_match) + [result[0]] + split(result.post_match)\n# end\n# if result=/\\s+/.match(str) #split on whitespace (this way we can preserve it)\n# return split(result.pre_match) + [result[0]] + split(result.post_match)\n# end\n if result=/[\\\\]?\\$[A-Za-z]\\w*/.match(str) #split on variables\n return split(result.pre_match) + [result[0]] + split(result.post_match)\n end\n return [str] #return what's left\n end\n\n # A variable is something that starts with a $ character followed by a letter then followed zero or more letters or\n # numbers\n # Variable substitution comes from the global singleton GlobalVars\n def substitute(args)\n args.map { |val|\n if result=/^\\$([A-Za-z]\\w*)/.match(val)\n GlobalVars.instance[result[1]]\n else\n val\n end\n }\n end\n\n #Removes the escaping on the $ character which is used to prevent variable substitution\n def unescape(args)\n args.gsub(/\\\\\\$/, '$')\n end\n\n debug(2,:msg=>\"Pre substitution\",:var=>args)\n args=unescape(substitute(split(args)).join)\n debug(2,:var=>args,:msg=>\"Post substitution\")\n\n return args\n end", "def rescope_vars(content, append='_new')\n content.inject([]) do |memo, l|\n # preserve the original data since we need it for diff mode (options[:diff])\n line = l.dup\n\n # replace default[] with @default_new[]\n line.gsub!(/(default|default_unless|override)\\[/, \"@\\\\1#{append}[\")\n\n # replace kernel[] with @kernel[]\n line.gsub!(/(kernel|lsb)\\[/, \"@\\\\1[\")\n\n # replace `platform' variable (not platform node attribute) with @platform\n line.gsub!(/([^'\"])platform/, \"\\\\1@platform\")\n\n # replace platform_family variable with @platform_family\n line.gsub!(/platform_family/, '@platform_family')\n\n # regex to identify node[][]..[] variables\n node_regex = %r{[^'\"](node((\\[['\"][^'\"]+['\"]\\]){1,}))}\n\n while line =~ node_regex\n node = $1 # grab the whole attribute\n keys = $2 # grab all the subkeys\n #q = (node =~ /'/) ? 
'\"' : \"'\" # choose appropriate quotes to use\n #defanged = q + node + q # surround with quotes\n line.gsub!(/#{Regexp.quote(node)}/, \"@default#{append}#{keys}\")\n end\n memo << line\n end\nend", "def convert_global_assignments(line)\n\t\t\treturn line if remark?(line)\n\t\t\tif line.strip =~ /^\\$([\\w_]+)\\s*\\=\\s*(.*)/\n\t\t\t\tline = \"@cf.site['\"+$1+\"'] = \"+$2\n\t\t\t\t@global_names.push $1\n\t\t\tend\n\t\t\tline\n\t\tend", "def expand s, evars\n return unless s\n\n ns = s.clone\n t = @vars.merge(evars)\n\n for var in t.keys do\n p = t[var]\n if p.respond_to? :call\n p = p.call(var)\n end\n\n ns.gsub!(var, p)\n end\n\n ns\n end", "def variable_name_expr; end", "def set_variables(activity, line_num, file_read)\n val = activity[0].upcase.ord\n return could_not_eval(line_num) unless check_variables([activity[0]])\n if activity[1].nil?\n puts \"Line #{line_num}: operator LET applied to empty stack\"\n error_eval(2, line_num, nil, file_read)\n else\n activity.shift\n store = evaluate_expression(activity, line_num, file_read)\n $user_variables[val - 65] = store unless store.nil?\n end\n end", "def gather_call_parms( line, p )\n\n return if line.length < 2 #empty line, just a \\n\n\n _line = line[7..71]\n\n if _line =~ / USING +.*$/\n $&.split.each do |parm|\n p << parm.tr( \"\\.\", '' )\n end\n else # we've past the USING phrase\n _line.split.each do |parm|\n p << parm.tr(\"\\.\", '')\n end\n end\n print \"DEBUG: gather_call() > #{p}\\n\" \n\n $stmt_ends_in_dot = true if endof_curr_stmt?( _line )\nend", "def expansion(string)\n return string unless string.is_a?(String) # in case of nil\n\n string = string.dup\n vars = string.scan(/:\\w+/) # => [\":ENV\", \":BUILD_DIR\"]\n vars.each do |var|\n string.gsub!(var, var_value(var))\n end\n strip(string)\n end", "def lex_en_expr_variable; end", "def parse_global_assign\n if var_name = @s.scan(/\\$[a-z]\\w*/)\n @s.skip(/ *= */)\n expr = expect :parse_expr\n GlobalAssign.new Token.new(\"name.var\", var_name), expr\n end\n end", "def inline_variables( command )\n\t\tpairs = command.scan( ENV_VARIABLE_REGEX )\n\t\treturn Hash[ *pairs.flatten ]\n\tend", "def insel_content\n tmp_content=File.read(@filename)\n tmp_content.gsub!(/\\$([\\w ]+)(?:\\[(\\d+)\\] *)?(?:\\|\\|([\\w \\.]*))?\\$/){\n variable_name = $1.strip.to_sym\n index = $2\n default = $3\n (index && parameters[variable_name] && parameters[variable_name][index.to_i].to_s) || parameters[variable_name] || default || raise(\"UndefinedValue for #{variable_name} in #{name}\")\n }\n tmp_content.gsub(/i '(.*?)'/){File.read($1)}\n end", "def preprocessor_expand str, definitions\n while definitions.has_key? str\n str = definitions[str].to_s\n end\n str\n end", "def alignAssignments(theLines)\n\n\tsplitAndAlign(theLines, /^(.*?)\\s+= (.*[^\\\\])$/, \"= \");\n\nend", "def extractAssignedVariables(code)\n return code.scan(/(\\$[0-9A-Za-z_]+)(?=\n (\\s*|\\[[^=]*) # Any whitespace, or if this is an array item like $a[], match almost anything\n (\\+=|&=|=[^=]|\\/=|\\*=|\\|=|%=|<<=|>>=|-=|\\^=|\\+\\+|--) # PHP assignment operators\n )/x).map {|result| result[0]}.uniq\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
remove leading and trailing whitespace from expression
def trim_whitespace
  @expression.strip!
end
[ "def rstrip\r\n match = rewrite(/\\s+\\z/)\r\n match ? match[0] : ''\r\n end", "def trim_whitespace!\n replace(trim_whitespace)\n end", "def strip_whitespace!\n replace(self.strip_whitespace)\n end", "def trim_trailing_whitespace; end", "def trim_whitespace; end", "def strip_extra_whitespace\n self.gsub(/\\s+/,' ').strip\n end", "def rstrip!() end", "def rstrip() end", "def start_space_stripper(val)\n return if !val\n return val.gsub(/^\\s+/, \"\")\n end", "def strip\n lambda do |rec, acc|\n acc.collect! do |v|\n # unicode whitespace class aware\n v.sub(/\\A[[:space:]]+/,'').sub(/[[:space:]]+\\Z/, '')\n end\n end\n end", "def end_space_stripper(val)\n return if !val\n return val.gsub(/\\s+$/, \"\")\n end", "def strip_whitespace\n code.gsub!(WHITESPACE_REGEX, ' ')\n\n code\n end", "def remove_trailing_spaces(source)\n for_outstrings_of(source) do |str|\n str.gsub! /\\s+/im, ' '\n str.gsub! /\\s*(\\+|>|\\||~|\\{|\\}|,|\\)|\\(|;|:|\\*)\\s*/im, '\\1'\n str.gsub! /;\\}/, '}'\n str.strip\n end\n end", "def strip!() end", "def strip_whitespace\n code.gsub!(WHITESPACE_REGEX, ' ')\n code.strip!\n\n code\n end", "def without_spaces\n gsub(/\\s/, '')\n end", "def strip_excess_whitespace(code)\n code.gsub(/\\t/, ' ').squeeze(' ')\n end", "def full_strip\n remove(/\\A[^\\p{L}\\p{N}]*|[^\\p{L}\\p{N}]*\\z/)\n end", "def strip_space\n gsub(/:\\s*/, \":\").gsub(/\\n/, \"\").gsub(/\\s+/, \" \").gsub(/(\\/\\*).*?(\\*\\/)/, \"\")\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
if a non-blank expression is invalid, then mark mode as :invalid
def validate
  # @expression && !@expression.blank?
  if !@expression || @expression.blank?
    return
  elsif !Calculator.valid?(@expression)
    @expression = nil
    @mode = :invalid
  end
end
[ "def invalid!\n @invalid = true\n add_class(:invalid)\n end", "def valid_regex; end", "def allowBlank; end", "def match_regex\n if !self.value.blank? && self.regex\n errors.add(:value, 'is a invalid value') unless self.value =~ Regexp.new(self.regex)\n end\n end", "def setInvalid()\n\t\t@isValid = false\n\tend", "def invalid\n @invalid_fields\n end", "def test_invalid_logic\n owner = people(:valid_person)\n rule = Rule.new(:person_id=>owner.id,\n :rule_name=>\"test\",\n :state=>\"active\",\n :logic=>\"aaaaaand\")\n assert !rule.valid?\n # The logic field should have validation errors\n assert rule.errors[:logic].any?\n end", "def box_invalid\n @text =~ /^The box file you're attempting to add is invalid./\n end", "def validate_regex_validation()\n if self.regex_validation != nil\n begin\n re = Regexp.new( self.regex_validation )\n rescue RegexpError\n errors.add( :regex_validation, \"is invalid.\" )\n end\n end\n end", "def check_username_format\n errors.add(:username, \"is not a valid username\") unless username =~ Handle.validation_regex\n end", "def test_invalid_with_empty_rule_name_or_state_or_logic\n owner = people(:valid_person)\n rule = Rule.new(:person_id => owner.id)\n assert !rule.valid?\n # The rule_name field should have validation errors\n assert rule.errors[:rule_name].any?\n # The state field should have validation errors\n assert rule.errors[:state].any?\n # The logic field should have validation errors\n assert rule.errors[:logic].any?\n end", "def validate!\n # Typically, you just wipe yourself if supplied value not valid, but\n # deriving classes might want to raise an exception\n #\n invalid unless fixed? || valid?\n end", "def complete_expression?(str); end", "def maybe_raise_if_invalid(ast_value); end", "def triple_expression?; false; end", "def investigate_invalid_values!\n invalids = @pattern.split(/,|\\/|\\-/).uniq.collect do |value|\n value unless self.class.allowed_values.to_a.include?(value.upcase)\n end.compact\n invalids.delete(\"*\")\n\n err = nil\n if invalids.include?('') || invalids.include?(' ')\n err = \"#{field_name} field's pattern is invalid, please run:\n '#{self.class}.allowed_values' to know valid values\".squish\n elsif invalids.any?\n err = \"value: '#{invalids.join(', ')}' not allowed for '#{field_name}'\n field, run: '#{self.class}.allowed_values' to know valid values\".squish\n end\n raise self.invalid_field_error_class.new(err) if err\n end", "def assert_not_blank(expression)\n assert_false expression.blank?\n end", "def name_is_valid\n errors.add(:name,'Invalid empty string for name.') unless name_is_valid?\n end", "def be_invalid_with(attribute, *values)\n BeInvalidWith.new(attribute, values)\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Determines if the uploaded file should be committed to the blockchain and if so, commits it.
def commit_file
  if @upload.public? && @upload.file_type.public? && @upload.address.blank? &&
     (@upload.creating? || @upload.failed?)
    BlockchainCommitJob.perform_later(@upload.id)
  end
end
[ "def commit\n logger.info(\"Begin commit transaction, id: #{id}\")\n\n parts = sync_get_all_parts.map{ |p|\n Part.new(:number => p[:number], :etag => p[:etag])\n }\n @protocol.complete_multipart_upload(\n bucket, object, id, parts, @options[:callback])\n\n File.delete(@cpt_file) unless options[:disable_cpt]\n\n logger.info(\"Done commit transaction, id: #{id}\")\n end", "def committed?\n return false if status.in_wt? or status.in_index?\n files = Git::Commits.list(30).files\n files.include?(file_path)\n end", "def write(filename)\n return false if empty?\n zipfile.commit\n unless filename == self.filename\n FileUtils.cp(self.filename, filename)\n end\n true\n end", "def save\n if !super\n cleanup_file_to_upload\n return false\n else\n return true\n end\n end", "def file_comment?() !commit_file_id.nil? end", "def commit( *files, **options )\n\t\tself.server.run( :commit, *files, **options )\n\t\treturn true\n\tend", "def commit_required?\n super || @manifest.edited?\n end", "def save\n return if @blob.data == content\n repository.store(self, commit_message)\n end", "def committed?\n @state == :committed\n end", "def merge_commit?\n !squash?\n end", "def satisfied?(commit)\n false\n end", "def commit\n if RackCommitStats.config.file_mode?\n @_commit ||= CommitFromEnv.new\n else\n @_commit ||= Commit.new\n end\n\n @_commit\n end", "def committed?()\n return nil\n end", "def commit\n return if !file_path\n\n clean_nil_and_empties\n\n if empty?\n # Delete the file since an empty data store is not useful\n File.delete(file_path) if File.file?(file_path)\n else\n File.open(file_path, \"w\") { |f| f.write(to_json) }\n end\n end", "def commit? \r\n begin\r\n @ndev.rpc.commit_configuration( :check => true ) \r\n rescue => e\r\n return Junos::Ez::rpc_errors( e.rsp )\r\n end\r\n true # commit check OK!\r\n end", "def commit\n return if name.kind_of?(StringIO) || !commit_required?\n\n on_success_replace do |tmp_file|\n ::Zip::OutputStream.open(tmp_file) do |zos|\n @entry_set.each do |e|\n e.write_to_zip_output_stream(zos)\n e.dirty = false\n e.clean_up\n end\n zos.comment = comment\n end\n true\n end\n initialize(name)\n end", "def commit_pending?(path)\n status = \"\"\n Dir.chdir(path) do\n status = `git status 2>/dev/null`\n end\n return status.include?(\"Changes to be committed\") || status.include?(\"Changes not staged for commit\")\n end", "def commit\n File.open(@path, 'r+') {\n |f|\n f.seek(-TagSize, IO::SEEK_END)\n tag = f.read(3)\n if tag != 'TAG'\n\t# Append new tag\n\tf.seek(0, IO::SEEK_END)\n\tf.write('TAG')\n end\n f.write([@songname,@artist,@album, (\"%04d\" % @year), @comment, 0, @tracknum, @genre_id].pack(TAGFORMAT_WRITE))\n }\n end", "def nothing_to_commit?\n @git.status do |file, status|\n return false unless status.empty?\n end\n return true\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Determines if the uploaded file can be retrieved from the blockchain and if so, retrieves it.
def retrieve_file
  if @upload.public? && @upload.file_type.public? && @upload.address.present? && @upload.success?
    BlockchainRetrieveJob.perform_later(@upload.id)
  end
end
[ "def retrieve_file(file)\n begin\n @ftp.getbinaryfile(file)\n return true\n rescue Exception => e\n error_message(e)\n return false\n end\n end", "def raw_file_okay?\n File.exists? uploaded_file_path\n end", "def retrieve_file(file)\n begin\n @ftp.getbinaryfile(file)\n return true\n rescue Exception => e\n error_message(e)\n return false\n end\n end", "def check\n # PS: Api#checkfiles throws exception when file cannot be found\n response = @api.checkfiles(@url).first rescue {}\n\n if response[:file_status] == :ok\n @fileid = response[:file_id]\n @filename ||= response[:file_name]\n @filesize = response[:file_size].to_i\n @server_id = response[:server_id]\n @short_host = response[:short_host]\n\n @remote_filename = @filename\n @filename = @local_filename || @remote_filename\n true\n else\n # TODO report errors according to actual file status\n @error = 'File not found'\n false\n end\n end", "def remote_file?\n file? && @remote_file\n end", "def file_allowed?\n @file_allowed\n end", "def fetch_required?\n !(File.exist?(downloaded_file) && digest(downloaded_file, digest_type) == checksum)\n end", "def file_valid?\n record.present? && record.file_attacher.file.present?\n end", "def uploaded?(file, storage_key)\n file&.storage_key == storage_key\n end", "def check_file_existance path=nil,token=nil\n\n # abort(\"#{path} --- #{uId}\")\n headers = {\"Authorization\" => \"Bearer #{token}\", \"Content-Type\" => \"application/json\" }\n uri = URI.parse(\"https://api.dropboxapi.com/2/files/get_metadata\");\n \n http = Net::HTTP.new(uri.host, uri.port)\n http.use_ssl = true\n http.verify_mode = OpenSSL::SSL::VERIFY_NONE # Sets the HTTPS verify mode\n \n request = Net::HTTP::Post.new(uri.request_uri)\n data = {\"path\" => \"#{path}\"}\n jdata = data.to_json\n response = http.post(uri.path, jdata, headers)\n rsp = response.body\n objArray = JSON.parse(rsp)\n if objArray['error_summary']\n response = \"no\"\n else\n response = \"yes\"\n end\n \n return response\n \n end", "def has_file_server\n return @payload.get_path(\"files\"){false}\n end", "def get_uploaded_file(serialized)\n uploaded_file = @shrine_class::UploadedFile.urlsafe_load(serialized)\n not_found! unless uploaded_file.exists?\n uploaded_file\n rescue Shrine::Error # storage not found\n not_found!\n end", "def file_matching_path\n !!container.stored_files.where(file_name: file_name, path: path).first\n end", "def original_file_valid?\n\t\t\tFile.exists? @original_file\n\t\tend", "def stored?(file = self.file)\n uploaded?(file, store_key)\n end", "def check_read(file, bytes); end", "def get_uploaded_file(serialized)\n uploaded_file = shrine_class::UploadedFile.urlsafe_load(serialized)\n not_found! unless uploaded_file.exists?\n uploaded_file\n rescue Shrine::Error # storage not found\n not_found!\n end", "def can_upload?\n self.assigned? || self.error?\n end", "def file_required?\n @file_required\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
PATCH/PUT /markets/1 PATCH/PUT /markets/1.json
def update
  respond_to do |format|
    if @market.update(market_params)
      format.html { redirect_to @market, notice: 'Market was successfully updated.' }
      format.json { head :no_content }
    else
      format.html { render action: 'edit' }
      format.json { render json: @market.errors, status: :unprocessable_entity }
    end
  end
end
[ "def update\n @supermarket = Supermarket.find(params[:id]) \n respond_to do |format|\n if @supermarket.update(supermarket_params)\n format.json { render json: @supermarket, status: :ok }\n end\n end\n end", "def update\n @market = Market.find(params[:id])\n if @market.update_attributes(params[:market])\n flash[:notice] = 'Market was successfully updated.'\n end\n respond_with(@market, location: markets_url)\n end", "def update\n @pet = Pet.find(params[:id])\n if @pet.update(pet_params)\n render json: @pet\n else\n render json: @pet.errors, status: :unprocessable_entity\n end\n end", "def update_tenant_circle(args = {}) \n put(\"/tenantcircles.json/#{args[:circleId]}\", args)\nend", "def update\n respond_to do |format|\n if @api_supermarket.update(api_supermarket_params)\n format.json { render :show, status: :ok, location: @api_supermarket }\n else\n format.json { render json: @api_supermarket.errors, status: :unprocessable_entity }\n end\n end\n end", "def put\n request_method('PUT')\n end", "def update\n @seat = Seat.find(params[:id])\n\n if @seat.update(seat_params)\n head :no_content\n else\n render json: @seat.errors, status: :unprocessable_entity\n end\n end", "def update\n spice = Spice.find_by(id: params[:id])\n spice.update(spice_params)\n render json: spice\nend", "def update\n @kit = Kit.find(params[:id])\n\n respond_to do |format|\n if @kit.update_attributes(params[:kit])\n format.html { redirect_to @kit, notice: 'Kit was successfully updated.' }\n format.json { head :ok }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @kit.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @kit = Kit.find(params[:id])\n\n respond_to do |format|\n if @kit.update_attributes(params[:kit])\n format.html { redirect_to @kit, notice: 'Kit was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @kit.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @kitten = Kitten.find(params[:id])\n\n respond_to do |format|\n if @kitten.update_attributes(params[:kitten])\n format.html { redirect_to @kitten, notice: 'Kitten was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @kitten.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @ticker = Ticker.find(params[:id])\n\n respond_to do |format|\n if @ticker.update_attributes(params[:ticker])\n format.html { redirect_to @ticker, notice: 'Ticker was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @ticker.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @lift_set = LiftSet.find(params[:id])\n\n if @lift_set.update(lift_set_params)\n head :no_content\n else\n render json: @lift_set.errors, status: :unprocessable_entity\n end\n end", "def update\n respond_to do |format|\n if @admin_market.update(admin_market_params)\n format.html { redirect_to session['previous_url'] || admin_markets_url, notice: 'Mercati è stato aggiornato con successo.' 
}\n format.json { render :show, status: :ok, location: @admin_market }\n else\n format.html { render :edit }\n format.json { render json: @admin_market.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @kitty = Kitty.find(params[:id])\n\n respond_to do |format|\n if @kitty.update_attributes(params[:kitty])\n format.html { redirect_to @kitty, notice: 'Kitty was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @kitty.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @stumark = Stumark.find(params[:id])\n\n respond_to do |format|\n if @stumark.update_attributes(params[:stumark])\n format.html { redirect_to @stumark, notice: 'Stumark was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @stumark.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @admin_market = Market.find(params[:id])\n\n respond_to do |format|\n if @admin_market.update_attributes(params[:market])\n format.html { redirect_to admin_market_path(@admin_market), notice: 'Market was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render :edit }\n format.json { render json: @admin_market.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @kitten.update(kitten_params)\n format.html { redirect_to @kitten, notice: 'Kitten was successfully updated.' }\n format.json { render :show, status: :ok, location: @kitten }\n else\n format.html { render :edit }\n format.json { render json: @kitten.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @kit.update(kit_params)\n format.html { redirect_to @kit, notice: 'Kit was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @kit.errors, status: :unprocessable_entity }\n end\n end\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }