Dataset columns:
  query      string  (lengths 7 to 9.5k)
  document   string  (lengths 10 to 1.07M)
  negatives  list    (19 items per row)
  metadata   dict
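Each row pairs a natural-language query with the Ruby method it describes (document) and 19 unrelated methods (negatives). A purely illustrative sketch of one row as a Ruby hash follows; the values are placeholders, and the real rows appear below.

# Hypothetical row shape -- placeholder values, not taken from the dataset itself.
row = {
  'query'     => 'REFERENCE NUMBER Selects the next autogenerated reference number ...',
  'document'  => "def select_auto_reference_nbr(test_data)\n  # ...\nend",
  'negatives' => Array.new(19) { |i| "def unrelated_method_#{i}\n  # ...\nend" },
  'metadata'  => { 'objective' => { 'paired' => [], 'self' => [], 'triplet' => [%w[query document negatives]] } }
}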
REFERENCE NUMBER Selects the next autogenerated reference number and inserts it into the test data set
def select_auto_reference_nbr(test_data)
  hide_notifications_bar
  ref_nbr = select_id_generator_option(reference_nbr_input, reference_nbr_options)
  logger.info "Selected auto-generated reference number '#{ref_nbr}'"
  test_data.merge!({CoreUseOfCollectionsData::REFERENCE_NBR.name => ref_nbr})
end
[ "def select_auto_reference_nbr(test_data)\n hide_notifications_bar\n ref_nbr = select_id_generator_option(reference_nbr_input, reference_nbr_options)\n test_data.merge!({ UseOfCollections::REFERENCE_NBR.name => ref_nbr})\n end", "def ref_number\n id + ORDER_NUMBER_START\n end", "def generate_ref\n self.reference = \"ABC-#{id}\"\n self.save\n end", "def store_reference\n 5.times do\n self[:reference] = generate_reference\n return reference unless self.class.where(:reference => reference).first\n end\n\n raise \"Could not generate unique reference for order\"\n end", "def sequence_ref\n\t\t\t\t\traise \"Please define numbering ref.\"\n\t\t\t\tend", "def enter_ref_num(data_set)\n ref_num = data_set[Acquisition::ACQUIS_REF_NUM.name]\n wait_for_element_and_type(acquis_ref_num_input(0), ref_num) if ref_num\n end", "def next_document_number\n self.invoice_number = get_next_number(type) || account.invoice_no_start\n self.save!\n end", "def enter_acquisition_ref_num(data_set)\n acquis_ref_num = data_set[CoreAcquisitionData::ACQUIS_REF_NUM.name]\n logger.debug \"Entering reference number '#{acquis_ref_num}'\"\n ref_num_options_locator = input_options_locator([], CoreAcquisitionData::ACQUIS_REF_NUM.name)\n wait_for_options_and_type(ref_num_input_locator, ref_num_options_locator, acquis_ref_num)\n end", "def set_auto_ref\n # get the auto_ref_count value to use\n self.auto_ref = user.next_auto_ref.to_s\n end", "def assign_seq_num\n self.seq_num = next_seq_num\n end", "def generate_reference\n begin\n ref = SecureRandom.hex(5)\n end while Order.exists?(reference: ref)\n\n ref\n end", "def add_number\n return unless %w[techreport manual].include? @bib.type\n\n did = @bib.docidentifier.detect { |i| i.primary == true }\n did ||= @bib.docidentifier.first\n @item.number = did.id if did\n end", "def assign_invoice_number\n \t\t# wrap in a transaction to prevent race conditions\n Invoice.transaction do\n if draft\n self.number = nil\n elsif self.number.nil?\n self.number = series.next_number\n end\n yield\n end\n end", "def generate_order_number\n record = true\n while record\n random = \"#{ez.env_to_prefix}#{Array.new(9){rand(9)}.join}\"\n record = self.class.find(:first, :conditions => [\"number = ?\", random])\n end\n self.number = random if self.number.blank?\n self.number\n end", "def verify_reference_nbr(test_data)\n verify_values_match(test_data[UseOfCollections::REFERENCE_NBR.name], element_value(reference_nbr_input))\n end", "def enter_conservation_ref_num(data_set)\n conserv_num = data_set[CoreConservationData::CONSERV_NUM.name]\n logger.debug \"Entering reference number '#{conserv_num}'\"\n ref_num_options_locator = input_options_locator([], CoreConservationData::CONSERV_NUM.name)\n wait_for_options_and_type(ref_num_input_locator, ref_num_options_locator, conserv_num)\n end", "def increment_insert_id\n @insert_id = @insert_id.next\n end", "def add_reference_number(opts = {})\n shipment_root << reference_number(opts[:code], opts[:value])\n end", "def enter_valuation_control_ref_num(data_set)\n vc_ref_num = data_set[CoreValuationControlData::VALUE_NUM.name]\n logger.debug \"Entering reference number '#{vc_ref_num}'\"\n ref_num_options_locator = input_options_locator([], CoreValuationControlData::VALUE_NUM.name)\n wait_for_options_and_type(ref_num_input_locator, ref_num_options_locator, vc_ref_num)\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Verifies that the reference number matches test data
def verify_reference_nbr(test_data)
  verify_values_match(test_data[CoreUseOfCollectionsData::REFERENCE_NBR.name], element_value(reference_nbr_input))
end
[ "def verify_reference_nbr(test_data)\n verify_values_match(test_data[UseOfCollections::REFERENCE_NBR.name], element_value(reference_nbr_input))\n end", "def test_check_initial_hash_reference_wrong\n ver = Verifier.new\n\n assert_equal(ver.check_initial_hash_reference(1),false)\n assert_equal(ver.check_initial_hash_reference(-1),false)\n assert_equal(ver.check_initial_hash_reference(1000),false)\n end", "def test_check_initial_hash_reference_zero_correct\n ver = Verifier.new\n\n assert_equal(ver.check_initial_hash_reference(0),true)\n end", "def valid_ref?(ref)\n ref.present? && ref =~ /\\A\\d+\\z/ && ref.length == QRReferenceNumber::LENGTH\n end", "def test_verify_correct_line\n assert_equal \"288d\", @VERIFY.verify_line(@split_line)\n end", "def test_verify_prev_hash_match_valid\n map = create_maps(@full)\n assert_equal true, verify_prev_hash_match(map)\n end", "def test_creditor_reference_fi_checksum\n reference = @tester.creditor_reference('FI','5151418034751285')\n assert reference == 'RF835151418034751285'\n end", "def test_block_number_correct_true\n assert @bv.block_number_correct?(1, 1)\n end", "def test_compute_hash_incorrect\n ver = Verifier.new\n\n refute_equal(ver.compute_hash($block_number,$reference_previous_hash,$transactions,$epoch_time),\"23d2\")\n end", "def test_check_block_num_equal\r\n assert_equal true, @g.check_block_num(1, 1)\r\n end", "def test_two_the_same_numbers\n result = look_and_see(44)\n assert_equal(\"24\",result)\n end", "def test_prev_hash_correct_true\n assert @bv.prev_hash_correct?(0, \"abcd\", \"abcd\")\n end", "def compare_plink(person)\n# print \"#{person} looking for match...\"\n plink_person = person_from_plink_file(person.number.to_i)\n# puts \"Checking comparison\" \n approx_num_different = person.genotype_binary_str(@snps).to_i(2) ^ plink_person.genotype_binary_str(@snps).to_i(2)\n print \"#{person} \"\n if 0 == approx_num_different then\n puts \"matches\"\n else\n puts \"mismatch\"\n end\n # puts \"#{person} #{approx_num_different}\"\n # puts \"\"\n # puts person.genotype_binary_str(@snps)\n # puts plink_person.genotype_binary_str(@snps)\n # puts \"\"\n end", "def test_verify_correct_line2\n split_line = [\"0\", \"0\", \"SYSTEM>569274(100)\", \"1553184699.650330000\", \"f311\"]\n assert_equal \"f311\", @VERIFY.verify_line(split_line)\n end", "def test_incorrect_set_numbers\n sample1 = Card.new(1, 'diamond', 'solid', 'green')\n sample2 = Card.new(2, 'oval', 'striped', 'red')\n sample3 = Card.new(2, 'squiggle', 'open', 'purple')\n input1 = [sample1, sample2, sample3]\n out1 = CheckerSet.new\n assert_equal(false, out1.match?(input1))\n end", "def checkMatchNum()\n\t\t#Uses methods to check if the last and forth from last cards have equal numbers.\n\t\tif getLast(@hand).equalNum(getForthLast(@hand).getNum())\n\t\t\treturn true\n\t\telse\n\t\t\treturn false\n\t\tend\n\tend", "def test_validity_of_examples\n @dfa_examples.each do |e|\n assert_equal(true, e.deterministic?)\n end\n @nfa_examples.each do |e|\n assert_equal(false, e.deterministic?)\n end\n end", "def verify_note(test_data)\n verify_values_match(test_data[CoreUseOfCollectionsData::NOTE.name], element_value(note_text_area))\n end", "def verify_note(test_data)\n verify_values_match(test_data[UseOfCollections::NOTE.name], element_value(note_text_area))\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
PROJECT ID Selects the autogenerated project ID and returns it. If a test data set is given, inserts the ID.
def select_auto_project_id(test_data=nil)
  hide_notifications_bar
  id = select_id_generator_option(project_id_input, project_id_options)
  logger.info "Selected auto-generated project ID '#{id}'"
  test_data.merge!({CoreUseOfCollectionsData::PROJECT_ID.name => id}) if test_data
  id
end
[ "def select_auto_project_id(test_data=nil)\n hide_notifications_bar\n id = select_id_generator_option(project_id_input, project_id_options)\n test_data.merge!({ UseOfCollections::PROJECT_ID.name => id}) if test_data\n id\n end", "def project_id=(value)\n @project_id = value\n end", "def project_id\n project_id = @options[:project_id] || Git.get_config(KEY_PROJECT_ID, :inherited)\n\n if project_id.empty?\n project_id = choose do |menu|\n menu.prompt = 'Choose project associated with this repository: '\n\n PivotalTracker::Project.all.sort_by { |project| project.name }.each do |project|\n menu.choice(project.name) { project.id }\n end\n end\n\n Git.set_config(KEY_PROJECT_ID, project_id, :local)\n puts\n end\n\n project_id\n end", "def project_id\n @project_id ||= stashed_param(self.class.id_at)\n end", "def select_project_id(test_data)\n hide_notifications_bar\n logger.info \"Entering project ID '#{test_data[CoreUseOfCollectionsData::PROJECT_ID.name]}'\"\n wait_for_options_and_select(project_id_input, project_id_options, test_data[CoreUseOfCollectionsData::PROJECT_ID.name])\n end", "def project_id\n data[:project_id]\n end", "def add_project_id\n return unless !persisted? && submitted_for\n temp = submitted_for.projects.select(:id).where(assignment_id: pa_form.assignment.id)\n self.project_id = temp[0].id unless temp.empty?\n end", "def project_id\n project_id = Util::Git.get_config KEY_PROJECT_ID, :inherited\n\n if project_id.empty?\n project_id = choose do |menu|\n menu.prompt = 'Choose project associated with this repository: '\n\n client = TrackerApi::Client.new(:token => api_token)\n\n client.projects.sort_by { |project| project.name }.each do |project|\n menu.choice(project.name) { project.id }\n end\n end\n\n Util::Git.set_config KEY_PROJECT_ID, project_id, :local\n puts\n end\n\n project_id\n end", "def set_ProjectID(value)\n set_input(\"ProjectID\", value)\n end", "def project_id\n @gapi[\"datasetReference\"][\"projectId\"]\n end", "def project_id\n @gapi.dataset_reference.project_id\n end", "def project_id project_name\n if @version < \"1.0\"\n project = projects.find { |project| project[:name] == project_name }\n else\n project = project_by_name(project_name).first\n end\n\n project.nil? ? nil : project[:id].to_i\n end", "def project_id\n @project.id\n end", "def project_id(project_key)\n project_info(project_key)['id']\n end", "def project_id; end", "def get_current_project_id\n @project.id rescue nil\n end", "def project_id\n config_filename('.project_id', 'the Pivotal tracker project id')\n end", "def project_id\n service.project\n end", "def project_id\n return @project_id unless @project_id.nil?\n project_number = self.project_number || @workforce_pool_user_project\n\n # if we missing either project number or scope, we won't retrieve project_id\n return nil if project_number.nil? || @scope.nil?\n\n url = \"#{CLOUD_RESOURCE_MANAGER}#{project_number}\"\n response = connection.get url do |req|\n req.headers[\"Authorization\"] = \"Bearer #{@access_token}\"\n req.headers[\"Content-Type\"] = \"application/json\"\n end\n\n if response.status == 200\n response_data = MultiJson.load response.body, symbolize_names: true\n @project_id = response_data[:projectId]\n end\n\n @project_id\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Enters a project ID per a given set of test data
def select_project_id(test_data)
  hide_notifications_bar
  logger.info "Entering project ID '#{test_data[CoreUseOfCollectionsData::PROJECT_ID.name]}'"
  wait_for_options_and_select(project_id_input, project_id_options, test_data[CoreUseOfCollectionsData::PROJECT_ID.name])
end
[ "def select_auto_project_id(test_data=nil)\n hide_notifications_bar\n id = select_id_generator_option(project_id_input, project_id_options)\n test_data.merge!({ UseOfCollections::PROJECT_ID.name => id}) if test_data\n id\n end", "def select_auto_project_id(test_data=nil)\n hide_notifications_bar\n id = select_id_generator_option(project_id_input, project_id_options)\n logger.info \"Selected auto-generated project ID '#{id}'\"\n test_data.merge!({CoreUseOfCollectionsData::PROJECT_ID.name => id}) if test_data\n id\n end", "def verify_project_id(test_data)\n verify_values_match(test_data[UseOfCollections::PROJECT_ID.name], element_value(project_id_input))\n end", "def verify_project_id(test_data)\n verify_values_match(test_data[CoreUseOfCollectionsData::PROJECT_ID.name], element_value(project_id_input))\n end", "def set_ProjectID(value)\n set_input(\"ProjectID\", value)\n end", "def project_by_name project_name\n args = { :testprojectname => project_name }\n make_call('tl.getTestProjectByName', args, \"1.0\")\n end", "def project_id=(value)\n @project_id = value\n end", "def add_project_id\n return unless !persisted? && submitted_for\n temp = submitted_for.projects.select(:id).where(assignment_id: pa_form.assignment.id)\n self.project_id = temp[0].id unless temp.empty?\n end", "def create_runs()\n print \"\\n07) Creating runs...\\n\"\n @all_runs = Array.new\n @all_run_ids = Array.new\n\n #---------------------------------------------------------\n suite_id = @all_suites[0]['id']\n print \"\\tcreating 1 run in suite_id '#{suite_id}' with all test cases which are in the same suite...\\n\"\n uri = \"add_run/#{@new_project['id']}\"\n fields = { 'suite_id' => suite_id,\n 'name' => 'My test run name 1',\n 'description' => 'My test run description 1',\n 'milestone_id' => nil,\n 'assigned_to' => nil,\n 'include_all' => true,\n 'case_ids' => nil}\n begin\n new_run = @tr_con.send_post(uri, fields)\n # Returns:\n\t\t#\t\t{\"id\"=>200,\n\t\t#\t\t \"suite_id\"=>266,\n\t\t#\t\t \"name\"=>\"My test run name\",\n\t\t#\t\t \"description\"=>\"My test run description\",\n\t\t#\t\t \"milestone_id\"=>nil,\n\t\t#\t\t \"assignedto_id\"=>nil,\n\t\t#\t\t \"include_all\"=>true,\n\t\t#\t\t \"is_completed\"=>false,\n\t\t#\t\t \"completed_on\"=>nil,\n\t\t#\t\t \"config\"=>nil,\n\t\t#\t\t \"config_ids\"=>[],\n\t\t#\t\t \"passed_count\"=>0,\n\t\t#\t\t \"blocked_count\"=>0,\n\t\t#\t\t \"untested_count\"=>8,\n\t\t#\t\t \"retest_count\"=>0,\n\t\t#\t\t \"failed_count\"=>0,\n\t\t#\t\t \"custom_status1_count\"=>0,\n\t\t#\t\t \"custom_status2_count\"=>0,\n\t\t#\t\t \"custom_status3_count\"=>0,\n\t\t#\t\t \"custom_status4_count\"=>0,\n\t\t#\t\t \"custom_status5_count\"=>0,\n\t\t#\t\t \"custom_status6_count\"=>0,\n\t\t#\t\t \"custom_status7_count\"=>0,\n\t\t#\t\t \"project_id\"=>95,\n\t\t#\t\t \"plan_id\"=>nil,\n\t\t#\t\t \"created_on\"=>1438630002,\n\t\t#\t\t \"created_by\"=>1,\n\t\t#\t\t \"url\"=>\"https://tsrally.testrail.com/index.php?/runs/view/200\"}\n rescue Exception => ex\n print \"EXCEPTION occurred on TestRail API 'send_post(#{uri}, #{fields})':\\n\"\n print \"\\t#{ex.message}\\n\"\n raise UnrecoverableException.new(\"\\tFailed to create a new run in the suite\", self)\n end\n print \"\\trun id:'#{new_run['id']}' name:'#{new_run['name']}\\n\"\n @all_runs.push(new_run)\n @all_run_ids.push(new_run['id'])\n\n #---------------------------------------------------------\n suite_id = @all_suites[1]['id']\n print \"\\tcreating 1 run in suite_id '#{suite_id}' with 2 test cases which are in the same suite...\\n\"\n uri = 
\"add_run/#{@new_project['id']}\"\n fields = { 'suite_id' => suite_id,\n 'name' => 'My test run name 2',\n 'description' => 'My test run description 2',\n 'milestone_id' => nil,\n 'assigned_to' => nil,\n 'include_all' => false,\n 'case_ids' => [@all_cases[2]['id'],@all_cases[3]['id']]}\n begin\n new_run = @tr_con.send_post(uri, fields)\n rescue Exception => ex\n print \"EXCEPTION occurred on TestRail API 'send_post(#{uri}, #{fields})':\\n\"\n print \"\\t#{ex.message}\\n\"\n raise UnrecoverableException.new(\"\\tFailed to create a new run in the suite\", self)\n end\n print \"\\trun id:'#{new_run['id']}' name:'#{new_run['name']}\\n\"\n @all_runs.push(new_run)\n @all_run_ids.push(new_run['id'])\n\n return\nend", "def harvest_project_id(proj_name)\n @harvest_projects.each do |project|\n return project['id'] if project['name'].downcase.to_s == proj_name.to_s\n end\n end", "def select_project(array)\n return array.first['id'] if array.size == 1\n\n str_format = \"\\n %#{array.count.to_s.size}s: %s\"\n question = set_color \"\\nWhich project should I use?\", :yellow\n answers = {}\n\n array.sort_by { |e| e['name'] }.each_with_index do |project, index|\n i = (index + 1).to_s\n answers[i] = project['id']\n question << format(str_format, i, project['name'])\n end\n\n puts question\n reply = ask(\"> \").to_s\n if answers[reply]\n answers[reply]\n else\n say \"Not a valid selection, I'm out of here!\", :red\n exit 1\n end\n end", "def create_project(name, description, client)\n\n\tproject = create(:project,\n\tname: name,\n\tstart_date: Date.today - rand(0..60),\n\tdeadline: Date.today + rand(30..60), \n\tdescription: description,\n\tclient: client)\n\n\t# Assign all three users to the project\n\tproject.users << User.all\n\n\t# Tasks\n\tget_tasks.each do |task|\n\t\tcreate(:project_task, project: project, task: task, hours_planned: rand(10..120))\n\tend\n\t\n\t# Some inputs\n\tinputs_count = rand(20..40)\n\tinputs_count.times do\n\t\tproject_task = project.project_tasks.all.sample\n\t\tuser = User.all.sample\n\t\tinput_date = Date.today - rand(-2..20)\n\t\thours = rand(1..24)\n\n\t\tcreate(:input, project_task: project_task, user: user , input_date: input_date, hours: hours)\n\tend\nend", "def project_id\n project_id = Util::Git.get_config KEY_PROJECT_ID, :inherited\n\n if project_id.empty?\n project_id = choose do |menu|\n menu.prompt = 'Choose project associated with this repository: '\n\n client = TrackerApi::Client.new(:token => api_token)\n\n client.projects.sort_by { |project| project.name }.each do |project|\n menu.choice(project.name) { project.id }\n end\n end\n\n Util::Git.set_config KEY_PROJECT_ID, project_id, :local\n puts\n end\n\n project_id\n end", "def project_id\n project_id = @options[:project_id] || Git.get_config(KEY_PROJECT_ID, :inherited)\n\n if project_id.empty?\n project_id = choose do |menu|\n menu.prompt = 'Choose project associated with this repository: '\n\n PivotalTracker::Project.all.sort_by { |project| project.name }.each do |project|\n menu.choice(project.name) { project.id }\n end\n end\n\n Git.set_config(KEY_PROJECT_ID, project_id, :local)\n puts\n end\n\n project_id\n end", "def set_Project(value)\n set_input(\"Project\", value)\n end", "def get_project_info()\n print \"\\n--------------------------------------------------------\\n\"\n print \"04) Searching for project: '#{$my_testrail_project}'...\\n\"\n my_proj_info = nil\n\n # First, get all projects...\n uri = 'get_projects'\n begin\n all_PROJECTs = @tr_con.send_get(uri)\n rescue Exception => ex\n print 
\"\\tEXCEPTION occurred on TestRail API 'send_get(#{uri})':\\n\"\n print \"\\t#{ex.message}\\n\"\n print \"\\tFailed to get information about all TestRail projects'\\n\"\n exit ERR_EXIT_GETPROJS\n end\n\n # Try to find our desired project from the list of projects...\n if all_PROJECTs.length > 0\n all_PROJECTs.each do |item|\n if item['name'] == $my_testrail_project\n @tr_proj_info = item\n break\n end\n end\n else\n print \"ERROR: No projects found in TestRail.'\\n\"\n exit ERR_EXIT_NOPROJS\n end\n if @tr_proj_info == nil\n print \"ERROR: Could not find project named: '#{$my_testrail_project}'\\n\"\n exit ERR_EXIT_PROJNF\n end\n p = @tr_proj_info\n print \"\\tfound project:\\n\"\n print \"\\t id = #{p['id']}\\n\"\n print \"\\t name = #{p['name']}\\n\"\n print \"\\t suite_mode = #{p['suite_mode']}\\n\"\n print \"\\t is_completed = #{p['is_completed']}\\n\"\n print \"\\t url = #{p['url']}\\n\"\n if p['announcement'].nil?\n str = 'nil'\n else\n str = p['announcement'].gsub(/\\n/,\"\\n\\t\\t\\t\")\n end\n print \"\\t announcement = #{str}\\n\"\n\n # Get all suites in our project...\n @tr_proj_id = @tr_proj_info['id']\n uri = \"get_suites/#{@tr_proj_id}\"\n begin\n @tr_suites = @tr_con.send_get(uri)\n rescue Exception => ex\n print \"\\tEXCEPTION occurred on TestRail API 'send_get(#{uri})':\\n\"\n print \"\\t#{ex.message}\\n\"\n print \"\\tFailed to get information about all TestRail suites in project.\\n\"\n exit ERR_EXIT_SUITES\n end\n suiteids = Array.new # Build an array of suite ID's for display...\n @tr_suites.each_with_index do |this_suite, index_suite|\n suiteids.push(this_suite['id'])\n end\n print \"\\n\\tFound '#{@tr_suites.length}' suites in the project: '#{suiteids}'\\n\"\n\n return @tr_proj_info, @tr_suites\nend", "def pivotal_project_id=(project_id)\n pivotal_custom_value('Pivotal Project ID').update_attributes :value => project_id.to_s\n end", "def project_id\n data[:project_id]\n end", "def addTaskToProject\n puts \"Tasks:\"\n Central.instance.Tasks.each do |task|\n puts task.ID.to_s + \" \" + task.Description\n end \n puts \"Projects:\"\n Central.instance.Projects.each do |project| \n puts project.ID.to_s + \" \" + project.Description\n end \n puts \"Assign Task ID:\"\n taskid = $stdin.gets.to_i \n puts \"To project ID:\" \n projid = $stdin.gets.to_i\n Central.instance.addTaskToProject(taskid,projid) \n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Verifies a project ID matches test data
def verify_project_id(test_data)
  verify_values_match(test_data[CoreUseOfCollectionsData::PROJECT_ID.name], element_value(project_id_input))
end
[ "def verify_project_id(test_data)\n verify_values_match(test_data[UseOfCollections::PROJECT_ID.name], element_value(project_id_input))\n end", "def verify_project_desc(test_data)\n verify_values_match(test_data[CoreUseOfCollectionsData::PROJECT_DESC.name], element_value(project_desc_text_area))\n end", "def verify_project_desc(test_data)\n verify_values_match(test_data[UseOfCollections::PROJECT_DESC.name], element_value(project_desc_text_area))\n end", "def compare_to(project_name)\n compare_contents @project_path, project_fixture(project_name)\n end", "def test_if_factory_is_valid # test:unit way to define a test\n assert @project.valid?\n end", "def test_ut_t4_mtv_pu_004\n pu = Pu.find(1)\n pj_id = 5\n assert_equal FALSE, pu.check_pj_belong_to_pu?(pj_id)\n end", "def test_show_if_contains_valid_transaction_id\n assert_equal @order.contains_valid_transaction_id?, false\n assert @order.update_attributes(:auth_transaction_id => 123)\n assert_equal @order.contains_valid_transaction_id?, true\n end", "def validate_project_id\n if Project.find_by(id: project_id).nil?\n errors.add(:project_id, \"must correspond to a valid project.\")\n end\n end", "def validate_project_id\n if Project.find_by(id: project_id).nil?\n errors.add(:project_id, \"must be a valid project\")\n end\n end", "def validate_project\n file = ProjectFile.find_by(id: project_file_id)\n comment = Comment.find_by(id: comment_id)\n if file.project.id != comment.project.id\n errors.add(:project_file_id, \n \"must be part of the same project as the comment\")\n end\n end", "def test_game_id\n assert_equal(10000, @mygame.game_id)\n end", "def test_match_seed_valid\n sim = Simulator.new\n assert sim.match_seed('1')\n end", "def dataset_data_match?(domo_client, dataset_id, expected_data, should_fail=false)\n data = export_dataset(domo_client, dataset_id)\n\n if data.nil?\n unless expected_data.nil?\n puts \"Got no data back from Domo.\"\n puts \"Expected data: #{expected_data}\"\n return false\n end\n return true\n end\n\n if expected_data.is_a? Hash\n return false unless data.size == 1\n data = data[0]\n end\n\n # Sort the expected and actual data so we don't go chasing down row order differences.\n unless data.is_a? Hash\n data.sort! { |a,b| b[\"Event Name\"] <=> a[\"Event Name\"] }\n end\n unless expected_data.is_a? Hash\n expected_data.sort! { |a,b| b[\"Event Name\"] <=> a[\"Event Name\"] }\n end\n\n unless data == expected_data\n missing_data = Array.new\n expected_data.each do |d|\n unless data.include? 
d\n missing_data << d\n end\n end\n unless should_fail\n puts \"-----\"\n puts \"Actual data length: #{data.length}\"\n puts \"Expected data length: #{expected_data.length}\"\n puts \"-----\"\n puts \"Missing Data\"\n puts missing_data\n puts \"-----\"\n puts \"Actual Data\"\n puts data\n puts \"-----\"\n end\n return false\n end\n true\n end", "def verify_result(test_data)\n verify_values_match(test_data[CoreUseOfCollectionsData::RESULT.name], element_value(result_text_area))\n end", "def test_ut_t4_mtv_pu_003\n pu = Pu.find(1)\n pj_id = 1\n assert_equal TRUE, pu.check_pj_belong_to_pu?(pj_id)\n pj_id = 3\n assert_equal FALSE, pu.check_pj_belong_to_pu?(pj_id)\n end", "def translations_verify(project_id, params)\n path = sprintf(\"/api/v2/projects/%s/translations/verify\", project_id)\n data_hash = {}\n post_body = nil\n \n if params.present?\n unless params.kind_of?(PhraseApp::RequestParams::TranslationsVerifyParams)\n raise PhraseApp::ParamsHelpers::ParamsError.new(\"Expects params to be kind_of PhraseApp::RequestParams::TranslationsVerifyParams\")\n end\n end\n \n data_hash = params.to_h\n err = params.validate\n if err != nil\n return nil, err\n end\n reqHelper = PhraseApp::ParamsHelpers::BodyTypeHelper.new(data_hash, post_body)\n rc, err = PhraseApp.send_request(@credentials, \"PATCH\", path, reqHelper.ctype, reqHelper.body, 200)\n if err != nil\n return nil, err\n end\n \n return PhraseApp::ResponseObjects::AffectedCount.new(JSON.load(rc.body)), err\n end", "def dataTest data\n failure = RubyUnit::AssertionFailure.new(data)\n assertEqual data, failure.data, 'Assertion data Hash is incorrect'\n end", "def verify_bugs_for_1bcf224\n assert_equal 17941, Commit.joins(:commit_bugs).where(:commit_hash => '1bcf224fd40b497997827af5f04b0628e648700f').pluck(:bug_id)[0]\n end", "def validateProject(data, isSocialGood = false)\n fails = []\n # Check if project is private\n if data.private\n fails.push(\"Project is either private or doesn't exist!\")\n end \n # Check if project has license\n if data.license == nil\n fails.push(\"Project doesn't have a license\")\n end\n # Check if project has atleast 100 stars\n if data.stargazers_count < 100 && !isSocialGood\n fails.push(\"Project has less than 100 stars\")\n end\n return fails\nend" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
METHODS Selects or removes UoC methods per a given set of test data
def select_methods(test_data)
  test_methods = test_data[CoreUseOfCollectionsData::METHOD_LIST.name]
  hide_notifications_bar
  test_methods = [{CoreUseOfCollectionsData::METHOD.name => ''}] unless test_methods
  prep_fieldsets_for_test_data([fieldset(CoreUseOfCollectionsData::METHOD_LIST.name)], test_methods)
  test_methods.each_with_index do |method, index|
    logger.info "Entering method data set at index #{index}: #{method}"
    wait_for_options_and_select(method_input(index), method_options(index), method[CoreUseOfCollectionsData::METHOD.name])
  end
end
[ "def select_methods(test_data)\n test_methods = test_data[UseOfCollections::METHOD_LIST.name]\n test_methods = [{ UseOfCollections::METHOD.name => ''}] unless test_methods\n prep_fieldsets_for_test_data([fieldset(UseOfCollections::METHOD_LIST.name)], test_methods)\n\n test_methods.each_with_index do |method, index|\n wait_for_options_and_select(method_input(index), method_options(index), method[UseOfCollections::METHOD.name])\n end\n end", "def verify_methods(test_data)\n test_methods = test_data[CoreUseOfCollectionsData::METHOD_LIST.name]\n test_methods = [{CoreUseOfCollectionsData::METHOD.name => ''}] unless test_methods\n test_methods.each_with_index { |test_method, index| verify_values_match(test_method[CoreUseOfCollectionsData::METHOD.name], element_value(method_input index)) }\n end", "def verify_methods(test_data)\n test_methods = test_data[UseOfCollections::METHOD_LIST.name]\n test_methods = [{ UseOfCollections::METHOD.name => ''}] unless test_methods\n test_methods.each_with_index { |test_method, index| verify_values_match(test_method[UseOfCollections::METHOD.name], element_value(method_input index)) }\n end", "def remove_all_except_these_cases_from_suite(testcases,project_id,suite_id)\n puts '=== testcases === '\n puts testcases\n # get a list of automated tests, ignore manual or on hold tests\n existing_cases = testrail_api_client.send_get(\"get_cases/#{project_id}&suite_id=#{suite_id}\").select{|t| t['type_id']==1}.map{|m| m['id']}\n puts '===== existing_cases === '\n puts existing_cases\n (existing_cases - testcases).each do |case_to_remove|\n puts \"case_to_remove #{case_to_remove}\"\n testrail_api_client.send_post(\"delete_case/#{case_to_remove}\",nil)\n end\n end", "def runnable_methods\n methods = super\n selected = Browsery.settings.tags\n\n filtered_methods = filter_methods(methods, selected)\n\n if Browsery.settings.parallel\n unless filtered_methods.empty?\n if selected.nil? 
|| selected.empty?\n @@selected_methods = @@regression_suite\n else\n methods_to_add = filtered_methods.map { |method| method.to_sym if @@regression_suite.include?(method.to_sym) }\n @@selected_methods += methods_to_add\n end\n end\n\n @@runnables_count += 1\n browsery_runnables = Minitest::Runnable.runnables - [Minitest::Test, Minitest::Unit::TestCase]\n\n if @@runnables_count == browsery_runnables.size\n parallel = Parallel.new(Browsery.settings.parallel, @@selected_methods)\n parallel.clean_result!\n parallel.run_in_parallel!\n parallel.remove_redundant_tap if Browsery.settings.rerun_failure\n parallel.aggregate_tap_results\n exit\n end\n\n return [] # no test will run\n else\n filtered_methods\n end\n end", "def delete_testds(testds)\n not_found = []\n testds.each do |testd_td|\n descriptor = Testd.where({ 'testd.name' => testd_td['name'],\n 'testd.vendor' => testd_td['vendor'],\n 'testd.version' => testd_td['version'] }).first\n if descriptor.nil?\n logger.error 'Test Descriptor not found ' + testd_td.to_s\n not_found << testd_td\n else\n if descriptor['pkg_ref'] == 1\n descriptor.destroy\n del_ent_dict(descriptor, :testd)\n else descriptor.update_attributes(pkg_ref: descriptor['pkg_ref'] - 1)\n end\n end\n end\n not_found\n end", "def test_harness_run_clean(tests, id)\n # clean out any resources\n resource_absent_by_title(tests[:agent], tests[:resource_name], tests[id][:title])\n\n # run manifest, resource, idempotence tests\n test_harness_run(tests, id)\n\n # clean out any resources\n resource_absent_by_title(tests[:agent], tests[:resource_name], tests[id][:title])\nend", "def test_methods(patterns=[])\n return @test_methods if !@test_methods.nil?\n\n @test_methods = create.public_methods.find_all { |m|\n m.to_s =~ /^test_/\n }\n\n if !(patterns.nil? or patterns.empty?) 
then\n # filter\n @test_methods.reject!{ |m|\n patterns.find{ |t| m.to_s.include?(t.to_s) }.nil?\n }\n end\n\n @test_methods\n end", "def delete(*methods)\n @yuuki_methods ||= {}\n methods.each do |method|\n @yuuki_methods[method] ||= {}\n @yuuki_methods[method][:enabled] = false\n end\n end", "def remove_all_except_these_cases_from_testrun(testcases,run_id)\n run = get_run(run_id)\n unless run['include_all']\n case_ids = get_tests_in_a_run(run_id).map{|h| h['case_id']} & testcases\n update_run(run_id,{'case_ids'=>case_ids})\n end\n end", "def remove_tests\n klass = class <<self; self; end\n public_instance_methods.grep(/^test_/).each do |method|\n klass.send(:undef_method, method.to_sym)\n end\n self\n end", "def remove_inactive_tests(force=false)\n (raise TortillaError,\"No active platforms - this would remove ALL testcases!\" if self.active_platforms.length == 0) unless force==true\n @test_cases = _select_active_tests\n end", "def unselect; end", "def run_test(obj, testset)\n @cfg['testset'] = testset\n test_sets =[]\n run_cases_idx = []\n fp = File.open \"actual_result.json\", 'a'\n result =\"\"\n ##Get methods from test suite class and execute them one by one\n methods = obj.class.instance_methods(false)\n\n methods.each do |method|\n if (method.to_s == \"#{testset}_test\")\n test_sets = obj.send(method)\n\n end\n end\n\n if test_sets ==[] and testset != \"regression\"\n raise StandardError.new(\"Missing testset define for: \\\"#{testset}\\\" in your test suite\")\n end\n #-----------------------------------------------------------------\n # run pre_setup for suite if pre_setup_suite is defined\n #-----------------------------------------------------------------\n setup_teardown_wrap(obj, \"pre_setup_suite\", fp)\n\n methods.each do |method|\n if @aid_methods.include?(method.to_s.downcase)\n next\n else\n begin\n if testset == \"regression\" or test_sets == [\"all\"]\n\n #-----------------------------------------------------------------\n # execute pre setup before each case\n #-----------------------------------------------------------------\n @idx+=1\n if @cfg['run_testcases'].include?(@idx.to_s) or @cfg['run_testcases']==[]\n setup_teardown_wrap(obj, \"pre_setup_case\", fp)\n\n run_cases_idx.push(@idx.to_s)\n #-----------------------------------------------------------------\n # execute method\n #-----------------------------------------------------------------\n @log.info \"************************************************************\"\n @log.info \"idx:#{@idx} #{method}()\"\n puts \"************************************************************\"\n puts \"idx:#{@idx} #{method}()\"\n method_output =obj.send(method)\n result = {\"idx\" => @idx, \"#{method}\" => method_output}\n fp.print result.to_json\n fp.print \"\\n\"\n #-----------------------------------------------------------------\n # execute tear down after each case\n #-----------------------------------------------------------------\n setup_teardown_wrap(obj, \"tear_down_case\", fp)\n else\n next\n end\n\n elsif test_sets.include?(method.to_s)\n @idx+=1\n if @cfg['run_testcases'].include?(@idx.to_s) or @cfg['run_testcases']==[]\n #-----------------------------------------------------------------\n # execute pre setup before each case\n #-----------------------------------------------------------------\n setup_teardown_wrap(obj, \"pre_setup_case\", fp)\n run_cases_idx.push(@idx.to_s)\n #-----------------------------------------------------------------\n # execute method\n 
#-----------------------------------------------------------------\n @log.info \"************************************************************\"\n @log.info \"idx:#{@idx} #{method}()\"\n puts \"************************************************************\"\n puts \"idx:#{@idx} #{method}()\"\n method_output =obj.send(method)\n result = {\"idx\" => @idx, \"#{method}\" => method_output}\n fp.print result.to_json\n fp.print \"\\n\"\n #-----------------------------------------------------------------\n # execute tear down after each case\n #-----------------------------------------------------------------\n setup_teardown_wrap(obj, \"tear_down_case\", fp)\n else\n next\n end\n else\n next\n end\n\n rescue StandardError => ex\n bt = ex.backtrace.join(\"\\n\")\n @log.error \"Execute method #{method} failed,the error is #{ex} \"\n puts \"Execute method #{method} failed,the error is #{ex} \"\n end\n end\n end\n #-----------------------------------------------------------------\n # run tear down for suite if tear_down suite is defined\n #-----------------------------------------------------------------\n setup_teardown_wrap(obj, \"tear_down_suite\", fp)\n fp.close\n @log.info \"************************************************************\"\n puts \"************************************************************\"\n @log.info \"cases: #{run_cases_idx} were executed\"; puts \"cases: #{run_cases_idx} were executed\" if run_cases_idx!=[]\n end", "def selected_tests\n only = !@only.empty? ? @only : TESTS.keys.to_set\n test_ids = (only - @omit).to_a\n TESTS.slice(*test_ids)\n end", "def generalise\n Mutiny::Tests::TestSet.new(tests.map(&:generalise))\n end", "def teardown\n if Wgit::Document.remove_extension(:table_text)\n Wgit::Document.send(:remove_method, :table_text)\n end\n\n if Wgit::Document.remove_extension(:tables2)\n Wgit::Document.send(:remove_method, :tables2)\n end\n\n if Wgit::Document.remove_extension(:code_snippets)\n Wgit::Document.send(:remove_method, :code_snippets)\n end\n\n if Wgit::Document.remove_extension(:code_snippet)\n Wgit::Document.send(:remove_method, :code_snippet)\n end\n\n if Wgit::Document.remove_extension(:img_alt)\n Wgit::Document.send(:remove_method, :img_alt)\n end\n\n if Wgit::Document.remove_extension(:img)\n Wgit::Document.send(:remove_method, :img)\n end\n\n if Wgit::Document.remove_extension(:blockquote)\n Wgit::Document.send(:remove_method, :blockquote)\n end\n\n if Wgit::Document.remove_extension(:table_text2)\n Wgit::Document.send(:remove_method, :table_text2)\n end\n\n if Wgit::Document.remove_extension(:code)\n Wgit::Document.send(:remove_method, :code)\n end\n\n if Wgit::Document.remove_extension(:single)\n Wgit::Document.send(:remove_method, :single)\n end\n\n if Wgit::Document.remove_extension(:array)\n Wgit::Document.send(:remove_method, :array)\n end\n\n if Wgit::Document.remove_extension(:single2)\n Wgit::Document.send(:remove_method, :single2)\n end\n\n if Wgit::Document.remove_extension(:array2)\n Wgit::Document.send(:remove_method, :array2)\n end\n\n if Wgit::Document.remove_extension(:code2)\n Wgit::Document.send(:remove_method, :code2)\n end\n\n if Wgit::Document.remove_extension(:code3)\n Wgit::Document.send(:remove_method, :code3)\n end\n end", "def _remove_all_method\n :\"_remove_all_#{self[:name]}\"\n end", "def deactivate_all_tests\n tests.update_all(:is_active => false)\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Verifies that the methods match test data
def verify_methods(test_data)
  test_methods = test_data[CoreUseOfCollectionsData::METHOD_LIST.name]
  test_methods = [{CoreUseOfCollectionsData::METHOD.name => ''}] unless test_methods
  test_methods.each_with_index { |test_method, index| verify_values_match(test_method[CoreUseOfCollectionsData::METHOD.name], element_value(method_input index)) }
end
[ "def verify_methods(test_data)\n test_methods = test_data[UseOfCollections::METHOD_LIST.name]\n test_methods = [{ UseOfCollections::METHOD.name => ''}] unless test_methods\n test_methods.each_with_index { |test_method, index| verify_values_match(test_method[UseOfCollections::METHOD.name], element_value(method_input index)) }\n end", "def test_foo_valid\n\t\tassert_equal 'yes', @methods.foo_check('Hillman', 'Hospital')\n\tend", "def test_method_detection\n verify_method :is_method?, with: [\n {param: ' def method_def', expect: true},\n {param: ' de method_def', expect: false},\n {param: ' def method1', expect: true},\n {param: '# def method1', expect: false}\n ]\n end", "def make_assertions_on_tests( tests, method )\n assert_equal false, tests[:bad].__send__( method )\n assert_equal true, tests[:good].__send__( method )\n assert_equal true, tests[:extra_good].__send__( method )\n end", "def mock_verify\n @expected_counts.keys.each do |key|\n assert_equal @expected_counts[key], @counts[key],\n\t\"Expected method #{key} to be called #{@expected_counts[key]} times, \" +\n\t\"got #{@counts[key]}\"\n end\n end", "def valid_test_method(test_case)\n test_case.name =~ /public/i || test_case.name =~ /evaluation/i || test_case.name =~ /private/i\n end", "def test_fourth_valid\n\t\tassert_equal 'valid', @methods.fourth_check('Hospital', 'Monroeville')\n\tend", "def test_appellation_validity\n assert @valid_appellation_1.valid?\n assert @valid_appellation_2.valid?\n end", "def test_fifth_valid\n\t\tassert_equal 1, @methods.fifth_check('Downtown', 'Museum')\n\tend", "def run_verification\n verify_data_format # runs through what we have and makes sure teh values are in the correct format\n verify_dependencies # makes sure that any 'optional' columns have any dependency columns present\n verify_order # finally, all columns must be in a specific order \n end", "def expected_method; end", "def asserttest(meth, arr)\n arr.each do |n|\n assert conv(n[0]).send(meth) == n[1],\n conv(n[0]).send(meth).inspect\n end\n end", "def test_verify_args_all_valid\n assert verify_args(['1', '1', '1'])\n end", "def verify_stubbed_calls\n failed_stubs = []\n @stack.each do |method, stubs|\n next if stubs.empty?\n\n failed_stubs.concat(\n stubs.map do |stub|\n \"Expected #{method} #{stub}.\"\n end\n )\n end\n raise failed_stubs.join(' ') unless failed_stubs.empty?\n end", "def verify_args(method, args)\n matches = Chassis.signatures[method].select do |key|\n args[key]\n end\n \n misses = Chassis.signatures[method] - matches\n \n unless misses.empty?\n raise \"Required arguments missing for '#{method}': #{misses.join(\", \")}\"\n end\n end", "def verify_functions(test_data)\n test_functions = test_data[Org::FUNCTIONS.name]\n errors = []\n test_functions = [{ Org::FUNCTION.name => ''}] unless test_functions\n test_functions.each_with_index do |test_function, index|\n text_values_match?(test_function[Org::FUNCTION.name], element_value(function_input(index)), errors)\n end\n errors\n end", "def verify_functions(test_data)\n test_functions = test_data[CoreOrgData::FUNCTIONS.name]\n errors = []\n test_functions = [{CoreOrgData::FUNCTION.name => ''}] unless test_functions\n test_functions.each do |test_function|\n index = test_functions.index test_function\n text_values_match?(test_function[CoreOrgData::FUNCTION.name], element_value(function_input(index)), errors)\n end\n errors\n end", "def test_verify_too_few_args\n refute verify_args(['1', '1'])\n end", "def test_magic_ball_methods\n magic_ball = Predictor.new\n 
assert_respond_to magic_ball, :ask\n assert_respond_to magic_ball, :shake\n refute_respond_to magic_ball, :roll\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
COLLECTION TYPE Selects collection types per a given set of test data
def select_collection_types(test_data)
  types = test_data[CoreUseOfCollectionsData::COLLECTION_TYPE_LIST.name] || [{CoreUseOfCollectionsData::COLLECTION_TYPE.name => ''}]
  hide_notifications_bar
  prep_fieldsets_for_test_data([fieldset(CoreUseOfCollectionsData::COLLECTION_TYPE_LIST.name)], types)
  types.each_with_index do |type, index|
    logger.info "Entering collection type data set at index #{index}: #{type}"
    logger.debug "Hitting input at '#{collection_type_input(index)}'"
    wait_for_options_and_select(collection_type_input(index), collection_type_options(index), type[CoreUseOfCollectionsData::COLLECTION_TYPE.name])
  end
end
[ "def select_collection_types(test_data)\n types = test_data[UseOfCollections::COLLECTION_TYPE_LIST.name] || [{ UseOfCollections::COLLECTION_TYPE.name => ''}]\n prep_fieldsets_for_test_data([fieldset(UseOfCollections::COLLECTION_TYPE_LIST.name)], types)\n\n types.each_with_index do |type, index|\n wait_for_options_and_select(collection_type_input(index), collection_type_options(index), type[UseOfCollections::COLLECTION_TYPE.name])\n end\n end", "def verify_collection_types(test_data)\n types = test_data[UseOfCollections::COLLECTION_TYPE_LIST.name] || [{ UseOfCollections::COLLECTION_TYPE.name => ''}]\n types.each_with_index { |type, index| verify_values_match(type[UseOfCollections::COLLECTION_TYPE.name], element_value(collection_type_input index)) }\n end", "def verify_collection_types(test_data)\n types = test_data[CoreUseOfCollectionsData::COLLECTION_TYPE_LIST.name] || [{CoreUseOfCollectionsData::COLLECTION_TYPE.name => ''}]\n types.each_with_index { |type, index| verify_values_match(type[CoreUseOfCollectionsData::COLLECTION_TYPE.name], element_value(collection_type_input index)) }\n end", "def collection_typecode\n __typecode(OCI_ATTR_COLLECTION_TYPECODE) if typecode == :named_collection\n end", "def collection_type(type)\n type = (type || :list).to_sym\n @manifest.collection_type = type\n self\n end", "def type\n \"collection\"\n end", "def base_types\n @base_types ||= begin\n query = \"app:collection[cra:collectionType[child::text() = 'types']]/@href\"\n href = data.xpath(query, NS::COMBINED)\n if href.first\n url = href.first.text\n Collection.new(self, url) do |entry|\n id = entry.xpath(\"cra:type/c:id\", NS::COMBINED).text\n type_by_id id\n end\n else\n raise \"Repository has no types collection, this is strange and wrong\"\n end\n end\n end", "def generic_collection_type\n object_type = ObjectType.find_by_name(GENERIC_COLLECTION_TYPE)\n return object_type if object_type.present?\n\n object_type = ObjectType.create_from(\n {\n name: GENERIC_COLLECTION_TYPE,\n description: DESCRIPTION,\n min: 0,\n max: 1,\n handler: 'collection',\n safety: 'No safety information',\n clean_up: 'No cleanup information',\n data: 'No data',\n vendor: 'No vendor information',\n unit: 'each',\n cost: 0.01,\n release_method: 'return',\n release_description: '',\n image: '',\n prefix: '',\n rows: 8,\n columns: 12\n }\n )\n object_type.save\n object_type\n end", "def describe_metric_collection_types()\n request({\n 'Action' => 'DescribeMetricCollectionTypes',\n :idempotent => true,\n :parser => Fog::Parsers::AWS::AutoScaling::DescribeMetricCollectionTypes.new\n })\n end", "def valid_collection_type?(collection_type)\n @collections.include?(collection_type) ||\n is_view?(collection_type) ||\n collection_type.to_s == 'cloudkit-meta'\n end", "def collection(doc_or_type)\n\n @db.collection(\n doc_or_type.is_a?(String) ? doc_or_type : doc_or_type['type'])\n end", "def collections(what)\n list = []\n collections = what == 'all' ? all_collections() : local_collections()\n collections.each do |collection|\n model = collection.classify.constantize rescue nil\n next if model.nil?\n next unless model.respond_to?(:mongo_client)\n record = {'id' => collection, 'description' => I18n.t(\"helpers.label.#{collection}.tabletitle\") } \n list << [record, model_fields(collection)]\n end\n list\nend", "def valid_collection_type?(collection_type)\n @collections.include?(collection_type) || collection_type.to_s == 'cloudkit-meta'\n end", "def type\n @type ||= @collection.nil? ? 
nil : @collection.label.to_sym\n end", "def generic_collection\n Collection.new_collection(generic_collection_type)\n end", "def collection_type(coll_type, options={})\n unless COLLECTION_TYPES.include? coll_type\n raise ArgumentError, \"Invalid collection type: #{coll_type.to_s}\"\n end\n @collection_type = coll_type\n case @collection_type\n when :riak_kv\n self.persistence = Riagent::Persistence::RiakKVStrategy.new(self)\n if options.has_key? :list_keys_using\n if options[:list_keys_using] == :streaming_list_keys\n self.persistence = Riagent::Persistence::RiakNoIndexStrategy.new(self)\n elsif options[:list_keys_using] == :riak_dt_set\n self.persistence = Riagent::Persistence::RiakDTSetStrategy.new(self)\n end\n end\n end\n end", "def subtypes\n case result_type\n when 'lists'\n %w{ lists.owned lists.collected }\n when 'collection'\n %w{ cookmarks lists feeds friends }\n else\n super\n end\n end", "def index\n @typecollections = Typecollection.all\n end", "def selected_filters_for(type, collection)\n prefix = type.to_s[0, 1]\n ids = TaskFilter.filter_ids(session, \"filter_#{ type }\".to_sym)\n \n selected = collection.select { |o| ids.include?(o.id) }\n return objects_to_names_and_ids(selected, :prefix => prefix)\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Verifies collection types match test data
def verify_collection_types(test_data)
  types = test_data[CoreUseOfCollectionsData::COLLECTION_TYPE_LIST.name] || [{CoreUseOfCollectionsData::COLLECTION_TYPE.name => ''}]
  types.each_with_index { |type, index| verify_values_match(type[CoreUseOfCollectionsData::COLLECTION_TYPE.name], element_value(collection_type_input index)) }
end
[ "def verify_collection_types(test_data)\n types = test_data[UseOfCollections::COLLECTION_TYPE_LIST.name] || [{ UseOfCollections::COLLECTION_TYPE.name => ''}]\n types.each_with_index { |type, index| verify_values_match(type[UseOfCollections::COLLECTION_TYPE.name], element_value(collection_type_input index)) }\n end", "def verify_collection_data(expected_collection_data, actual_collection_data)\n if expected_collection_data.nil?\n expect(actual_collection_data).to be nil\n elsif expected_collection_data.empty?\n expect(actual_collection_data).to be_empty\n else\n expect(actual_collection_data).not_to be nil\n expect(actual_collection_data).to match_with_type(expected_collection_data)\n end\n end", "def verify_collection_data(actual_collection_data)\n expected_collection_data = test_instance.outcome_collection_data\n if expected_collection_data.nil?\n expect(actual_collection_data).to be nil\n elsif expected_collection_data.empty?\n expect(actual_collection_data).to be_empty\n else\n expect(actual_collection_data).not_to be nil\n expected_collection_data.each do |doc|\n expect(actual_collection_data).to include(doc)\n end\n actual_collection_data.each do |doc|\n expect(expected_collection_data).to include(doc)\n end\n end\n end", "def verify_types(test_data)\n test_types = test_data[Org::ORG_RECORD_TYPES.name]\n errors = []\n test_types = [{ Org::ORG_RECORD_TYPE.name => ''}] unless test_types\n test_types.each_with_index do |test_type, index|\n text_values_match?(test_type[Org::ORG_RECORD_TYPE.name], element_value(org_record_type_input(index)), errors)\n end\n errors\n end", "def verify_types(test_data)\n test_types = test_data[CoreOrgData::ORG_RECORD_TYPES.name]\n errors = []\n test_types = [{CoreOrgData::ORG_RECORD_TYPE.name => ''}] unless test_types\n test_types.each do |test_type|\n index = test_types.index test_type\n text_values_match?(test_type[CoreOrgData::ORG_RECORD_TYPE.name], element_value(org_record_type_input(index)), errors)\n end\n errors\n end", "def verify_material_types(test_data)\n types = test_data[UseOfCollections::MATERIAL_TYPE_LIST.name] || [{ UseOfCollections::MATERIAL_TYPE.name => ''}]\n types.each_with_index { |type, index| verify_values_match(type[UseOfCollections::MATERIAL_TYPE.name], element_value(material_type_input index)) }\n end", "def verify_material_types(test_data)\n types = test_data[CoreUseOfCollectionsData::MATERIAL_TYPE_LIST.name] || [{CoreUseOfCollectionsData::MATERIAL_TYPE.name => ''}]\n types.each_with_index { |type, index| verify_values_match(type[CoreUseOfCollectionsData::MATERIAL_TYPE.name], element_value(material_type_input index)) }\n end", "def verify_object_info_data(data_set)\n logger.debug \"Checking object number #{data_set[CoreObjectData::OBJECT_NUM.name]}\"\n object_data_errors = []\n text_values_match?(data_set[CoreObjectData::OBJECT_NUM.name], element_value(object_num_input), object_data_errors)\n\n other_nums = data_set[CoreObjectData::OTHER_NUM.name]\n other_nums && other_nums.each do |num|\n index = other_nums.index num\n text_values_match?(num[CoreObjectData::NUM_VALUE.name], element_value(other_num_num_input index), object_data_errors)\n text_values_match?(num[CoreObjectData::NUM_TYPE.name], element_value(other_num_type_input index), object_data_errors)\n end\n\n num_objects = data_set[CoreObjectData::NUM_OBJECTS.name]\n num_objects && text_values_match?(num_objects.to_s, element_value(num_objects_input), object_data_errors)\n\n collection = data_set[CoreObjectData::COLLECTION.name]\n collection && text_values_match?(collection, 
element_value(collection_input), object_data_errors)\n\n resp_depts = data_set[CoreObjectData::RESPONSIBLE_DEPTS.name]\n resp_depts && resp_depts.each { |dept| text_values_match?(dept[CoreObjectData::RESPONSIBLE_DEPT.name], element_value(resp_dept_input resp_depts.index(dept)), object_data_errors) }\n\n pub_to_list = data_set[CoreObjectData::PUBLISH_TO_LIST.name]\n pub_to_list && pub_to_list.each { |pub| text_values_match?(pub[CoreObjectData::PUBLISH_TO.name], element_value(publish_to_input pub_to_list.index(pub)), object_data_errors) }\n\n status = data_set[CoreObjectData::RECORD_STATUS.name]\n status && text_values_match?(status, element_value(record_status_input), object_data_errors)\n\n inv_statuses = data_set[CoreObjectData::INVENTORY_STATUS_LIST.name]\n inv_statuses && inv_statuses.each { |stat| text_values_match?(stat[CoreObjectData::INVENTORY_STATUS.name], element_value(inventory_status_input inv_statuses.index(stat)), object_data_errors) }\n\n brief_descrips = data_set[CoreObjectData::BRIEF_DESCRIPS.name]\n brief_descrips && brief_descrips.each { |descrip| text_values_match?(descrip[CoreObjectData::BRIEF_DESCRIP.name], element_value(brief_desc_text_area brief_descrips.index(descrip)), object_data_errors) }\n\n dist_feat = data_set[CoreObjectData::DISTINGUISHING_FEATURES.name]\n dist_feat && text_values_match?(dist_feat, element_value(dist_features_text_area), object_data_errors)\n\n comments = data_set[CoreObjectData::COMMENTS.name]\n comments && comments.each { |comment| text_values_match?(comment[CoreObjectData::COMMENT.name], element_value(comment_text_area comments.index(comment)), object_data_errors) }\n\n titles = data_set[CoreObjectData::TITLE_GRP.name]\n titles && titles.each do |title|\n index = titles.index title\n text_values_match?(title[CoreObjectData::TITLE.name], element_value(title_input index), object_data_errors)\n text_values_match?(title[CoreObjectData::TITLE_TYPE.name], element_value(title_type_input index), object_data_errors)\n text_values_match?(title[CoreObjectData::TITLE_LANG.name], element_value(title_lang_input index), object_data_errors)\n\n translations = title[CoreObjectData::TITLE_TRANSLATION_SUB_GRP.name]\n translations && translations.each do |trans|\n sub_index = translations.index trans\n text_values_match?(trans[CoreObjectData::TITLE_TRANSLATION.name], element_value(title_translation_input [index, sub_index]), object_data_errors)\n text_values_match?(trans[CoreObjectData::TITLE_TRANSLATION_LANG.name], element_value(title_translation_lang_input [index, sub_index]), object_data_errors)\n end\n end\n\n obj_names = data_set[CoreObjectData::OBJ_NAME_GRP.name]\n obj_names && obj_names.each do |name|\n index = obj_names.index name\n text_values_match?(name[CoreObjectData::OBJ_NAME_NAME.name], element_value(object_name_input index), object_data_errors)\n text_values_match?(name[CoreObjectData::OBJ_NAME_CURRENCY.name], element_value(object_name_currency_input index), object_data_errors)\n text_values_match?(name[CoreObjectData::OBJ_NAME_LEVEL.name], element_value(object_name_level_input index), object_data_errors)\n text_values_match?(name[CoreObjectData::OBJ_NAME_SYSTEM.name], element_value(object_name_system_input index), object_data_errors)\n text_values_match?(name[CoreObjectData::OBJ_NAME_TYPE.name], element_value(object_name_type_input index), object_data_errors)\n text_values_match?(name[CoreObjectData::OBJ_NAME_LANG.name], element_value(object_name_lang_input index), object_data_errors)\n 
text_values_match?(name[CoreObjectData::OBJ_NAME_NOTE.name], element_value(object_name_note_input index), object_data_errors)\n end\n\n object_data_errors\n end", "def check_input_types(data)\r\n if data.class != Array\r\n raise Exception.new(\"Input must be an array (#{data.class} class detected).\")\r\n end\r\n dtypes = data.each do |v|\r\n if ![Float, Fixnum].include?(v.class)\r\n raise Exception.new(\"Input array must only contain Float or Fixnum class values (#{v.class} class detected).\")\r\n end\r\n end\r\nend", "def not_a_collection(have)\n \"expected #{have.collection_name} to be a collection but it does not respond to #length, #size or #count\"\n end", "def test_have_types\n @sources.each { |s| assert(!s['type'].blank?, \"No type for #{s['uri']}\") }\n end", "def validate_data_validation_type(v); end", "def describe_type_expectation\n\t\t\treturn \"a JSON collection (Array of Objects)\"\n\t\tend", "def is_correct_data?(data, *expected_keys)\n return false until contains_expected_keys?(data, *expected_keys)\n filter_keys(data, *expected_keys)\n data.each do |key, value|\n return false until correct_type?(key, value)\n end\n data.each do |key, value|\n return false until corresponds_to_constraint?(key, value)\n end\n return true\nend", "def test_collections(collections)\n yepnope = Array.new\n for c in collections\n puts \"Testing #{c}\".green\n yepnope.push process(loadschema(\"_#{c}.yml\"))\n puts \"Finished testing #{c}\".green\n end\n yepnope\n end", "def select_collection_types(test_data)\n types = test_data[UseOfCollections::COLLECTION_TYPE_LIST.name] || [{ UseOfCollections::COLLECTION_TYPE.name => ''}]\n prep_fieldsets_for_test_data([fieldset(UseOfCollections::COLLECTION_TYPE_LIST.name)], types)\n\n types.each_with_index do |type, index|\n wait_for_options_and_select(collection_type_input(index), collection_type_options(index), type[UseOfCollections::COLLECTION_TYPE.name])\n end\n end", "def test_collection_size\r\n assert_equal 21, @lm.size\r\n end", "def valid_collection?(collection)\n !collection.nil? \\\n && collection.is_a?(WebProfiler::Model::CollectionRecord)\n end", "def is_valid_collection?(collection)\n error = UninitializedObjectError.new\n collection.each do |col|\n error.check_initialized(col)\n end\n return true\n rescue\n return false\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
MATERIAL TYPE: Selects material types per a given set of test data
def select_material_types(test_data) types = test_data[CoreUseOfCollectionsData::MATERIAL_TYPE_LIST.name] || [{CoreUseOfCollectionsData::MATERIAL_TYPE.name => ''}] hide_notifications_bar prep_fieldsets_for_test_data([fieldset(CoreUseOfCollectionsData::MATERIAL_TYPE_LIST.name)], types) types.each_with_index do |type, index| logger.info "Entering material type data set at index #{index}: #{type}" wait_for_options_and_select(material_type_input(index), material_type_options(index), type[CoreUseOfCollectionsData::MATERIAL_TYPE.name]) end end
[ "def select_material_types(test_data)\n types = test_data[UseOfCollections::MATERIAL_TYPE_LIST.name] || [{ UseOfCollections::MATERIAL_TYPE.name => ''}]\n prep_fieldsets_for_test_data([fieldset(UseOfCollections::MATERIAL_TYPE_LIST.name)], types)\n\n types.each_with_index do |type, index|\n wait_for_options_and_select(material_type_input(index), material_type_options(index), type[UseOfCollections::MATERIAL_TYPE.name])\n end\n end", "def verify_material_types(test_data)\n types = test_data[UseOfCollections::MATERIAL_TYPE_LIST.name] || [{ UseOfCollections::MATERIAL_TYPE.name => ''}]\n types.each_with_index { |type, index| verify_values_match(type[UseOfCollections::MATERIAL_TYPE.name], element_value(material_type_input index)) }\n end", "def verify_material_types(test_data)\n types = test_data[CoreUseOfCollectionsData::MATERIAL_TYPE_LIST.name] || [{CoreUseOfCollectionsData::MATERIAL_TYPE.name => ''}]\n types.each_with_index { |type, index| verify_values_match(type[CoreUseOfCollectionsData::MATERIAL_TYPE.name], element_value(material_type_input index)) }\n end", "def materialType\n end", "def material_type\n attributes['material_type']\n end", "def get_material_types\n METADATA_RELATIONS_CONFIG['material_types']\n end", "def decode_type_index\n if @mat_index < 0 or @mat_type >= END_BASE\n @mode = :Builtin\n @material = df.world.raws.mat_table.builtin[@mat_type]\n\n elsif @mat_type >= PLANT_BASE\n @mode = :Plant\n @plant = df.world.raws.plants.all[@mat_index]\n @material = @plant.material[@mat_type-PLANT_BASE] if @plant\n\n elsif @mat_type >= FIGURE_BASE\n @mode = :Figure\n @figure = df.world.history.figures.binsearch(@mat_index)\n @creature = df.world.raws.creatures.all[@figure.race] if @figure\n @material = @creature.material[@mat_type-FIGURE_BASE] if @creature\n\n elsif @mat_type >= CREATURE_BASE\n @mode = :Creature\n @creature = df.world.raws.creatures.all[@mat_index]\n @material = @creature.material[@mat_type-CREATURE_BASE] if @creature\n\n elsif @mat_type > 0\n @mode = :Builtin\n @material = df.world.raws.mat_table.builtin[@mat_type]\n\n elsif @mat_type == 0\n @mode = :Inorganic\n @inorganic = df.world.raws.inorganics[@mat_index]\n @material = @inorganic.material if @inorganic\n end\n end", "def get_materials_item(id, type)\n\n rubyclass = @@supported_material_types[type]\n\n if rubyclass\n\n includes = [ :user,\n :projects,\n :subject_areas,\n :grade_levels ]\n\n item = rubyclass.includes(includes).find(id)\n\n if item\n return item\n end\n\n else\n raise ActiveRecord::RecordNotFound, \"Invalid material type (#{type})\"\n end\n\n raise ActiveRecord::RecordNotFound,\n \"Cannot find material type (#{type}) with id (#{id})\"\n end", "def gather_materials materials, grouped_operations\n show do\n # Grabbing materials\n title \"Retrieve the following item(s)\"\n \n materials.each do |mat|\n index = mat.index(\"#\");\n newmat = mat\n if(!index.nil?)\n newmat = mat.slice(0, index) + \"#{grouped_operations.length}\"\n end\n \n # Find the item id of this material if it exists.\n # If it exists, print the name and id. 
if not, print name\n newmat_sample = Sample.where(name: newmat).first\n if newmat_sample\n queried_items = Item.where(sample_id: newmat_sample.id)\n queried_items_sorted = queried_items.sort {|i1, i2| i2.id <=> i1.id}\n newmat_id = queried_items_sorted.first.id\n check newmat + \" (\" + newmat_id + \")\"\n else\n check newmat\n end\n \n end\n\n output_type = \"\"\n \n # Counts how many of each kind of bottle we want.\n containers = {}\n grouped_operations.each do |op|\n # raise op.output(\"Media/Reagents\").inspect\n output_type = ObjectType.find(op.output(\"Media/Reagents\").item.object_type_id).name\n if containers.key? output_type\n containers[output_type] += 1\n else\n containers[output_type] = 1\n end\n end\n \n containers.each do |container, quantity|\n check \"Grab #{quantity} of #{CONTAINERS[output_type]}\"\n end\n end\n end", "def index\n @material_types = MaterialType.all\n end", "def load_materials(materials)\n mt = 1\n materials.each do |material|\n case mt\n when 1\n self.mat_item1 = material.item_key.to_s + \" - \" + material.item\n self.mat_qty_1 = material.quantity\n when 2\n self.mat_item2 = material.item_key.to_s + \" - \" + material.item\n self.mat_qty_2 = material.quantity\n when 3\n self.mat_item3 = material.item_key.to_s + \" - \" + material.item\n self.mat_qty_3 = material.quantity\n when 4\n self.mat_item4 = material.item_key.to_s + \" - \" + material.item\n self.mat_qty_4 = material.quantity\n when 5\n self.mat_item5 = material.item_key.to_s + \" - \" + material.item\n self.mat_qty_5 = material.quantity\n when 6\n self.mat_item6 = material.item_key.to_s + \" - \" + material.item\n self.mat_qty_6 = material.quantity\n else\n #nothing\n end\n mt += 1\n end\n end", "def index\n @materials_types = MaterialsType.all\n end", "def set_material(material)\n GL::Material(GL::FRONT_AND_BACK, GL::AMBIENT, material[AMBIENCE])\n GL::Material(GL::FRONT_AND_BACK, GL::SPECULAR, material[SPECULAR])\n GL::Material(GL::FRONT_AND_BACK, GL::DIFFUSE, material[DIFFUSE])\n GL::Material(GL::FRONT_AND_BACK, GL::SHININESS, material[SHININESS])\n GL::Material(GL::FRONT, GL::EMISSION, [0.3, 0.3, 0.3, 0.7])\n end", "def color(type, o={})\n type = type.to_s\n material = MATERIAL_TYPES[type]\n raise ArgumentError.new \"Invalid color/material type: #{type}\" if !material\n\n MS.new.tap do |ms|\n ms.set_color_material material if material\n ms.set_diffuse color_for(\"diffuse\", o[:diffuse]) if o[:diffuse]\n ms.set_ambient color_for(\"ambient\", o[:ambient]) if o[:ambient]\n ms.set_specular color_for(\"specular\", o[:specular]) if o[:specular]\n ms.set_emissive color_for(\"emissive\", o[:emissive]) if o[:emissive]\n set_render_state ms\n end\n end", "def show\n\n type = params[:material_type]\n id = params[:id]\n include_related = params.has_key?(:include_related) ?\n params[:include_related].to_i :\n @@DEFAULT_RELATED_MATERIALS_COUNT\n\n status = 200\n data = {}\n\n begin\n item = get_materials_item id, type\n rescue ActiveRecord::RecordNotFound => rnf\n render json: { :message => rnf.message}, :status => 400\n return\n end\n\n if item\n array = materials_data [item], nil, include_related\n\n if array.size == 1\n\n data = array[0]\n\n else\n status = 400\n data = {:message =>\n \"Unexpected materials size #{array.size}\"}\n end\n\n else\n status = 400\n data = { :message =>\n \"Cannot find materials item type (#{type}) with id (#{id})\" }\n end\n\n render json: data, :status => status\n\n end", "def material\n end", "def get_admin_material_types\n material_types = get_material_types\n respond_to do 
|format|\n format.json { render json: material_types}\n end\n end", "def material; end", "def loadMaterials\n material1 = $MaterialIdentifier.getMaterial(\"texture1\")\n material2 = $MaterialIdentifier.getMaterial(\"texture2\")\n material3 = $MaterialIdentifier.getMaterial(\"texture3\")\n material4 = $MaterialIdentifier.getMaterial(\"texture4\")\n material5 = $MaterialIdentifier.getMaterial(\"texture5\")\n material6 = $MaterialIdentifier.getMaterial(\"texture6\")\nend" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Verifies material types match test data
def verify_material_types(test_data) types = test_data[CoreUseOfCollectionsData::MATERIAL_TYPE_LIST.name] || [{CoreUseOfCollectionsData::MATERIAL_TYPE.name => ''}] types.each_with_index { |type, index| verify_values_match(type[CoreUseOfCollectionsData::MATERIAL_TYPE.name], element_value(material_type_input index)) } end
[ "def verify_material_types(test_data)\n types = test_data[UseOfCollections::MATERIAL_TYPE_LIST.name] || [{ UseOfCollections::MATERIAL_TYPE.name => ''}]\n types.each_with_index { |type, index| verify_values_match(type[UseOfCollections::MATERIAL_TYPE.name], element_value(material_type_input index)) }\n end", "def verify_types(test_data)\n test_types = test_data[Org::ORG_RECORD_TYPES.name]\n errors = []\n test_types = [{ Org::ORG_RECORD_TYPE.name => ''}] unless test_types\n test_types.each_with_index do |test_type, index|\n text_values_match?(test_type[Org::ORG_RECORD_TYPE.name], element_value(org_record_type_input(index)), errors)\n end\n errors\n end", "def verify_types(test_data)\n test_types = test_data[CoreOrgData::ORG_RECORD_TYPES.name]\n errors = []\n test_types = [{CoreOrgData::ORG_RECORD_TYPE.name => ''}] unless test_types\n test_types.each do |test_type|\n index = test_types.index test_type\n text_values_match?(test_type[CoreOrgData::ORG_RECORD_TYPE.name], element_value(org_record_type_input(index)), errors)\n end\n errors\n end", "def verify_object_info_data(data_set)\n logger.debug \"Checking object number #{data_set[CoreObjectData::OBJECT_NUM.name]}\"\n object_data_errors = []\n text_values_match?(data_set[CoreObjectData::OBJECT_NUM.name], element_value(object_num_input), object_data_errors)\n\n other_nums = data_set[CoreObjectData::OTHER_NUM.name]\n other_nums && other_nums.each do |num|\n index = other_nums.index num\n text_values_match?(num[CoreObjectData::NUM_VALUE.name], element_value(other_num_num_input index), object_data_errors)\n text_values_match?(num[CoreObjectData::NUM_TYPE.name], element_value(other_num_type_input index), object_data_errors)\n end\n\n num_objects = data_set[CoreObjectData::NUM_OBJECTS.name]\n num_objects && text_values_match?(num_objects.to_s, element_value(num_objects_input), object_data_errors)\n\n collection = data_set[CoreObjectData::COLLECTION.name]\n collection && text_values_match?(collection, element_value(collection_input), object_data_errors)\n\n resp_depts = data_set[CoreObjectData::RESPONSIBLE_DEPTS.name]\n resp_depts && resp_depts.each { |dept| text_values_match?(dept[CoreObjectData::RESPONSIBLE_DEPT.name], element_value(resp_dept_input resp_depts.index(dept)), object_data_errors) }\n\n pub_to_list = data_set[CoreObjectData::PUBLISH_TO_LIST.name]\n pub_to_list && pub_to_list.each { |pub| text_values_match?(pub[CoreObjectData::PUBLISH_TO.name], element_value(publish_to_input pub_to_list.index(pub)), object_data_errors) }\n\n status = data_set[CoreObjectData::RECORD_STATUS.name]\n status && text_values_match?(status, element_value(record_status_input), object_data_errors)\n\n inv_statuses = data_set[CoreObjectData::INVENTORY_STATUS_LIST.name]\n inv_statuses && inv_statuses.each { |stat| text_values_match?(stat[CoreObjectData::INVENTORY_STATUS.name], element_value(inventory_status_input inv_statuses.index(stat)), object_data_errors) }\n\n brief_descrips = data_set[CoreObjectData::BRIEF_DESCRIPS.name]\n brief_descrips && brief_descrips.each { |descrip| text_values_match?(descrip[CoreObjectData::BRIEF_DESCRIP.name], element_value(brief_desc_text_area brief_descrips.index(descrip)), object_data_errors) }\n\n dist_feat = data_set[CoreObjectData::DISTINGUISHING_FEATURES.name]\n dist_feat && text_values_match?(dist_feat, element_value(dist_features_text_area), object_data_errors)\n\n comments = data_set[CoreObjectData::COMMENTS.name]\n comments && comments.each { |comment| text_values_match?(comment[CoreObjectData::COMMENT.name], 
element_value(comment_text_area comments.index(comment)), object_data_errors) }\n\n titles = data_set[CoreObjectData::TITLE_GRP.name]\n titles && titles.each do |title|\n index = titles.index title\n text_values_match?(title[CoreObjectData::TITLE.name], element_value(title_input index), object_data_errors)\n text_values_match?(title[CoreObjectData::TITLE_TYPE.name], element_value(title_type_input index), object_data_errors)\n text_values_match?(title[CoreObjectData::TITLE_LANG.name], element_value(title_lang_input index), object_data_errors)\n\n translations = title[CoreObjectData::TITLE_TRANSLATION_SUB_GRP.name]\n translations && translations.each do |trans|\n sub_index = translations.index trans\n text_values_match?(trans[CoreObjectData::TITLE_TRANSLATION.name], element_value(title_translation_input [index, sub_index]), object_data_errors)\n text_values_match?(trans[CoreObjectData::TITLE_TRANSLATION_LANG.name], element_value(title_translation_lang_input [index, sub_index]), object_data_errors)\n end\n end\n\n obj_names = data_set[CoreObjectData::OBJ_NAME_GRP.name]\n obj_names && obj_names.each do |name|\n index = obj_names.index name\n text_values_match?(name[CoreObjectData::OBJ_NAME_NAME.name], element_value(object_name_input index), object_data_errors)\n text_values_match?(name[CoreObjectData::OBJ_NAME_CURRENCY.name], element_value(object_name_currency_input index), object_data_errors)\n text_values_match?(name[CoreObjectData::OBJ_NAME_LEVEL.name], element_value(object_name_level_input index), object_data_errors)\n text_values_match?(name[CoreObjectData::OBJ_NAME_SYSTEM.name], element_value(object_name_system_input index), object_data_errors)\n text_values_match?(name[CoreObjectData::OBJ_NAME_TYPE.name], element_value(object_name_type_input index), object_data_errors)\n text_values_match?(name[CoreObjectData::OBJ_NAME_LANG.name], element_value(object_name_lang_input index), object_data_errors)\n text_values_match?(name[CoreObjectData::OBJ_NAME_NOTE.name], element_value(object_name_note_input index), object_data_errors)\n end\n\n object_data_errors\n end", "def test_type_match(subject)\n if !is_well_formed_query_result?(subject)\n throw_no_match \"Can perform query matches only against a well formed query result subject\"\n end\n \n if (@golden_master.kind_of? 
Array) && !is_well_formed_query_result?(@golden_master)\n throw_no_match \"Cannot match against a non-well formed golden master array\"\n end\n end", "def check_generator_datatypes_attributes(modelinfo_file, hqmf_oid_file)\n expect(system(\"ruby lib/generate_models.rb #{modelinfo_file} #{hqmf_oid_file} TEST\")).to be true\n datatypes = get_datatypes_attributes(modelinfo_file)\n datatypes.each do |datatype, attributes|\n if datatype != 'Identifier' && datatype != 'Component' && datatype != 'FacilityLocation' && datatype != 'Entity' && datatype != 'ResultComponent' && datatype != 'DiagnosisComponent' && datatype != 'CarePartner' && datatype != 'Organization' && datatype != 'PatientEntity' && datatype != 'Practitioner' && datatype != 'Location'\n puts datatype\n expect(File.file?('app/models/test/qdm/' + datatype.underscore + '.rb')).to be true\n ruby_model_has_attributes(File.read('app/models/test/qdm/' + datatype.underscore + '.rb'), attributes)\n # Javascript PatientSchema was renamed to QDMPatient since it just contains the QDM data\n datatype = 'QDMPatient' if datatype == 'Patient'\n js_model_has_attributes(File.read('tmp/' + datatype + '.js'), attributes)\n else\n expect(File.file?('app/models/test/qdm/attributes/' + datatype.underscore + '.rb')).to be true\n ruby_model_has_attributes(File.read('app/models/test/qdm/attributes/' + datatype.underscore + '.rb'), attributes)\n js_model_has_attributes(File.read('tmp/attributes/' + datatype + '.js'), attributes)\n end\n end\n end", "def checkCompatibility(sampleTypes, expectedLengths) \n # can't run RNA and DNA together! error if attempted\n if(sampleTypes.uniq.length>1)\n return 0\n end\n end", "def test_validate_patient_name_and_account_number_by_giving_invalid_data\n image_type_record = ImageType.new\n image_type_record.patient_first_name = \"RAJ.jk*-9\"\n image_type_record.patient_last_name = \"C)C.9-\"\n image_type_record.patient_account_number= \"gh.gh-&&89\"\n image_type_record.image_type = \"EOB\"\n image_type_record.save\n assert !image_type_record.valid?, image_type_record.errors.full_messages.to_s\n end", "def materials_data_valid?\n @materials_data.size > 0\n end", "def test_new_specification_should_not_be_valid_if_attribute_and_unit_are_different_attr_types\n\t\t@new_specification = Specification.new(\n :spec_value => @valid_specification.spec_value,\n :spec_attribute_id => spec_attributes(:height).id,\n :unit_id => units(:pound).id )\n assert !@new_specification.valid?\n\tend", "def test_mime_types\n assert_kind_of(DataTypes::XmlData, @test_records[2])\n assert_kind_of(DataTypes::XmlData, @test_records[3])\n assert_kind_of(DataTypes::XmlData, @test_records[4])\n assert_kind_of(DataTypes::XmlData, @test_records[5])\n assert_kind_of(DataTypes::XmlData, @test_records[6])\n assert_equal(\"text/xml\", @test_records[2].mime_type)\n assert_equal(\"xml\", @test_records[2].mime_subtype)\n assert_equal(\"text/html\", @test_records[3].mime_type)\n assert_equal(\"html\", @test_records[3].mime_subtype)\n assert_equal(\"text/hnml\", @test_records[4].mime_type)\n assert_equal(\"hnml\", @test_records[4].mime_subtype)\n assert_equal(\"text/html\", @test_records[5].mime_type)\n assert_equal(\"html\", @test_records[5].mime_subtype)\n assert_equal(\"text/html\", @test_records[6].mime_type)\n assert_equal(\"html\", @test_records[6].mime_subtype)\n end", "def select_material_types(test_data)\n types = test_data[UseOfCollections::MATERIAL_TYPE_LIST.name] || [{ UseOfCollections::MATERIAL_TYPE.name => ''}]\n 
prep_fieldsets_for_test_data([fieldset(UseOfCollections::MATERIAL_TYPE_LIST.name)], types)\n\n types.each_with_index do |type, index|\n wait_for_options_and_select(material_type_input(index), material_type_options(index), type[UseOfCollections::MATERIAL_TYPE.name])\n end\n end", "def test_have_types\n @sources.each { |s| assert(!s['type'].blank?, \"No type for #{s['uri']}\") }\n end", "def type?(test_type); end", "def select_material_types(test_data)\n types = test_data[CoreUseOfCollectionsData::MATERIAL_TYPE_LIST.name] || [{CoreUseOfCollectionsData::MATERIAL_TYPE.name => ''}]\n hide_notifications_bar\n prep_fieldsets_for_test_data([fieldset(CoreUseOfCollectionsData::MATERIAL_TYPE_LIST.name)], types)\n\n types.each_with_index do |type, index|\n logger.info \"Entering material type data set at index #{index}: #{type}\"\n wait_for_options_and_select(material_type_input(index), material_type_options(index), type[CoreUseOfCollectionsData::MATERIAL_TYPE.name])\n end\n end", "def verify_collection_types(test_data)\n types = test_data[CoreUseOfCollectionsData::COLLECTION_TYPE_LIST.name] || [{CoreUseOfCollectionsData::COLLECTION_TYPE.name => ''}]\n types.each_with_index { |type, index| verify_values_match(type[CoreUseOfCollectionsData::COLLECTION_TYPE.name], element_value(collection_type_input index)) }\n end", "def verify_collection_types(test_data)\n types = test_data[UseOfCollections::COLLECTION_TYPE_LIST.name] || [{ UseOfCollections::COLLECTION_TYPE.name => ''}]\n types.each_with_index { |type, index| verify_values_match(type[UseOfCollections::COLLECTION_TYPE.name], element_value(collection_type_input index)) }\n end", "def ==(material2)\n end", "def run_mdc_content_tests\n assert_not_nil Strain.find(:all)\n # Check strains loaded, start with the first strain in the test file\n strain = Strain.find_by_mdc_id('MDC-03-08')\n assert_not_nil strain\n assert_equal 228, Strain.count\n \n # the last strain in the file\n strain2 = Strain.find_by_mdc_id('MDC-05-90')\n assert_not_nil strain2\n # make sure it loaded the data\n strain2_genotypes = Genotype.find_all_by_strain_id(strain2.id)\n assert_equal 9, strain2_genotypes.size\n \n snp = Snp.find_by_symbol('J500418')\n assert_not_nil snp\n assert_equal 'C/T', snp.target_allele\n \n assert_not_nil Genotype.count\n \n assert_equal (228*9), Genotype.count\n genotypes = Genotype.find_all_by_genotypable_id(snp.id)\n assert_not_nil genotypes\n assert_equal 228, genotypes.size\n \n # Get genotype data for J500418 in strain MDC-03-08\n last_genotype = Genotype.find_by_genotypable_id_and_strain_id(snp.id, strain.id)\n assert_not_nil last_genotype\n assert_equal 'TT',last_genotype.genotype_allele\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
USERS: Enters users data per a given set of test data
def enter_users(test_data) users = test_data[CoreUseOfCollectionsData::USER_GRP.name] || [CoreUseOfCollectionsData.empty_user] hide_notifications_bar prep_fieldsets_for_test_data([fieldset(CoreUseOfCollectionsData::USER_GRP.name)], users) users.each_with_index do |user, index| logger.info "Entering user data set at index #{index}: #{user}" enter_auto_complete(user_name_input(index), user_name_options(index), user[CoreUseOfCollectionsData::USER.name], 'Local Persons') wait_for_options_and_select(user_type_input(index), user_type_options(index), user[CoreUseOfCollectionsData::USER_INSTITUTION_ROLE.name]) wait_for_options_and_select(user_role_input(index), user_role_options(index), user[CoreUseOfCollectionsData::USER_UOC_ROLE.name]) enter_auto_complete(user_institution_input(index), user_institution_options(index), user[CoreUseOfCollectionsData::USER_INSTITUTION.name], 'Local Organizations') end end
[ "def enter_users(test_data)\n test_users = test_data[CoreUseOfCollectionsData::USER_GRP.name]\n hide_notifications_bar\n test_users = [{CoreUseOfCollectionsData::USER.name => '', CoreUseOfCollectionsData::USER_TYPE.name => ''}] unless test_users\n prep_fieldsets_for_test_data([fieldset(CoreUseOfCollectionsData::USER_GRP.name)], test_users)\n\n test_users.each_with_index do |user, index|\n logger.info \"Entering user data set at index #{index}: #{user}\"\n enter_auto_complete(user_name_input(index), user_name_options(index), user[CoreUseOfCollectionsData::USER.name], 'Local Persons')\n wait_for_options_and_select(user_type_input(index), user_type_options(index), user[CoreUseOfCollectionsData::USER_TYPE.name])\n end\n end", "def enter_users(test_data)\n users = test_data[UseOfCollections::USER_GRP.name] || [UseOfCollections.empty_user]\n prep_fieldsets_for_test_data([fieldset(UseOfCollections::USER_GRP.name)], users)\n\n users.each_with_index do |user, index|\n enter_auto_complete(user_name_input(index), user_name_options(index), user[UseOfCollections::USER.name], 'Local Persons')\n wait_for_options_and_select(user_type_input(index), user_type_options(index), user[UseOfCollections::USER_INSTITUTION_ROLE.name])\n wait_for_options_and_select(user_role_input(index), user_role_options(index), user[UseOfCollections::USER_UOC_ROLE.name])\n enter_auto_complete(user_institution_input(index), user_institution_options(index), user[UseOfCollections::USER_INSTITUTION.name], 'Local Organizations')\n end\n end", "def verify_users(test_data)\n test_users = test_data[CoreUseOfCollectionsData::USER_GRP.name]\n test_users = [{CoreUseOfCollectionsData::USER.name => '', CoreUseOfCollectionsData::USER_TYPE.name => ''}] unless test_users\n test_users.each_with_index do |user, index|\n verify_values_match(user[CoreUseOfCollectionsData::USER.name], element_value(user_name_input index))\n verify_values_match(user[CoreUseOfCollectionsData::USER_TYPE.name], element_value(user_type_input index))\n end\n end", "def verify_users(test_data)\n users = test_data[UseOfCollections::USER_GRP.name] || [UseOfCollections.empty_user]\n users.each_with_index do |user, index|\n verify_values_match(user[UseOfCollections::USER.name], element_value(user_name_input index))\n verify_values_match(user[UseOfCollections::USER_INSTITUTION_ROLE.name], element_value(user_type_input index))\n verify_values_match(user[UseOfCollections::USER_UOC_ROLE.name], element_value(user_role_input index))\n verify_values_match(user[UseOfCollections::USER_INSTITUTION.name], element_value(user_institution_input index))\n end\n end", "def set_users(params)\n all_users = params[:users] || []\n owners = params[:assigned] || []\n watchers = all_users - owners\n\n set_owner_ids(owners)\n set_watcher_ids(watchers)\n end", "def verify_users(test_data)\n users = test_data[CoreUseOfCollectionsData::USER_GRP.name] || [CoreUseOfCollectionsData.empty_user]\n users.each_with_index do |user, index|\n verify_values_match(user[CoreUseOfCollectionsData::USER.name], element_value(user_name_input index))\n verify_values_match(user[CoreUseOfCollectionsData::USER_INSTITUTION_ROLE.name], element_value(user_type_input index))\n verify_values_match(user[CoreUseOfCollectionsData::USER_UOC_ROLE.name], element_value(user_role_input index))\n verify_values_match(user[CoreUseOfCollectionsData::USER_INSTITUTION.name], element_value(user_institution_input index))\n end\n end", "def process_workshop_users(data)\n\t\tdata.each do |item|\n\t\t\tif item[:password] == 
\"local\"\n\t\t\t\titem[:password] = @user.password\n\t\t\tend\n\t\t\t@user.create_user(item)\n\t\tend\n\tend", "def update_user_data\n $users.each {|user| \n if $current_user[:username] == user[0]\n user[2] = $current_user[:balance]\n user[3] = $current_user[:streak]\n CSV.open(\"user_data.csv\", \"w\") do |csv|\n $users.each {|x| csv << x}\n end\n end\n }\nend", "def update_user_data!(owner_user_ids)\n owner_user_ids.each do|user_id|\n ::User.recalculate_item_count_of!(user_id)\n end\n end", "def update_users(users, vss_users, emaildomain)\r\n vss_users.each do |name|\r\n unless users.include?(name)\r\n if emaildomain\r\n users[name] = [name, \"#{name}@#{emaildomain}\"]\r\n else\r\n users[name] = [name, \"\"]\r\n end\r\n end\r\n end\r\n users\r\n end", "def import_users\n\t\tread_header = false\n\t\tlast_id = User.last.id\n\t\tActiveRecord::Base.transaction do\n\t\t\t@dataset.each(first_name: 'first_name', last_name: 'last_name', email: 'email', birthdate: 'birthdate') do |hash|\n\t\t\t\tif read_header\n\t\t\t\t\tUser.import_user(hash, role)\n\t\t\t\telse\n\t\t\t\t\tread_header = true\n\t\t\t\tend\n\t\t\tend\n\t\tend\n\t\tUser.find_each(start: last_id + 1) do |user|\n\t\t\tgenerated_password = Devise.friendly_token.first(8)\n\t\t\tuser.password = generated_password\n\t\t\tif user.save!\n\t\t\t\t#UserMailer.password_creation(user, generated_password).deliver\n\t\t\tend\n\t\tend\n \tend", "def sync_all_active_users\n prepare_sis_user_import\n get_canvas_user_report_file\n load_active_users\n process_updated_users\n process_new_users\n Canvas::MaintainUsers.handle_changed_sis_user_ids(@sis_user_id_updates)\n import_sis_user_csv\n end", "def add_users(course, test_users)\n ['Teacher', 'Designer', 'Lead TA', 'TA', 'Observer', 'Reader', 'Student'].each do |user_role|\n users = ''\n users_with_role = test_users.select { |user| user.role == user_role }\n users_with_role.each { |user| users << \"#{user.uid}, \" }\n if users.empty?\n logger.warn \"No test users with role #{user_role}\"\n else\n begin\n # Canvas add-user function is often flaky in test envs, so retry if it fails\n tries ||= 3\n logger.info \"Adding users with role #{user_role}\"\n load_users_page course\n wait_for_load_and_click add_people_button_element\n add_user_by_uid_element.when_visible Utils.short_wait\n sleep 1\n check_add_user_by_uid\n wait_for_element_and_type_js(user_list_element, users)\n self.user_role = user_role\n wait_for_update_and_click_js next_button_element\n users_ready_to_add_msg_element.when_visible Utils.medium_wait\n hide_canvas_footer\n wait_for_update_and_click_js next_button_element\n users_with_role.each { |user| wait_for_user user }\n rescue => e\n logger.error \"#{e.message}\\n#{e.backtrace}\"\n logger.warn 'Add User failed, retrying'\n retry unless (tries -=1).zero?\n end\n end\n end\n end", "def process_updated_users\n CSV.foreach(get_canvas_user_report_file, :headers => :first_row) do |canvas_user|\n uid = canvas_user['login_id']\n\n # process only if found in campus data\n if @active_sis_users[uid]\n active_campus_user = canvas_user_from_campus_row(@active_sis_users[uid])\n\n # if SIS User ID has changed\n if canvas_user['user_id'] != active_campus_user['user_id']\n @sis_user_id_updates[\"sis_login_id:#{canvas_user['login_id']}\"] = active_campus_user['user_id']\n end\n\n unless Canvas::MaintainUsers.provisioned_account_eq_sis_account?(canvas_user, active_campus_user)\n logger.debug(\"Updating user #{canvas_user} with #{active_campus_user}\")\n add_user_to_import(active_campus_user)\n end\n 
@active_sis_users.delete(uid)\n end\n end\n end", "def send_test_to_users\n params[:send_test_to_users] = true\n if request.put?\n params[:users] ||= {}\n params[:selected_users] ||= {}\n flash[:error] ||= \"\"\n user_assessments = []\n failed_user_assessments = []\n if params[:selected_users].empty?\n flash[:error] = \"Please select at least one user.\"\n get_packages \n render :action => :send_test_to_users and return\n end\n params[:selected_users].each do |user_id,on|\n #user_assessment = @assessment.user_assessments.where(:query_options => {\n # :assessment_id => @assessment.id,\n # :user_id => user_id\n #}).all[0]\n\n assessment_taker_type = Vger::Resources::Suitability::UserAssessment::AssessmentTakerType::REGULAR\n @user = Vger::Resources::User.find(user_id)\n recipients = []\n recipients |= params[:report_email_recipients].split(\",\") if params[:report_email_recipients].present?\n if params[:send_report_to_user]\n recipients.push @user.email\n assessment_taker_type = Vger::Resources::Suitability::UserAssessment::AssessmentTakerType::REPORT_RECEIVER\n end\n cc_emails = []\n cc_emails |= params[:cc_emails].to_s.split(\",\")\n if params[:options][:send_report_links_to_manager].present? || params[:options][:send_assessment_links_to_manager].present?\n if params[:options][:manager_name].blank?\n flash[:error] = \"Please enter the Notification Recipient's name<br/>\".html_safe\n get_templates\n get_packages\n render :action => :send_test_to_users and return\n end\n if !(Validators.email_regex =~ params[:options][:manager_email])\n flash[:error] += \"Please enter a valid Email Address for Notification Recipient\".html_safe\n get_templates\n get_packages\n render :action => :send_test_to_users and return\n end\n end\n options = {\n :cc_emails => cc_emails,\n :assessment_taker_type => assessment_taker_type,\n :report_link_receiver_name => params[:options][:manager_name],\n :report_link_receiver_email => params[:options][:manager_email],\n :assessment_link_receiver_name => params[:options][:manager_name],\n :assessment_link_receiver_email => params[:options][:manager_email],\n :send_report_links_to_manager => params[:options][:send_report_links_to_manager].present?,\n :send_assessment_links_to_manager => params[:options][:send_assessment_links_to_manager].present?\n }\n\n options.merge!(template_id: params[:template_id].to_i) if params[:template_id].present?\n options.merge!(link_expiry: params[:options][:link_validity]) if params[:options][:link_validity].present?\n options.merge!(package_selection: params[:options][:package_selection]) if params[:options][:package_selection].present?\n\n # create user_assessment if not present\n # add it to list of user_assessments to send email\n user_assessment = Vger::Resources::Suitability::UserAssessment.create(\n :trial => params[:trial] == \"true\",\n :applicant_id => params[:users][user_id][:applicant_id],\n :assessment_id => @assessment.id,\n :user_id => user_id,\n :responses_count => 0,\n :report_email_recipients => recipients.join(\",\"),\n :options => options,\n :language => (@assessment.languages.size == 1 ? 
@assessment.languages.first : nil)\n )\n if user_assessment.error_messages.present?\n failed_user_assessments << user_assessment\n else\n user_assessments.push user_assessment\n end\n end\n assessment = Vger::Resources::Suitability::CustomAssessment.send_test_to_users(\n :id => @assessment.id,\n :user_assessment_ids => user_assessments.map(&:id),\n :options => params[:options]\n ) if user_assessments.present?\n if failed_user_assessments.present?\n #flash[:error] = \"Cannot send test to #{failed_user_assessments.size} users.#{failed_user_assessments.first.error_messages.join('<br/>')}\"\n #redirect_to users_url\n flash[:error] = \"#{failed_user_assessments.first.error_messages.join('<br/>')}\"\n get_templates\n get_packages\n render :action => :send_test_to_users and return\n else\n if @assessment.assessment_type == Vger::Resources::Suitability::CustomAssessment::AssessmentType::BENCHMARK\n flash[:notice] = \"You have successfully sent the Benchmark!\"\n else\n flash[:notice] = \"Test was sent successfully!\"\n end\n redirect_to users_url\n end\n else\n redirect_to add_users_url\n end\n end", "def process_users(users)\n users.each do |element|\n user = User.new(element, @bot)\n @users[user.id] = user\n end\n end", "def updateUserResults\n results = Pick.where(week_id: session[:currentWeek]).order(\"user_id, slipnum\")\n groupedpicks = flattenResults(results)\n # get the winners in a hash\n winners = getGameWinners\n\n # delete all the results for a given week and then loop through and enter in the results\n UserResult.where(week_id: session[:currentWeek]).delete_all\n # run the bulk insert\n UserResult.bulk_insert(:user_id, :week_id, :version, :slipnum, :wins, :losses, :winner) do |worker|\n # loop through each of the picks and insert new records with their results\n groupedpicks.each do |picks|\n #loop through all five results and calculate wins and losses to create user result\n wins = 0\n winner = 0\n losses = 0\n picks.each do |p|\n if winners.has_key?(p.team_id)\n wins += 1\n elsif winners.has_value?(p.team_id)\n losses += 1\n end\n end\n if wins == 5\n winner = 1\n elsif losses > 0\n winner = -1\n end\n worker.add user_id: picks[0].user_id, week_id: picks[0].week_id, version: 0, slipnum: picks[0].slipnum, wins: wins, losses: losses, winner: winner\n end\n end\n end", "def modify_user_data(data)\n @user_data.merge!(data)\n end", "def load_users\n user_set = Set[]\n @users_config.each_key do |provider|\n @users_config[provider].each do |a|\n add_user(a, user_set, provider)\n end\n end\n add_users_to_group(user_set)\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Verifies that the users data matches test data
def verify_users(test_data) users = test_data[CoreUseOfCollectionsData::USER_GRP.name] || [CoreUseOfCollectionsData.empty_user] users.each_with_index do |user, index| verify_values_match(user[CoreUseOfCollectionsData::USER.name], element_value(user_name_input index)) verify_values_match(user[CoreUseOfCollectionsData::USER_INSTITUTION_ROLE.name], element_value(user_type_input index)) verify_values_match(user[CoreUseOfCollectionsData::USER_UOC_ROLE.name], element_value(user_role_input index)) verify_values_match(user[CoreUseOfCollectionsData::USER_INSTITUTION.name], element_value(user_institution_input index)) end end
[ "def verify_users(test_data)\n test_users = test_data[CoreUseOfCollectionsData::USER_GRP.name]\n test_users = [{CoreUseOfCollectionsData::USER.name => '', CoreUseOfCollectionsData::USER_TYPE.name => ''}] unless test_users\n test_users.each_with_index do |user, index|\n verify_values_match(user[CoreUseOfCollectionsData::USER.name], element_value(user_name_input index))\n verify_values_match(user[CoreUseOfCollectionsData::USER_TYPE.name], element_value(user_type_input index))\n end\n end", "def verify_users(test_data)\n users = test_data[UseOfCollections::USER_GRP.name] || [UseOfCollections.empty_user]\n users.each_with_index do |user, index|\n verify_values_match(user[UseOfCollections::USER.name], element_value(user_name_input index))\n verify_values_match(user[UseOfCollections::USER_INSTITUTION_ROLE.name], element_value(user_type_input index))\n verify_values_match(user[UseOfCollections::USER_UOC_ROLE.name], element_value(user_role_input index))\n verify_values_match(user[UseOfCollections::USER_INSTITUTION.name], element_value(user_institution_input index))\n end\n end", "def verify_staff(test_data)\n staff = test_data[UseOfCollections::STAFF_GRP.name] || [UseOfCollections.empty_staff]\n staff.each_with_index do |staf, index|\n verify_values_match(staf[UseOfCollections::STAFF_NAME.name], element_value(staff_name_input index))\n verify_values_match(staf[UseOfCollections::STAFF_ROLE.name], element_value(staff_role_input index))\n verify_values_match(staf[UseOfCollections::STAFF_HOURS_SPENT.name], element_value(staff_hours_spent_input index))\n verify_values_match(staf[UseOfCollections::STAFF_NOTE.name], element_value(staff_note_input index))\n end\n end", "def verify_staff(test_data)\n staff = test_data[CoreUseOfCollectionsData::STAFF_GRP.name] || [CoreUseOfCollectionsData.empty_staff]\n staff.each_with_index do |staf, index|\n verify_values_match(staf[CoreUseOfCollectionsData::STAFF_NAME.name], element_value(staff_name_input index))\n verify_values_match(staf[CoreUseOfCollectionsData::STAFF_ROLE.name], element_value(staff_role_input index))\n verify_values_match(staf[CoreUseOfCollectionsData::STAFF_HOURS_SPENT.name], element_value(staff_hours_spent_input index))\n verify_values_match(staf[CoreUseOfCollectionsData::STAFF_NOTE.name], element_value(staff_note_input index))\n end\n end", "def verify_authorizations(test_data)\n authorizations = test_data[UseOfCollections::AUTHORIZATION_GRP.name] || [UseOfCollections.empty_authorization]\n authorizations.each_with_index do |auth, index|\n verify_values_match(auth[UseOfCollections::AUTHORIZED_BY.name], element_value(authorized_by_input index))\n verify_values_match(auth[UseOfCollections::AUTHORIZATION_DATE.name], element_value(authorization_date_input index))\n verify_values_match(auth[UseOfCollections::AUTHORIZATION_NOTE.name], element_value(authorization_note_input index))\n verify_values_match(auth[UseOfCollections::AUTHORIZATION_STATUS.name], element_value(authorization_status_input index))\n end\n end", "def run_verification\n verify_data_format # runs through what we have and makes sure teh values are in the correct format\n verify_dependencies # makes sure that any 'optional' columns have any dependency columns present\n verify_order # finally, all columns must be in a specific order \n end", "def verify_result(test_data)\n verify_values_match(test_data[CoreUseOfCollectionsData::RESULT.name], element_value(result_text_area))\n end", "def test_repeated_email\n \tuser = users(:user_with_repeated_email)\n \tassert !user.valid?\n end", "def 
test_users_exist\n assert(@@db[:sakai_user].count > 0)\n assert(@@db[:sakai_user_id_map].count > 0)\n end", "def test_password\n main_user = users(:main_user)\n normal_user = users(:normal_user)\n \n assert_equal true, main_user.valid_password('testing')\n assert_equal false, main_user.valid_password('goop')\n \n assert_equal true, normal_user.valid_password('testing')\n assert_equal false, normal_user.valid_password('goop')\n end", "def dataTest data\n failure = RubyUnit::AssertionFailure.new(data)\n assertEqual data, failure.data, 'Assertion data Hash is incorrect'\n end", "def check_updated_user_data_validity(user_data)\n schema = {\n 'type' => 'object',\n 'required' => %w(OrgDefinedId FirstName MiddleName\n LastName ExternalEmail UserName\n Activation),\n 'properties' => {\n 'OrgDefinedId' => { 'type' => %w(string null) },\n 'FirstName' => { 'type' => 'string' },\n 'MiddleName' => { 'type' => %w(string null) },\n 'LastName' => { 'type' => 'string' },\n 'ExternalEmail' => { 'type' => %w(string null) },\n 'UserName' => { 'type' => 'string' },\n 'Activation' => {\n 'required' => ['IsActive'],\n 'properties' => {\n 'IsActive' => {\n 'type' => 'boolean'\n }\n }\n }\n }\n }\n JSON::Validator.validate!(schema, user_data, validate_schema: true)\nend", "def verify_authorized_by(test_data)\n verify_values_match(test_data[CoreUseOfCollectionsData::AUTHORIZED_BY.name], element_value(authorized_by_input))\n end", "def verify_authorizations(test_data)\n authorizations = test_data[CoreUseOfCollectionsData::AUTHORIZATION_GRP.name] || [CoreUseOfCollectionsData.empty_authorization]\n authorizations.each_with_index do |auth, index|\n verify_values_match(auth[CoreUseOfCollectionsData::AUTHORIZED_BY.name], element_value(authorized_by_input index))\n verify_values_match(auth[CoreUseOfCollectionsData::AUTHORIZATION_DATE.name], element_value(authorization_date_input index))\n verify_values_match(auth[CoreUseOfCollectionsData::AUTHORIZATION_NOTE.name], element_value(authorization_note_input index))\n verify_values_match(auth[CoreUseOfCollectionsData::AUTHORIZATION_STATUS.name], element_value(authorization_status_input index))\n end\n end", "def verify_result(test_data)\n verify_values_match(test_data[UseOfCollections::RESULT.name], element_value(result_text_area))\n end", "def assert_user_arrays_equal(expect, got, *args)\n assert_arrays_equal(expect, got, *args, &:login)\n end", "def test_no_duplicate_emails\n @db_modified = true\n data = post_create_user('email' => 'user1@lakeheadu.ca',\n 'password' => 'password')\n assert_equal('error', data[0]['kind'])\n end", "def checkData(iUserID, iComment)\n # Nothing to test\n end", "def test_repeated_username\n \tuser = users(:user_with_repeated_username)\n \tassert !user.valid?\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Verifies that the title matches test data
def verify_title(test_data) verify_values_match(test_data[CoreUseOfCollectionsData::TITLE.name], element_value(title_input)) end
[ "def verify_title(test_data)\n verify_values_match(test_data[UseOfCollections::TITLE.name], element_value(title_input))\n end", "def verify_title title_pattern\r\n command 'verifyTitle', title_pattern\r\n end", "def verifyPopupTitle(text)\n Log.TestVerify(text == @title.getText, \"Verify popup Title == '#{text}' \")\n end", "def expected_title(expected_title)\n define_method(\"has_expected_title?\") do\n has_expected_title = expected_title === title\n raise \"Expected title '#{expected_title}' instead of '#{title}'\" unless has_expected_title\n has_expected_title\n end\n end", "def has_correct_title?\n no_title_is_provided if title_attribute.nil?\n !title.match(title_attribute).nil?\n end", "def assert_title(title)\n assert_seen title, :within => \"head title\"\n end", "def expected_title(expected_title)\n define_method(\"has_expected_title?\") do\n has_expected_title = expected_title.kind_of?(Regexp) ? expected_title =~ title : expected_title == title\n raise \"Expected title '#{expected_title}' instead of '#{title}'\" unless has_expected_title\n has_expected_title\n end\n end", "def check_title(title, test_case)\n page_title = get_page_title\n\n if test_case\n expect(page_title).to eq title\n else\n expect(page_title).to_not eq title\n end\nend", "def page_title_is_correct\n ( text =~ self.class.page_title_validation_value ) !=nil\n end", "def is_title_case\n if title.split.any?{|w|w[0].upcase != w[0]}\n errors.add(:title, \"Title must be in title case\")\n end\n end", "def expected_title expected_title\n define_method 'has_expected_title?' do\n has_expected_title = expected_title.kind_of?(Regexp) ? expected_title =~ @browser.title : expected_title == @browser.title\n raise \"Expected title '#{expected_title}' instead of '#{@browser.title}'\" unless has_expected_title\n end\n end", "def check_partial_title(partial_text_title, test_case)\n page_title = get_page_title\n\n if test_case\n expect(page_title).to include(partial_text_title)\n else\n expect(page_title).to_not include(partial_text_title)\n end\nend", "def test_missing_title\n idea = ideas(:no_title)\n assert !idea.valid?, idea.errors.full_messages.inspect\n end", "def assert_head_title(title)\n assert_select(\"head title\", { text: /#{title}/, count: 1 },\n \"Incorrect page or page title displayed\")\n end", "def verify_page?(key, exit = true)\n base_title = wait_for_title(exit)\n puts \"Verify Title - Desired Prefix: #{site.get_title(key)} => Full Title: #{page_title}\" if exit\n if site.get_title(key) != base_title\n fail(\"Page title does not match expected result. 
EXPECTED: #{site.get_title(key)} FOUND: #{page_title}\") if exit\n return false\n end\n return true\n end", "def assert_title(title)\n assert_tag :tag => 'title', :content => title\n assert_tag :tag => 'h1', :content => title\n end", "def check_page_title(text)\n $driver.title.should == text\n end", "def validate_title_fields(recs = records.to_a)\n title_fields = get_title_fields\n shared_title_values = make_comparable(title_fields)\n recs.each.with_index(1) do |rec, idx|\n rec_fields = get_fields(rec, :title)\n rec_title_fields = make_comparable(rec_fields)\n shared_title_values.each_pair do |section, shared_values|\n rec_title_fields[section]&.each_pair do |k, v|\n next if v == shared_values[k]\n sec = '%-15s' % \"[#{section}]\"\n field = '%-20s' % \"[#{k}]\"\n record = \"FILE #{idx} -> #{rec_fields[section][k].inspect}\"\n shared = \"EXEMPLAR -> #{title_fields[section][k].inspect}\"\n __output \"*** #{sec} | #{field} #{record} | #{shared}\"\n end\n end\n end\n end", "def have_a_page_title(expected) \n simple_matcher(\"have a page title in the <head> element with [#{expected}]\") do |given, matcher|\n given.should have_tag('head > title', expected)\n end\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Verifies date requested matches test data
def verify_date_requested(test_data) verify_values_match(test_data[CoreUseOfCollectionsData::DATE_REQUESTED.name], element_value(date_requested_input)) end
[ "def verify_date_requested(test_data)\n verify_values_match(test_data[UseOfCollections::DATE_REQUESTED.name], element_value(date_requested_input))\n end", "def verify_foundation_date(test_data)\n errors = []\n text_values_match?(test_data[CoreOrgData::FOUNDING_DATE.name], element_value(foundation_date_input), errors)\n errors\n end", "def verify_use_dates(test_data)\n dates = test_data[CoreUseOfCollectionsData::USE_DATE_GRP.name] || [CoreUseOfCollectionsData.empty_use_date]\n dates.each_with_index do |date, index|\n verify_values_match(date[CoreUseOfCollectionsData::USE_DATE.name], element_value(use_date_input index))\n verify_values_match(date[CoreUseOfCollectionsData::USE_DATE_TIME_NOTE.name], element_value(use_date_time_note(index)))\n verify_values_match(date[CoreUseOfCollectionsData::USE_DATE_NUM_VISITORS.name], element_value(use_num_visitors_input index))\n verify_values_match(date[CoreUseOfCollectionsData::USE_DATE_HOURS_SPENT.name], element_value(use_hours_spent_input index))\n verify_values_match(date[CoreUseOfCollectionsData::USE_DATE_VISITOR_NOTE.name], element_value(use_note_input index))\n end\n end", "def verify_use_dates(test_data)\n dates = test_data[UseOfCollections::USE_DATE_GRP.name] || [UseOfCollections.empty_use_date]\n dates.each_with_index do |date, index|\n verify_values_match(date[UseOfCollections::USE_DATE.name], element_value(use_date_input index))\n verify_values_match(date[UseOfCollections::USE_DATE_TIME_NOTE.name], element_value(use_date_time_note(index)))\n verify_values_match(date[UseOfCollections::USE_DATE_NUM_VISITORS.name], element_value(use_num_visitors_input index))\n verify_values_match(date[UseOfCollections::USE_DATE_HOURS_SPENT.name], element_value(use_hours_spent_input index))\n verify_values_match(date[UseOfCollections::USE_DATE_VISITOR_NOTE.name], element_value(use_note_input index))\n end\n end", "def verify_authorization_date(test_data)\n verify_values_match(test_data[CoreUseOfCollectionsData::AUTHORIZATION_DATE.name], element_value(authorization_date_input))\n end", "def verify_date_completed(test_data)\n verify_values_match(test_data[CoreUseOfCollectionsData::DATE_COMPLETED.name], element_value(date_completed_input))\n end", "def verify_date_completed(test_data)\n verify_values_match(test_data[UseOfCollections::DATE_COMPLETED.name], element_value(date_completed_input))\n end", "def verify_accession_date(data_set)\n verify_values_match(data_set[Acquisition::ACCESSION_DATE_GRP.name], element_value(access_date_input_locator))\n end", "def verify_dissolution_date(test_data)\n errors = []\n text_values_match?(test_data[Org::DISSOLUTION_DATE.name], element_value(dissolution_date_input), errors)\n errors\n end", "def assert_date( exp, act )\r\n assert_equal exp.year, act.year\r\n assert_equal exp.month, act.month \r\n assert_equal exp.day, act.day\r\n end", "def test_verify_time_stamp_valid\n assert_equal true, verify_time_stamp(create_maps(@full))\n end", "def test_invalid_card_dates\n create_invalid_card_dates\n deny @last_year.valid?\n deny @last_month.valid?\n end", "def test_several_dates_1\n \tassert_equal(1, Zeller.calc(12, 2013))\n end", "def test_date_in_checks_for_check_date\n check_information = check_informations(:check_information13)\n check_date = check_information.date_in_checks(check_information.check_date)\n assert_equal(check_date, '02/05/09')\n end", "def test_date\n if test_execution_date and test_delivery_date\n if test_execution_date > test_delivery_date\n errors.add(:test_delivery_date, \"should be greater then test 
date.\")\n end\n end\n end", "def verify_end_date(test_data)\n verify_values_match(test_data[UseOfCollections::END_DATE.name], element_value(end_date_input))\n end", "def verify_end_date(test_data)\n verify_values_match(test_data[CoreUseOfCollectionsData::END_DATE.name], element_value(end_date_input))\n end", "def test_invalid_bithdays\n spec = @valid_spec\n invalid_birthdates = [Date.new(Spec::START_YEAR - 1), Date.today + 1.year]\n invalid_birthdates.each do |birthdate|\n spec.birthdate = birthdate\n assert !spec.valid?, \"#{birthdate} should not pass validation\"\n end\n end", "def test_date_formats_and_ranges\n batch = Batch.new(:id => 20,\n :batchid=>20,\n :date => Date.today.strftime(\"%m/%d/%Y\"),\n :eob => 30,\n :facility=> facilities(:facility1),\n :arrival_time => \"#{Time.now}\",\n :target_time => \"#{Time.now}\",\n :status => \"New\"\n )\n\n # Assert the validity of tommorrow, mm/dd/YYYY format, and sixty days prior\n assert_valid(batch, :date, [(Date.today + 1).to_s, Date.today.strftime(\"%m/%d/%Y\"), (Date.today - 60).to_s])\n # Assert the invalidity of three days from today, more than sixty days ago, mm/dd/yy, and yy/mm/dd formats\n assert_invalid(batch, :date, [(Date.today + 4).to_s, (Date.today - 61).to_s, Date.today.strftime(\"%m/%d/%y\"), Date.today.strftime(\"y/%m/%d\")])\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Verifies date completed matches test data
def verify_date_completed(test_data) verify_values_match(test_data[CoreUseOfCollectionsData::DATE_COMPLETED.name], element_value(date_completed_input)) end
[ "def verify_date_completed(test_data)\n verify_values_match(test_data[UseOfCollections::DATE_COMPLETED.name], element_value(date_completed_input))\n end", "def verify_date_requested(test_data)\n verify_values_match(test_data[CoreUseOfCollectionsData::DATE_REQUESTED.name], element_value(date_requested_input))\n end", "def verify_date_requested(test_data)\n verify_values_match(test_data[UseOfCollections::DATE_REQUESTED.name], element_value(date_requested_input))\n end", "def test_valid_review_dates_others_work\n #@request.session[:user] = User.find(users(:student1).id)\n @assignment = assignments(:assignment1)\n #@participant = AssignmentParticipant.find(participants(:par1).id)\n due_dates = DueDate.find(:all, :conditions => [\"assignment_id = ?\", @assignment.id])\n @very_last_due_date = DueDate.find(:all, :order => \"due_at DESC\", :limit =>1, :conditions => [\"assignment_id = ?\", @assignment.id])\n next_due_date = @very_last_due_date[0]\n for due_date in due_dates\n if due_date.due_at > Time.now\n if due_date.due_at < next_due_date.due_at\n next_due_date = due_date\n end\n end\n end\n @review_phase = next_due_date.deadline_type_id;\n\n assert_equal(DeadlineType.find(@review_phase).name , deadline_types(:deadline_type_review).name)\n end", "def verify_end_date(test_data)\n verify_values_match(test_data[CoreUseOfCollectionsData::END_DATE.name], element_value(end_date_input))\n end", "def verify_end_date(test_data)\n verify_values_match(test_data[UseOfCollections::END_DATE.name], element_value(end_date_input))\n end", "def verify_use_dates(test_data)\n dates = test_data[CoreUseOfCollectionsData::USE_DATE_GRP.name] || [CoreUseOfCollectionsData.empty_use_date]\n dates.each_with_index do |date, index|\n verify_values_match(date[CoreUseOfCollectionsData::USE_DATE.name], element_value(use_date_input index))\n verify_values_match(date[CoreUseOfCollectionsData::USE_DATE_TIME_NOTE.name], element_value(use_date_time_note(index)))\n verify_values_match(date[CoreUseOfCollectionsData::USE_DATE_NUM_VISITORS.name], element_value(use_num_visitors_input index))\n verify_values_match(date[CoreUseOfCollectionsData::USE_DATE_HOURS_SPENT.name], element_value(use_hours_spent_input index))\n verify_values_match(date[CoreUseOfCollectionsData::USE_DATE_VISITOR_NOTE.name], element_value(use_note_input index))\n end\n end", "def verify_use_dates(test_data)\n dates = test_data[UseOfCollections::USE_DATE_GRP.name] || [UseOfCollections.empty_use_date]\n dates.each_with_index do |date, index|\n verify_values_match(date[UseOfCollections::USE_DATE.name], element_value(use_date_input index))\n verify_values_match(date[UseOfCollections::USE_DATE_TIME_NOTE.name], element_value(use_date_time_note(index)))\n verify_values_match(date[UseOfCollections::USE_DATE_NUM_VISITORS.name], element_value(use_num_visitors_input index))\n verify_values_match(date[UseOfCollections::USE_DATE_HOURS_SPENT.name], element_value(use_hours_spent_input index))\n verify_values_match(date[UseOfCollections::USE_DATE_VISITOR_NOTE.name], element_value(use_note_input index))\n end\n end", "def test_date\n if test_execution_date and test_delivery_date\n if test_execution_date > test_delivery_date\n errors.add(:test_delivery_date, \"should be greater then test date.\")\n end\n end\n end", "def verify_foundation_date(test_data)\n errors = []\n text_values_match?(test_data[CoreOrgData::FOUNDING_DATE.name], element_value(foundation_date_input), errors)\n errors\n end", "def verify_dissolution_date(test_data)\n errors = []\n 
text_values_match?(test_data[Org::DISSOLUTION_DATE.name], element_value(dissolution_date_input), errors)\n errors\n end", "def team_registration_passed?\n d = @data_object\n today = Date.today\n d.aff_team_reg_end.to_date < today\n end", "def verify_authorization_date(test_data)\n verify_values_match(test_data[CoreUseOfCollectionsData::AUTHORIZATION_DATE.name], element_value(authorization_date_input))\n end", "def completed?\n CandidateEvent.completed?(due_date, verified)\n end", "def test_verify_time_stamp_valid\n assert_equal true, verify_time_stamp(create_maps(@full))\n end", "def verify_accession_date(data_set)\n verify_values_match(data_set[Acquisition::ACCESSION_DATE_GRP.name], element_value(access_date_input_locator))\n end", "def dates_are_correct\n errors.add(:begins, _(\"%{fn} can't be blank\")) if begins.nil?\n errors.add(:finishes, _(\"%{fn} can't be blank\")) if finishes.nil?\n\n dates_in_order?(begins, finishes) or\n errors.add(:begins, _('Conference must end after its start date'))\n\n dates_in_order?(cfp_open_date, cfp_close_date) or\n errors.add(:cfp_open_date, _('Call for papers must end after its ' +\n 'start date'))\n dates_in_order?(cfp_close_date, begins) or\n errors.add(:cfp_close_date, _('Call for papers must finish before ' +\n 'the conference begins'))\n\n dates_in_order?(reg_open_date, reg_close_date) or\n errors.add(:reg_open_date, _('Registration must end after its ' +\n 'start date'))\n dates_in_order?(reg_close_date, finishes) or\n errors.add(:reg_close_date, _('Registration must finish before the ' +\n 'conference ends'))\n end", "def check_completed_at\n self.errors.add('Completion Date', 'can\\'t be blank') if (self.completed && self.completed_at.blank?)\n end", "def correct_completion_status_date\n if self.is_complete? && self.completed_at.blank?\n # if task is marked complete without a completion time, update completion time to now & allow confirmation\n self.completed_at = DateTime.now\n return true\n elsif !self.is_complete? && !self.completed_at.blank?\n # if task is marked incomplete, but there is a completion time entered, update completion time to blank & allow confirmation\n self.completed_at = nil\n return true\n else\n return false\n end\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
OCCASIONS: Enters occasions per a given set of test data
def enter_occasions(test_data) occasions = test_data[CoreUseOfCollectionsData::OCCASION_LIST.name] || [{CoreUseOfCollectionsData::OCCASION.name => ''}] hide_notifications_bar prep_fieldsets_for_test_data([fieldset(CoreUseOfCollectionsData::OCCASION_LIST.name)], occasions) occasions.each_with_index do |occasion, index| logger.info "Entering occasion data set at index #{index}: #{occasion}" enter_auto_complete(occasion_input(index), occasion_options(index), occasion[CoreUseOfCollectionsData::OCCASION.name], 'Occasion Concepts') end end
[ "def enter_pahma_occasions(test_data)\n occasions = test_data[CoreUseOfCollectionsData::OCCASION_LIST.name] || [{CoreUseOfCollectionsData::OCCASION.name => ''}]\n hide_notifications_bar\n prep_fieldsets_for_test_data([fieldset(CoreUseOfCollectionsData::OCCASION_LIST.name)], occasions)\n occasions.each_with_index do |occasion, index|\n logger.info \"Entering occasion data set at index #{index}: #{occasion}\"\n enter_auto_complete(occasion_input(index), occasion_options(index), occasion[CoreUseOfCollectionsData::OCCASION.name], 'Occasion Concepts')\n end\n end", "def operating_expenses_ccs\n fetch(:operating_expenses_ccs) do\n variable_operation_and_maintenance_costs_for_ccs + co2_emissions_costs\n end\n end", "def populate_datasets_to_be_curated\n datasets_found = 0\n\n # for each dataset that was in the target status on the given day\n launch_day = Date.new(2019, 9, 17)\n\n StashEngine::Identifier.where(created_at: launch_day..(date + 1.day)).each do |i|\n # check the actual status on that date...if it was 'curation' or 'submitted', count it\n s = status_on_date(i)\n datasets_found += 1 if %w[submitted curation].include?(s)\n end\n update(datasets_to_be_curated: datasets_found)\n end", "def set_research_est_count\n count = 0\n Project.rex_est_total.each do |r|\n if User.is_charity(r.userID)\n count = count + r.estimated_spending_on_medical_research.to_i\n end\n end\n session[:rex_est_spending] = count\nend", "def exec_iter_fixed_alpha aggrStats\n iterStats = RunInfo.new\n iterConsensus = Hash.new {|hash, key| hash[key] = 0 }\n \n # NUMBER_OF_ITER times do\n $NUM_AVG_ITER.times do |i| \n # randomly withhold 20% of data\n pick_patients\n \n # find G'\n cover = exec_search \"nCOP_heruistic\"\n \n # store the stats\n iterStats.fillIn cover\n\n # add the cover to the consensus list\n add_to_consensus(iterConsensus, cover)\n end\n \n # compute the average and std for this set of iterations\n aggrStats.fillAggregateStats iterStats\n aggrStats.alpha << $ALPHA\n aggrStats.beta << $BETA\n\n # remember the frequency genes occured\n $selected_genes = iterConsensus\nend", "def add_encounter_ids_to_events\n encounter_times = {}\n qdmPatient.get_data_elements('encounter', 'performed').each do |ep|\n # Only use inpatient encounter\n unless ep.dataElementCodes.map(&:code).intersect?(bundle.value_sets.where(oid: '2.16.840.1.113883.3.666.5.307').first.concepts.map(&:code))\n next\n end\n\n rel_time = ep.relevantPeriod\n # 1 day before and 1 day after\n rel_time.low -= 86_400\n rel_time.high += 86_400\n encounter_times[ep.id] = rel_time\n end\n qdmPatient.get_data_elements('laboratory_test', 'performed').each do |lt|\n event_time = lt.resultDatetime || lt.relevantDatetime || lt.relevantPeriod&.low\n lt.encounter_id = encounter_id_for_event(encounter_times, event_time)\n lt.relatedTo << lt.encounter_id if lt.encounter_id\n end\n qdmPatient.get_data_elements('physical_exam', 'performed').each do |pe|\n event_time = pe.relevantDatetime || pe.relevantPeriod&.low\n pe.encounter_id = encounter_id_for_event(encounter_times, event_time)\n pe.relatedTo << pe.encounter_id if pe.encounter_id\n end\n end", "def accumulate_contest (year, region, contest)\n puts \"..Including #{contest.year_name} #{contest.place}\"\n contest.c_results.each do |c_result|\n c_result.pc_results.each do |pc_result|\n #puts \"...finding regional pilot for #{pc_result.pilot}\"\n regional_pilot = RegionalPilot.find_or_create_given_result(\n year, region, pc_result.category.id, pc_result.pilot.id)\n #puts \"....regional pilot is 
#{regional_pilot}\"\n regional_pilot.pc_results << pc_result\n #puts \"....regional pilot contest count is #{regional_pilot.pc_results.count}\"\n end\n end\nend", "def count_electoral_college\n @swing_states.each do |row|\n @swing_votes += @polls[row[0]][0]\n end\n @clinton_states.each do |row|\n @clinton_votes += @polls[row[0]][0]\n end\n @trump_states.each do |row|\n @trump_votes += @polls[row[0]][0]\n end\n end", "def populate_datasets_to_be_curated\n datasets_found = Set.new\n # for each dataset that received the target status on the given day\n cas = CurationActivity.where(created_at: date..(date + 1.day), status: %w[submitted curation])\n cas.each do |ca|\n found_dataset = ca.resource.identifier\n next unless found_dataset\n\n orig_submitter_id = found_dataset.resources.first.user_id\n if ca.user_id == orig_submitter_id\n # Case: the originala author submitted it or took it out of peer review\n # if this ca was caused by the author of the current version, include the dataset\n datasets_found.add(found_dataset)\n elsif ca.user_id == 0\n # Case: we received a journal notification, so the system pulled it out of PPR\n # if this ca was caused by system, include the dataset if the previous ca (in same resource) was peer review\n prev_ca = CurationActivity.where(resource_id: ca.resource_id, id: 0..ca.id - 1).last\n datasets_found.add(found_dataset) if prev_ca.status == 'peer_review'\n\n # Case: the system sent a reminder, the author acted on the reminder, but the submission was credited to System.\n # walk back through System ca's. Count the dataset if the previous \"real\" ca was from the original submitting user\n prev_ca = CurationActivity.where(resource_id: ca.resource_id, id: 0..prev_ca.id - 1).last while prev_ca&.user_id == 0\n datasets_found.add(found_dataset) if prev_ca&.user_id == orig_submitter_id\n else\n # Case: a curator took it out of peer review\n # if the prev_ca is 'in progress', AND prior version ended in PPR status without being submitted\n prev_ca = CurationActivity.where(resource_id: ca.resource_id, id: 0..ca.id - 1).last\n prev_resource = ca.resource.identifier.resources.where(id: 0..ca.resource.id - 1)&.last\n datasets_found.add(found_dataset) if prev_ca&.status == 'in_progress' && prev_resource&.current_curation_status == 'peer_review'\n end\n end\n update(datasets_to_be_curated: datasets_found.size)\n end", "def patients_reinitiated_on_art_ever\n Observation.find(:all, :joins => [:encounter], :conditions => [\"concept_id = ? AND value_coded IN (?) 
AND encounter.voided = 0 \\\n AND DATE_FORMAT(obs_datetime, '%Y-%m-%d') <= ?\", ConceptName.find_by_name(\"EVER RECEIVED ART\").concept_id,\n ConceptName.find(:all, :conditions => [\"name = 'YES'\"]).collect{|c| c.concept_id},\n @end_date.to_date.strftime(\"%Y-%m-%d\")]).length rescue 0\n end", "def coalition_situations(agent)\n # To find situations on coalition level and averaged data\n\n # DELAYED: Real choice of situation not possible due to given data\n beliefs = agent.belief_set\n situations = Hash.new\n\n if beliefs.has_key?('inputSituation') && beliefs.has_key?('analyseCoalitionSituation')\n beliefs['inputSituation'].each do |args|\n situations[args[1]] ||= Hash.new\n situations[args[1]][args[2]] ||= 0\n situations[args[1]][args[2]] += 1\n end\n end\n\n situations.each do |type, values|\n values.each do |value, count|\n agent.add_belief(\"coalitionSituation\", \"#{only_arg(beliefs, 'currentTimeStamp')},#{type},#{value}\")\n end\n end\n end", "def test_for_calories\n assert_equal( @entry1.calories, 50 )\n\tassert_equal( @entry2.calories, 70 )\n\tassert_equal( @entry3.calories, 15 )\n end", "def decisions_by_cd\n cds=@sr.cd_id_by_stage(@stage)\n\n decisions=Decision.where(:systematic_review_id=>@sr.id, :canonical_document_id=>cds, :user_id=>@sr.group_users.map {|u| u[:id]}, :stage=>@stage.to_s).group_and_count(:canonical_document_id, :decision).all\n n_jueces_por_cd=AllocationCd.where(:systematic_review_id=>@sr.id, :canonical_document_id=>cds, :stage=>@stage.to_s).group_and_count(:canonical_document_id).as_hash(:canonical_document_id)\n\n\n# n_jueces=@sr.group_users.count\n cds.inject({}) {|ac,v|\n ac[v]=empty_decisions_hash\n ac[v]=ac[v].merge decisions.find_all {|dec| dec[:canonical_document_id]==v }\n .inject({}) {|ac1,v1| ac1[v1[:decision]]=v1[:count]; ac1 }\n suma=ac[v].inject(0) {|ac1,v1| ac1+v1[1]}\n n_jueces=n_jueces_por_cd[v].nil? ? 
0 : n_jueces_por_cd[v][:count]\n ac[v][Decision::NO_DECISION]=n_jueces-suma\n ac\n }\n end", "def generateNumCases \n time1 = Time.now # used for test\n time11 = Time.new # used for test\n puts time1# used for test\n\n\t while !isEmptyOfUC do\n tmprecords=Records.new #new a Record used for store the results\n getMostOccur #get the most occur store in @maxparam and @maxelement\n for i in 0...@maxloopnum do #generate @maxloopnum records and later will random select one\n\n \n\n recorde = generateCase\n if(recorde !=-1)\n tmprecords.recordsArr << recorde\n end\n end\n flag = getMostCover tmprecords\n @records.recordsArr << tmprecords.recordsArr[flag]\n updateUC tmprecords.recordsArr[flag]\n tmprecords.recordsArr.clear\n end\n \n #puts 'list:'\n outputcase \n #print'total number:'\n #puts @records.recordsArr.length\n\n #puts \"start time\" #design for test start\n #puts time11\n time22 = Time.new\n #puts \"end time\"\n \n #puts time22\n time2 = Time.now\n puts time2-time1 #design for test end\n @usetime = time2-time1\n end", "def add_to_consensus iterConsensus, cover\n cover.each { |e| iterConsensus[e] += 1 }\nend", "def get_expenses(constructions)\n information = {expenses: 0, paid: 0}\n constructions.each do |construction|\n information[:expenses] += construction.expenses\n information[:paid] += construction.paid\n end\n information\n end", "def occasions\n @stats = SimpleCache.new.fetch(\"occasions\")\n render :template => \"admin/stats/occasions\"\n end", "def create_ready_cavc_genpop_cases_within_affinity_lever\n Timecop.travel(14.days.ago)\n 4.times do\n create(\n :appeal,\n :direct_review_docket,\n :type_cavc_remand,\n :cavc_ready_for_distribution,\n veteran: create_veteran\n )\n end\n Timecop.return\n end", "def run_tests(consider_others)\n step_size = (consider_others) ? 4 : 5\n company_type_id = CompanyType.find_by_price_set(true).id\n company = Company.where(:test => true, :company_type_id => company_type_id).order(:id).first\n return if company.nil?\n\n market = Market.find_by_test(true)\n return if market.customer_amount.nil?\n array = []\n array << [\"ID\", \"Launches\", \"Price\", \"Profit\", \"Size\"]\n\n unit_max = CompanyType.where(:unit_produce => true).first.limit_hash[\"11_unit_max_size\"].to_i\n price_max = market.variables[\"exp1\"].to_i * 3\n unit_step = (unit_max.to_f / step_size).to_i\n price_step = (price_max.to_f / step_size).to_i\n unit = unit_step\n price = price_step\n while unit_max >= unit\n while price_max >= price\n inside_array = []\n inside_array << \"\"\n inside_array << unit\n inside_array << price\n profit = market.test_sales(price, unit, company, consider_others).to_i\n inside_array << profit\n inside_array << profit.abs\n array << inside_array\n price += price_step\n end\n unit += unit_step\n price = price_step\n end\n return array\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Verifies project description matches test data
def verify_project_desc(test_data)
  verify_values_match(test_data[CoreUseOfCollectionsData::PROJECT_DESC.name], element_value(project_desc_text_area))
end
[ "def verify_project_desc(test_data)\n verify_values_match(test_data[UseOfCollections::PROJECT_DESC.name], element_value(project_desc_text_area))\n end", "def verify_project_id(test_data)\n verify_values_match(test_data[UseOfCollections::PROJECT_ID.name], element_value(project_id_input))\n end", "def verify_project_id(test_data)\n verify_values_match(test_data[CoreUseOfCollectionsData::PROJECT_ID.name], element_value(project_id_input))\n end", "def test_description\r\n\t\tdescription = \"A test job for bits.rb\"\r\n\t\t@job.description = description\r\n\t\tassert_equal(description, @job.description)\r\n\tend", "def verify_result(test_data)\n verify_values_match(test_data[CoreUseOfCollectionsData::RESULT.name], element_value(result_text_area))\n end", "def assert_code_and_desc_ok(object)\n cloned = object.clone\n assert_presence_required(object,:code)\n assert_presence_required(object,:description)\n assert object.save,\"Failed to save object first time in uniqueness test; pass unique record first\"\n assert !cloned.save,\"Saved object twice while testing for uniqueness of :code and :description\"\n assert(cloned.errors[:code].to_s =~ /taken/, \"Missing or wrong error message for duplicate code\" )\n assert(cloned.errors[:description].to_s =~ /taken/, \"Missing or wrong error message for duplicate description\") \n end", "def test_if_factory_is_valid # test:unit way to define a test\n assert @project.valid?\n end", "def compare_to(project_name)\n compare_contents @project_path, project_fixture(project_name)\n end", "def test_update_todo_list_description\n @todo_list.description = \"Things I want to do this week\"\n assert_equal(\"Things I want to do this week\", @todo_list.description)\n end", "def verify_provisos(test_data)\n verify_values_match(test_data[CoreUseOfCollectionsData::PROVISOS.name], element_value(provisos_text_area))\n end", "def verify_result(test_data)\n verify_values_match(test_data[UseOfCollections::RESULT.name], element_value(result_text_area))\n end", "def verify_provisos(test_data)\n verify_values_match(test_data[UseOfCollections::PROVISOS.name], element_value(provisos_text_area))\n end", "def verify_title(test_data)\n verify_values_match(test_data[CoreUseOfCollectionsData::TITLE.name], element_value(title_input))\n end", "def test_checklist_for_project\n login\n project = projects(:one_genus_two_species_project)\n expect = Name.joins(observations: :project_observations).\n where({ observations: { project_observations:\n { project_id: project.id } } }).\n with_rank(\"Species\").distinct\n\n get(:show, params: { project_id: project.id })\n assert_match(/Checklist for #{project.title}/, css_select(\"title\").text,\n \"Wrong page\")\n\n prove_checklist_content(expect)\n end", "def validateProject(data, isSocialGood = false)\n fails = []\n # Check if project is private\n if data.private\n fails.push(\"Project is either private or doesn't exist!\")\n end \n # Check if project has license\n if data.license == nil\n fails.push(\"Project doesn't have a license\")\n end\n # Check if project has atleast 100 stars\n if data.stargazers_count < 100 && !isSocialGood\n fails.push(\"Project has less than 100 stars\")\n end\n return fails\nend", "def test_should_require_description\n archive = create(:description => nil)\n assert archive.errors.invalid?(:description), \":description should be required\"\n assert_invalid archive, \"archive shouldn't be created\"\n end", "def validate_release\n if !system(\"origen examples\") # || !system(\"origen specs\")\n puts \"Sorry but you can't 
release with failing tests, please fix them and try again.\"\n exit 1\n else\n puts \"All tests passing, proceeding with release process!\"\n end\n end", "def verify_title(test_data)\n verify_values_match(test_data[UseOfCollections::TITLE.name], element_value(title_input))\n end", "def validate_release\n if !system(\"origen specs\") #|| !system(\"origen examples\")\n puts \"Sorry but you can't release with failing tests, please fix them and try again.\"\n exit 1\n else\n puts \"All tests passing, proceeding with release process!\"\n end\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Verifies authorizations match test data
def verify_authorizations(test_data)
  authorizations = test_data[CoreUseOfCollectionsData::AUTHORIZATION_GRP.name] || [CoreUseOfCollectionsData.empty_authorization]
  authorizations.each_with_index do |auth, index|
    verify_values_match(auth[CoreUseOfCollectionsData::AUTHORIZED_BY.name], element_value(authorized_by_input index))
    verify_values_match(auth[CoreUseOfCollectionsData::AUTHORIZATION_DATE.name], element_value(authorization_date_input index))
    verify_values_match(auth[CoreUseOfCollectionsData::AUTHORIZATION_NOTE.name], element_value(authorization_note_input index))
    verify_values_match(auth[CoreUseOfCollectionsData::AUTHORIZATION_STATUS.name], element_value(authorization_status_input index))
  end
end
[ "def verify_authorizations(test_data)\n authorizations = test_data[UseOfCollections::AUTHORIZATION_GRP.name] || [UseOfCollections.empty_authorization]\n authorizations.each_with_index do |auth, index|\n verify_values_match(auth[UseOfCollections::AUTHORIZED_BY.name], element_value(authorized_by_input index))\n verify_values_match(auth[UseOfCollections::AUTHORIZATION_DATE.name], element_value(authorization_date_input index))\n verify_values_match(auth[UseOfCollections::AUTHORIZATION_NOTE.name], element_value(authorization_note_input index))\n verify_values_match(auth[UseOfCollections::AUTHORIZATION_STATUS.name], element_value(authorization_status_input index))\n end\n end", "def verify_authorized_by(test_data)\n verify_values_match(test_data[CoreUseOfCollectionsData::AUTHORIZED_BY.name], element_value(authorized_by_input))\n end", "def verify_authorization_note(test_data)\n verify_values_match(test_data[CoreUseOfCollectionsData::AUTHORIZATION_NOTE.name], element_value(authorization_note_input))\n end", "def valid?\n @authorizations.each do |authorization|\n authorizer = get_authorizer(authorization)\n if authorizer.status != \"valid\"\n return false\n end\n end\n return true\n end", "def verify_authorization_date(test_data)\n verify_values_match(test_data[CoreUseOfCollectionsData::AUTHORIZATION_DATE.name], element_value(authorization_date_input))\n end", "def test_authorize\n object_person = people(:valid_person) # person_id 1\n subject_person_1aa = people(:person1) # person_id 1aa\n subject_person_2aa = people(:person3) # person_id 2aa\n \n assert Rule.authorize?(subject_person_1aa, object_person.id, \"view\", \"email\")\n assert !Rule.authorize?(subject_person_2aa, object_person.id, \"view\",\"email\")\n\n end", "def test_agreement\n\n # Make sure just responds if not yet affiliate\n get :agreement, {}, {:customer_id => 1, :timestamp => Time.now.to_i}\n assert_response :success\n assert_template 'agreement'\n\n # Make sure allows affiliate signup\n assert(!Customer.find(1).affiliate)\n post :agreement, {:commit => 'Accept Agreement'}, {:customer_id => 1, :timestamp => Time.now.to_i}\n assert_redirected_to :controller => 'affiliate', :action => ''\n assert(Customer.find(1).affiliate)\n\n # Make sure signed up affiliates just get redirect\n get :agreement, {}, {:customer_id => 1, :timestamp => Time.now.to_i}\n assert_redirected_to :controller => 'affiliate', :action => ''\n\n end", "def enter_pahma_authorizations(test_data)\n authorizations = test_data[CoreUseOfCollectionsData::AUTHORIZATION_GRP.name] || [CoreUseOfCollectionsData.empty_authorization]\n hide_notifications_bar\n prep_fieldsets_for_test_data([fieldset(CoreUseOfCollectionsData::AUTHORIZATION_GRP.name)], authorizations)\n authorizations.each_with_index do |auth, index|\n logger.info \"Entering authorization data set at index #{index}: #{auth}\"\n enter_auto_complete(authorized_by_input(index), authorized_by_options(index), auth[CoreUseOfCollectionsData::AUTHORIZED_BY.name], 'PAHMA Persons')\n wait_for_element_and_type(authorization_date_input(index), auth[CoreUseOfCollectionsData::AUTHORIZATION_DATE.name])\n hit_enter\n wait_for_element_and_type(authorization_note_input(index), auth[CoreUseOfCollectionsData::AUTHORIZATION_NOTE.name])\n wait_for_options_and_select(authorization_status_input(index), authorization_status_options(index), auth[CoreUseOfCollectionsData::AUTHORIZATION_STATUS.name])\n end\n end", "def authenticate\n unless params[:just_retrieving_resources_to_prove_they_exist_in_client_test]\n render json: { errors: 
['You are not authorized'] }, status: :unauthorized\n end\n end", "def test_index_authorized\n authorize @valid_user\n get :index\n assert_response :success\n assert_template \"index\"\n end", "def verify_against_enabled_authorization_handlers\n @verified_handler= authorization_handlers.find {|handler| handler.valid? }\n\n errors.add(:authorizations, :invalid) unless @verified_handler\n end", "def verify_acceptance_of_user_agreement!\n return true\n end", "def audit_data_verify(a_data, v_data)\n found = nil\n a_data.each do |entry|\n if entry.text.match(v_data)\n found = true\n print_to_output(\"VERIFIED: Expected Audit data: '#{v_data}' is displayed.\")\n break\n end\n end\n raise \"Audit data #{v_data} was not found!\" unless found\n end", "def validate!\n if self.status != CHALLENGE_STATE_STARTED\n end\n @logger.debug(\"Requesting authorization for certificate with subjects '#{@certificate.subjects.join(\",\")}'\")\n @authorizations.each do |authorization|\n num_timeouts = 0\n authorizer = get_authorizer(authorization)\n authorizer.request_validation\n while authorizer.status == \"pending\"\n sleep(2)\n begin\n authorizer.reload\n rescue Acme::Client::Error::Timeout\n num_timeouts += 1\n @logger.debug(\"Received ACME timeout no##{num_timeouts} of max. #{@endpoint.timeout_retries}\")\n if num_timeouts >= @endpoint.timeout_retries + 1\n raise Acme::Distributed::ServerError, \"Abort authorization request, max. number of timeouts exceeded\"\n end\n end\n end\n @logger.debug(\"Status of this authorization request: #{authorizer.status}\")\n end\n end", "def all_authorities_test_data\n parse_test_data(@deployment, 'test-data-all-authorities.json')['organizations']\n end", "def all_authorities_test_data(deployment = nil)\n parse_test_data((deployment || @deployment), 'test-data-all-authorities.json')['organizations']\n end", "def test_authorize_according_to_one_rule\n rule_or = rules(:active_or_rule) # friends or members of group tkk can view name of person 1\n rule_and = rules(:active_and_rule) # friends who are members of group tkk can view email of person 1\n\n action_view_name = actions(:view_name)\n action_view_email = actions(:view_email)\n object_person = people(:valid_person) # person id 1\n subject_person_test = people(:test) # not friend, not member of group tkk\n subject_person_4 = people(:friend) # person 4 is a friend, but not a member of group tkk\n subject_person_1aa = people(:person1) # person 1aa is a friend, and also a member of group tkk\n\n assert subject_person_1aa.contacts.include? object_person\n assert subject_person_1aa.is_member_of? 
groups(:tkk)\n\n assert !rule_or.authorize_according_to_one_rule(subject_person_test, object_person.id, action_view_name.action_type, action_view_name.action_value)\n assert !rule_and.authorize_according_to_one_rule(subject_person_test, object_person.id, action_view_email.action_type, action_view_email.action_value)\n assert rule_or.authorize_according_to_one_rule(subject_person_4, object_person.id, action_view_name.action_type, action_view_name.action_value)\n assert !rule_and.authorize_according_to_one_rule(subject_person_4, object_person.id, action_view_email.action_type, action_view_email.action_value)\n assert rule_or.authorize_according_to_one_rule(subject_person_1aa, object_person.id, action_view_name.action_type, action_view_name.action_value)\n assert rule_and.authorize_according_to_one_rule(subject_person_1aa, object_person.id, action_view_email.action_type, action_view_email.action_value)\n end", "def verify_captive_portal(expected_data, expected_text)\n data_captive = (access_control_data_val == expected_data ? true : false )\n text_captive = (access_control_text == expected_text ? true : false )\n ((data_captive == true ) && (text_captive == true)) ? true : false\n end", "def verify_assignments(submission)\n submission.voter_submission_assignments.each do |vsa|\n grants_voter_count = GrantsVoter.where(grant: submission.grant, voter: vsa.voter).count\n\n expect(vsa.voter).to be_verified\n expect(grants_voter_count).not_to eq(0)\n end\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Verifies use dates match test data
def verify_use_dates(test_data)
  dates = test_data[CoreUseOfCollectionsData::USE_DATE_GRP.name] || [CoreUseOfCollectionsData.empty_use_date]
  dates.each_with_index do |date, index|
    verify_values_match(date[CoreUseOfCollectionsData::USE_DATE.name], element_value(use_date_input index))
    verify_values_match(date[CoreUseOfCollectionsData::USE_DATE_TIME_NOTE.name], element_value(use_date_time_note(index)))
    verify_values_match(date[CoreUseOfCollectionsData::USE_DATE_NUM_VISITORS.name], element_value(use_num_visitors_input index))
    verify_values_match(date[CoreUseOfCollectionsData::USE_DATE_HOURS_SPENT.name], element_value(use_hours_spent_input index))
    verify_values_match(date[CoreUseOfCollectionsData::USE_DATE_VISITOR_NOTE.name], element_value(use_note_input index))
  end
end
[ "def verify_use_dates(test_data)\n dates = test_data[UseOfCollections::USE_DATE_GRP.name] || [UseOfCollections.empty_use_date]\n dates.each_with_index do |date, index|\n verify_values_match(date[UseOfCollections::USE_DATE.name], element_value(use_date_input index))\n verify_values_match(date[UseOfCollections::USE_DATE_TIME_NOTE.name], element_value(use_date_time_note(index)))\n verify_values_match(date[UseOfCollections::USE_DATE_NUM_VISITORS.name], element_value(use_num_visitors_input index))\n verify_values_match(date[UseOfCollections::USE_DATE_HOURS_SPENT.name], element_value(use_hours_spent_input index))\n verify_values_match(date[UseOfCollections::USE_DATE_VISITOR_NOTE.name], element_value(use_note_input index))\n end\n end", "def verify_date_requested(test_data)\n verify_values_match(test_data[CoreUseOfCollectionsData::DATE_REQUESTED.name], element_value(date_requested_input))\n end", "def verify_date_requested(test_data)\n verify_values_match(test_data[UseOfCollections::DATE_REQUESTED.name], element_value(date_requested_input))\n end", "def verify_foundation_date(test_data)\n errors = []\n text_values_match?(test_data[CoreOrgData::FOUNDING_DATE.name], element_value(foundation_date_input), errors)\n errors\n end", "def verify_date_completed(test_data)\n verify_values_match(test_data[UseOfCollections::DATE_COMPLETED.name], element_value(date_completed_input))\n end", "def verify_dissolution_date(test_data)\n errors = []\n text_values_match?(test_data[Org::DISSOLUTION_DATE.name], element_value(dissolution_date_input), errors)\n errors\n end", "def verify_date_completed(test_data)\n verify_values_match(test_data[CoreUseOfCollectionsData::DATE_COMPLETED.name], element_value(date_completed_input))\n end", "def verify_accession_date(data_set)\n verify_values_match(data_set[Acquisition::ACCESSION_DATE_GRP.name], element_value(access_date_input_locator))\n end", "def test_invalid_bithdays\n spec = @valid_spec\n invalid_birthdates = [Date.new(Spec::START_YEAR - 1), Date.today + 1.year]\n invalid_birthdates.each do |birthdate|\n spec.birthdate = birthdate\n assert !spec.valid?, \"#{birthdate} should not pass validation\"\n end\n end", "def verify_authorization_date(test_data)\n verify_values_match(test_data[CoreUseOfCollectionsData::AUTHORIZATION_DATE.name], element_value(authorization_date_input))\n end", "def verify_end_date(test_data)\n verify_values_match(test_data[UseOfCollections::END_DATE.name], element_value(end_date_input))\n end", "def date_combination_valid\n\n # just make sure we work on something valid and existing\n\n return false unless date_from?\n\n set_nil_default( :date_until, date_from )\n\n if date_until < date_from\n errors.add( :date_until, I18n.t( 'holidays.msg.bad_period' ))\n return false\n end\n\n if date_from.year != date_until.year\n errors.add( :date_until, I18n.t( 'holidays.msg.bad_years' ))\n return false\n end\n\n # all tests passed\n\n write_attribute( :year_period, date_from.year )\n return true\n end", "def verify_end_date(test_data)\n verify_values_match(test_data[CoreUseOfCollectionsData::END_DATE.name], element_value(end_date_input))\n end", "def test_date_formats_and_ranges\n batch = Batch.new(:id => 20,\n :batchid=>20,\n :date => Date.today.strftime(\"%m/%d/%Y\"),\n :eob => 30,\n :facility=> facilities(:facility1),\n :arrival_time => \"#{Time.now}\",\n :target_time => \"#{Time.now}\",\n :status => \"New\"\n )\n\n # Assert the validity of tommorrow, mm/dd/YYYY format, and sixty days prior\n assert_valid(batch, :date, [(Date.today + 1).to_s, 
Date.today.strftime(\"%m/%d/%Y\"), (Date.today - 60).to_s])\n # Assert the invalidity of three days from today, more than sixty days ago, mm/dd/yy, and yy/mm/dd formats\n assert_invalid(batch, :date, [(Date.today + 4).to_s, (Date.today - 61).to_s, Date.today.strftime(\"%m/%d/%y\"), Date.today.strftime(\"y/%m/%d\")])\n end", "def test_several_dates_1\n \tassert_equal(1, Zeller.calc(12, 2013))\n end", "def test_dates\n dates = ExchangeRate.dates\n assert_equal(dates, [Date.new(2016, 01, 01), Date.new(2015, 12, 25), Date.new(2015, 12, 12)])\n end", "def verify_start_single_date(test_data)\n verify_values_match(test_data[CoreUseOfCollectionsData::START_SINGLE_DATE.name], element_value(start_single_date_input))\n end", "def test_invalid_card_dates\n create_invalid_card_dates\n deny @last_year.valid?\n deny @last_month.valid?\n end", "def test_valid_review_dates_others_work\n #@request.session[:user] = User.find(users(:student1).id)\n @assignment = assignments(:assignment1)\n #@participant = AssignmentParticipant.find(participants(:par1).id)\n due_dates = DueDate.find(:all, :conditions => [\"assignment_id = ?\", @assignment.id])\n @very_last_due_date = DueDate.find(:all, :order => \"due_at DESC\", :limit =>1, :conditions => [\"assignment_id = ?\", @assignment.id])\n next_due_date = @very_last_due_date[0]\n for due_date in due_dates\n if due_date.due_at > Time.now\n if due_date.due_at < next_due_date.due_at\n next_due_date = due_date\n end\n end\n end\n @review_phase = next_due_date.deadline_type_id;\n\n assert_equal(DeadlineType.find(@review_phase).name , deadline_types(:deadline_type_review).name)\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Verifies that the end date matches test data
def verify_end_date(test_data)
  verify_values_match(test_data[CoreUseOfCollectionsData::END_DATE.name], element_value(end_date_input))
end
[ "def verify_end_date(test_data)\n verify_values_match(test_data[UseOfCollections::END_DATE.name], element_value(end_date_input))\n end", "def verify_date_completed(test_data)\n verify_values_match(test_data[CoreUseOfCollectionsData::DATE_COMPLETED.name], element_value(date_completed_input))\n end", "def verify_date_completed(test_data)\n verify_values_match(test_data[UseOfCollections::DATE_COMPLETED.name], element_value(date_completed_input))\n end", "def end_date_is_valid?\n begin\n date = USDateParse(self.end_date)\n self.end_date = date.strftime(\"%m/%d/%y\")\n rescue\n return false\n end\n return true\n end", "def check_end_date_validation\n unless self.event_ends_on.blank?\n if self.event_ends_on < self.event_starts_on\n #Add errror\n self.errors.add(:event_ends_on, \"date should be greater than event start on date\")\n end\n end\n end", "def end_date_is_valid?\n begin\n date = USDateParse(self.end_date)\n rescue\n return false\n end\n return true\n end", "def scenarios_have_matching_end_years\n if @scenarios.map(&:end_year).any? { |year| year != end_year }\n errors.add(:base, 'One or more scenarios have differing end years')\n end\n end", "def verify_use_dates(test_data)\n dates = test_data[CoreUseOfCollectionsData::USE_DATE_GRP.name] || [CoreUseOfCollectionsData.empty_use_date]\n dates.each_with_index do |date, index|\n verify_values_match(date[CoreUseOfCollectionsData::USE_DATE.name], element_value(use_date_input index))\n verify_values_match(date[CoreUseOfCollectionsData::USE_DATE_TIME_NOTE.name], element_value(use_date_time_note(index)))\n verify_values_match(date[CoreUseOfCollectionsData::USE_DATE_NUM_VISITORS.name], element_value(use_num_visitors_input index))\n verify_values_match(date[CoreUseOfCollectionsData::USE_DATE_HOURS_SPENT.name], element_value(use_hours_spent_input index))\n verify_values_match(date[CoreUseOfCollectionsData::USE_DATE_VISITOR_NOTE.name], element_value(use_note_input index))\n end\n end", "def verify_use_dates(test_data)\n dates = test_data[UseOfCollections::USE_DATE_GRP.name] || [UseOfCollections.empty_use_date]\n dates.each_with_index do |date, index|\n verify_values_match(date[UseOfCollections::USE_DATE.name], element_value(use_date_input index))\n verify_values_match(date[UseOfCollections::USE_DATE_TIME_NOTE.name], element_value(use_date_time_note(index)))\n verify_values_match(date[UseOfCollections::USE_DATE_NUM_VISITORS.name], element_value(use_num_visitors_input index))\n verify_values_match(date[UseOfCollections::USE_DATE_HOURS_SPENT.name], element_value(use_hours_spent_input index))\n verify_values_match(date[UseOfCollections::USE_DATE_VISITOR_NOTE.name], element_value(use_note_input index))\n end\n end", "def check_end_date\n if end_date < Date.today\n \n errors.add(:end_date, \"End Date can only be later than today\")\n end\n end", "def verify_foundation_date(test_data)\n errors = []\n text_values_match?(test_data[CoreOrgData::FOUNDING_DATE.name], element_value(foundation_date_input), errors)\n errors\n end", "def date_validation(start_date, end_date)\n first_date = start_date.split(\"-\").map {|item| item.to_i}\n last_date = end_date.split(\"-\").map {|item| item.to_i}\n if !(Date.valid_date?(first_date[0], first_date[1], first_date[2])) || !(Date.valid_date?(last_date[0], last_date[1], last_date[2]))\n raise ArgumentError.new(\"Date(s) you provided is/are invalid\")\n end\n end", "def date_check_validation\n if !self.start_date.nil? 
&& !self.end_date.nil?\n if self.end_date < self.start_date\n self.errors.add(:end_date, \"End date should be greater than or equal to start date\")\n end\n end\n end", "def verify_dissolution_date(test_data)\n errors = []\n text_values_match?(test_data[Org::DISSOLUTION_DATE.name], element_value(dissolution_date_input), errors)\n errors\n end", "def test_invalid_bithdays\n spec = @valid_spec\n invalid_birthdates = [Date.new(Spec::START_YEAR - 1), Date.today + 1.year]\n invalid_birthdates.each do |birthdate|\n spec.birthdate = birthdate\n assert !spec.valid?, \"#{birthdate} should not pass validation\"\n end\n end", "def date_checker_not_past_end\n\t\t\treturn if end_date.nil?\n\n\t\t\tif (self.end_date.to_date >= Time.now.to_date)\n\t\t\t\treturn true\n\t\t\telse \n\t\t\t\terrors.add(:end_date, \"start_date or end_date is in the past\")\n\t\t\t\treturn false \n\t\t\tend\n\t\tend", "def verify_date_requested(test_data)\n verify_values_match(test_data[CoreUseOfCollectionsData::DATE_REQUESTED.name], element_value(date_requested_input))\n end", "def end_date_set?\n end_date != nil\n end", "def validate_dates\n if start_at && end_at\n errors.add(:end_at, 'should be greater than start date') if end_at <= start_at\n end\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Verifies staff match test data
def verify_staff(test_data)
  staff = test_data[CoreUseOfCollectionsData::STAFF_GRP.name] || [CoreUseOfCollectionsData.empty_staff]
  staff.each_with_index do |staf, index|
    verify_values_match(staf[CoreUseOfCollectionsData::STAFF_NAME.name], element_value(staff_name_input index))
    verify_values_match(staf[CoreUseOfCollectionsData::STAFF_ROLE.name], element_value(staff_role_input index))
    verify_values_match(staf[CoreUseOfCollectionsData::STAFF_HOURS_SPENT.name], element_value(staff_hours_spent_input index))
    verify_values_match(staf[CoreUseOfCollectionsData::STAFF_NOTE.name], element_value(staff_note_input index))
  end
end
[ "def verify_staff(test_data)\n staff = test_data[UseOfCollections::STAFF_GRP.name] || [UseOfCollections.empty_staff]\n staff.each_with_index do |staf, index|\n verify_values_match(staf[UseOfCollections::STAFF_NAME.name], element_value(staff_name_input index))\n verify_values_match(staf[UseOfCollections::STAFF_ROLE.name], element_value(staff_role_input index))\n verify_values_match(staf[UseOfCollections::STAFF_HOURS_SPENT.name], element_value(staff_hours_spent_input index))\n verify_values_match(staf[UseOfCollections::STAFF_NOTE.name], element_value(staff_note_input index))\n end\n end", "def verify_fees(test_data)\n fees = test_data[UseOfCollections::FEE_GRP.name] || [UseOfCollections.empty_fee]\n fees.each_with_index do |fee, index|\n verify_values_match(fee[UseOfCollections::FEE_CURRENCY.name], element_value(fee_currency_input index))\n verify_values_match(fee[UseOfCollections::FEE_VALUE.name], element_value(fee_value_input index))\n verify_values_match(fee[UseOfCollections::FEE_NOTE.name], element_value(fee_note_input index))\n end\n end", "def verify_result(test_data)\n verify_values_match(test_data[CoreUseOfCollectionsData::RESULT.name], element_value(result_text_area))\n end", "def verify_fees(test_data)\n fees = test_data[CoreUseOfCollectionsData::FEE_GRP.name] || [CoreUseOfCollectionsData.empty_fee]\n fees.each_with_index do |fee, index|\n verify_values_match(fee[CoreUseOfCollectionsData::FEE_CURRENCY.name], element_value(fee_currency_input index))\n verify_values_match(fee[CoreUseOfCollectionsData::FEE_VALUE.name], element_value(fee_value_input index))\n verify_values_match(fee[CoreUseOfCollectionsData::FEE_NOTE.name], element_value(fee_note_input index))\n end\n end", "def verify_result(test_data)\n verify_values_match(test_data[UseOfCollections::RESULT.name], element_value(result_text_area))\n end", "def verify_users(test_data)\n users = test_data[UseOfCollections::USER_GRP.name] || [UseOfCollections.empty_user]\n users.each_with_index do |user, index|\n verify_values_match(user[UseOfCollections::USER.name], element_value(user_name_input index))\n verify_values_match(user[UseOfCollections::USER_INSTITUTION_ROLE.name], element_value(user_type_input index))\n verify_values_match(user[UseOfCollections::USER_UOC_ROLE.name], element_value(user_role_input index))\n verify_values_match(user[UseOfCollections::USER_INSTITUTION.name], element_value(user_institution_input index))\n end\n end", "def verify_users(test_data)\n test_users = test_data[CoreUseOfCollectionsData::USER_GRP.name]\n test_users = [{CoreUseOfCollectionsData::USER.name => '', CoreUseOfCollectionsData::USER_TYPE.name => ''}] unless test_users\n test_users.each_with_index do |user, index|\n verify_values_match(user[CoreUseOfCollectionsData::USER.name], element_value(user_name_input index))\n verify_values_match(user[CoreUseOfCollectionsData::USER_TYPE.name], element_value(user_type_input index))\n end\n end", "def test_validity_of_examples\n @dfa_examples.each do |e|\n assert_equal(true, e.deterministic?)\n end\n @nfa_examples.each do |e|\n assert_equal(false, e.deterministic?)\n end\n end", "def check_against(unit_test)\n grade_sheet = unit_test.execute(src_code) \n Feedback.on(grade_sheet)\n end", "def verify_result(expect, model)\n puts \"running query: #{build_query(model)}\"\n result = search(build_query(model))\n assert_equal(expect.size, result.hit.size)\n expect.each_with_index do |expected_sub_scores,i|\n jsf = result.hit[i].field['summaryfeatures']\n sub_scores = extract_subscores(jsf, model.size)\n 
assert_equal(expected_sub_scores, sub_scores,\n \"subscores differ for hit #{i}: #{expected_sub_scores} != #{sub_scores}\")\n end\n end", "def verify_users(test_data)\n users = test_data[CoreUseOfCollectionsData::USER_GRP.name] || [CoreUseOfCollectionsData.empty_user]\n users.each_with_index do |user, index|\n verify_values_match(user[CoreUseOfCollectionsData::USER.name], element_value(user_name_input index))\n verify_values_match(user[CoreUseOfCollectionsData::USER_INSTITUTION_ROLE.name], element_value(user_type_input index))\n verify_values_match(user[CoreUseOfCollectionsData::USER_UOC_ROLE.name], element_value(user_role_input index))\n verify_values_match(user[CoreUseOfCollectionsData::USER_INSTITUTION.name], element_value(user_institution_input index))\n end\n end", "def chk_test (correct_contact,info_request, expected_result = :match)\n # Get the number of records in contacts before we run our test\n num_existing_recs = Contact.all.size\n\n resulting_contact = Contact.match_contact_from_info_request(info_request)\n\n if expected_result == :new\n assert_not_nil resulting_contact, \"[#{expected_result}]: Expecting an instance of Contact \"\n assert_instance_of( Contact, resulting_contact, \"[#{expected_result}]: Results not an Contact class\" )\n assert_equal num_existing_recs + 1, Contact.all.size, \"[#{expected_result}]: Should create new row in contacts\"\n elsif expected_result == :match\n assert_not_nil resulting_contact, \"[#{expected_result}]: Expecting an instance of Contact\"\n assert_instance_of( Contact, resulting_contact, \"[#{expected_result}]: Results not an Contact class\" )\n assert_equal num_existing_recs, Contact.all.size, \"[#{expected_result}]: Should be no new rows in contacts\"\n else\n assert_nil resulting_contact, \"[#{expected_result}]: Should not return an instance of Contact\"\n assert_equal num_existing_recs, Contact.all.size, \"[#{expected_result}]: Should be no new rows in contacts\"\n end\n\n\n\n ## -- Test the Results -----------------------------------------------------\n if expected_result == :new || expected_result == :match\n assert_not_nil resulting_contact.id, \"match id should not be null\"\n assert_equal correct_contact.email_address, resulting_contact.email_address, \"unexpected email_address\"\n assert_equal correct_contact.phone, resulting_contact.phone, \"unexpected phone\"\n assert_equal correct_contact.company, resulting_contact.company, \"unexpected company\"\n assert_equal correct_contact.contact_method, resulting_contact.contact_method, \"unexpected contact_method\"\n assert_equal correct_contact.name_first, resulting_contact.name_first, \"unexpected name_first\"\n assert_equal correct_contact.name_last, resulting_contact.name_last, \"unexpected name_last\"\n assert_equal correct_contact.title, resulting_contact.title, \"unexpected title\"\n end\n\n end", "def test_valid_with_attributes\r\n referal_provider = ReferalProvider.new\r\n referal_provider.first_name = @referal_provider.first_name\r\n referal_provider.last_name = @referal_provider.last_name\r\n referal_provider.npi = @referal_provider.npi\r\n referal_provider.taxonomy = @referal_provider.taxonomy\r\n assert referal_provider.valid?, referal_provider.errors.full_messages\r\n end", "def run_verification\n verify_data_format # runs through what we have and makes sure teh values are in the correct format\n verify_dependencies # makes sure that any 'optional' columns have any dependency columns present\n verify_order # finally, all columns must be in a specific order \n end", "def 
verify_object_info_data(data_set)\n logger.debug \"Checking object number #{data_set[CoreObjectData::OBJECT_NUM.name]}\"\n object_data_errors = []\n text_values_match?(data_set[CoreObjectData::OBJECT_NUM.name], element_value(object_num_input), object_data_errors)\n\n other_nums = data_set[CoreObjectData::OTHER_NUM.name]\n other_nums && other_nums.each do |num|\n index = other_nums.index num\n text_values_match?(num[CoreObjectData::NUM_VALUE.name], element_value(other_num_num_input index), object_data_errors)\n text_values_match?(num[CoreObjectData::NUM_TYPE.name], element_value(other_num_type_input index), object_data_errors)\n end\n\n num_objects = data_set[CoreObjectData::NUM_OBJECTS.name]\n num_objects && text_values_match?(num_objects.to_s, element_value(num_objects_input), object_data_errors)\n\n collection = data_set[CoreObjectData::COLLECTION.name]\n collection && text_values_match?(collection, element_value(collection_input), object_data_errors)\n\n resp_depts = data_set[CoreObjectData::RESPONSIBLE_DEPTS.name]\n resp_depts && resp_depts.each { |dept| text_values_match?(dept[CoreObjectData::RESPONSIBLE_DEPT.name], element_value(resp_dept_input resp_depts.index(dept)), object_data_errors) }\n\n pub_to_list = data_set[CoreObjectData::PUBLISH_TO_LIST.name]\n pub_to_list && pub_to_list.each { |pub| text_values_match?(pub[CoreObjectData::PUBLISH_TO.name], element_value(publish_to_input pub_to_list.index(pub)), object_data_errors) }\n\n status = data_set[CoreObjectData::RECORD_STATUS.name]\n status && text_values_match?(status, element_value(record_status_input), object_data_errors)\n\n inv_statuses = data_set[CoreObjectData::INVENTORY_STATUS_LIST.name]\n inv_statuses && inv_statuses.each { |stat| text_values_match?(stat[CoreObjectData::INVENTORY_STATUS.name], element_value(inventory_status_input inv_statuses.index(stat)), object_data_errors) }\n\n brief_descrips = data_set[CoreObjectData::BRIEF_DESCRIPS.name]\n brief_descrips && brief_descrips.each { |descrip| text_values_match?(descrip[CoreObjectData::BRIEF_DESCRIP.name], element_value(brief_desc_text_area brief_descrips.index(descrip)), object_data_errors) }\n\n dist_feat = data_set[CoreObjectData::DISTINGUISHING_FEATURES.name]\n dist_feat && text_values_match?(dist_feat, element_value(dist_features_text_area), object_data_errors)\n\n comments = data_set[CoreObjectData::COMMENTS.name]\n comments && comments.each { |comment| text_values_match?(comment[CoreObjectData::COMMENT.name], element_value(comment_text_area comments.index(comment)), object_data_errors) }\n\n titles = data_set[CoreObjectData::TITLE_GRP.name]\n titles && titles.each do |title|\n index = titles.index title\n text_values_match?(title[CoreObjectData::TITLE.name], element_value(title_input index), object_data_errors)\n text_values_match?(title[CoreObjectData::TITLE_TYPE.name], element_value(title_type_input index), object_data_errors)\n text_values_match?(title[CoreObjectData::TITLE_LANG.name], element_value(title_lang_input index), object_data_errors)\n\n translations = title[CoreObjectData::TITLE_TRANSLATION_SUB_GRP.name]\n translations && translations.each do |trans|\n sub_index = translations.index trans\n text_values_match?(trans[CoreObjectData::TITLE_TRANSLATION.name], element_value(title_translation_input [index, sub_index]), object_data_errors)\n text_values_match?(trans[CoreObjectData::TITLE_TRANSLATION_LANG.name], element_value(title_translation_lang_input [index, sub_index]), object_data_errors)\n end\n end\n\n obj_names = 
data_set[CoreObjectData::OBJ_NAME_GRP.name]\n obj_names && obj_names.each do |name|\n index = obj_names.index name\n text_values_match?(name[CoreObjectData::OBJ_NAME_NAME.name], element_value(object_name_input index), object_data_errors)\n text_values_match?(name[CoreObjectData::OBJ_NAME_CURRENCY.name], element_value(object_name_currency_input index), object_data_errors)\n text_values_match?(name[CoreObjectData::OBJ_NAME_LEVEL.name], element_value(object_name_level_input index), object_data_errors)\n text_values_match?(name[CoreObjectData::OBJ_NAME_SYSTEM.name], element_value(object_name_system_input index), object_data_errors)\n text_values_match?(name[CoreObjectData::OBJ_NAME_TYPE.name], element_value(object_name_type_input index), object_data_errors)\n text_values_match?(name[CoreObjectData::OBJ_NAME_LANG.name], element_value(object_name_lang_input index), object_data_errors)\n text_values_match?(name[CoreObjectData::OBJ_NAME_NOTE.name], element_value(object_name_note_input index), object_data_errors)\n end\n\n object_data_errors\n end", "def test_appellation_validity\n assert @valid_appellation_1.valid?\n assert @valid_appellation_2.valid?\n end", "def run_verify(target_dir)\n cmd_ver_obj = commands[:verify_command]\n cmd_ver_obj.run({ :target_dir => target_dir })\n if cmd_ver_obj.findings.size == 0\n puts 'OK, system is conforming to model'.color(:green)\n else\n puts 'ERROR, detected inconsistencies/errors.'.color(:red)\n # cmd_ver_obj.findings.each do |val_error|\n # puts val_error.to_s\n # end\n end\n end", "def validate_same_number_of_sts_in_p_and_f(asp_w_f_sts)\n p_st_count = asp_w_f_sts.inject(0) { |m,e| m += e[0].count('@') }\n f_st_count = asp_w_f_sts.inject(0) { |m,e| m += e[1].count('@') }\n if p_st_count != f_st_count\n if debug\n puts asp_w_f_sts.ai(indent: -2)\n puts \"\\n\\n\\n\\n\"\n puts \"mismatches:\".color(:red)\n asp_w_f_sts.each { |(primary,foreign,conf)|\n pr_st_count = primary.count('@')\n fo_st_count = foreign.count('@')\n if pr_st_count != fo_st_count\n puts '-' * 10\n p primary\n puts \"pr_st_count: #{ pr_st_count }\"\n p foreign\n puts \"fo_st_count: #{ fo_st_count }\"\n puts \"conf: #{ conf.inspect }\"\n end\n }\n end\n raise \"Mismatch in subtitle counts: primary has #{ p_st_count } and foreign has #{ f_st_count }\"\n end\n true\n end", "def verify_contact_names(test_data)\n test_names = test_data[Org::CONTACT_NAMES.name]\n errors = []\n test_names = [{ Org::CONTACT_NAME.name => ''}] unless test_names\n test_names.each_with_index do |test_name, index|\n text_values_match?(test_name[Org::CONTACT_NAME.name], element_value(contact_name_input(index)), errors)\n end\n errors\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
LOCATIONS Enters locations per a given set of test data
def enter_locations(test_data)
  locations = test_data[CoreUseOfCollectionsData::LOCATION_LIST.name] || [{CoreUseOfCollectionsData::LOCATION.name => ''}]
  hide_notifications_bar
  prep_fieldsets_for_test_data([fieldset(CoreUseOfCollectionsData::LOCATION_LIST.name)], locations)
  locations.each_with_index do |location, index|
    logger.info "Entering location data set at index #{index}: #{location}"
    enter_auto_complete(location_input(index), location_options(index), location[CoreUseOfCollectionsData::LOCATION.name], 'Local Places')
  end
end
[ "def enter_locations(test_data)\n locations = test_data[UseOfCollections::LOCATION_LIST.name] || [{ UseOfCollections::LOCATION.name => ''}]\n prep_fieldsets_for_test_data([fieldset(UseOfCollections::LOCATION_LIST.name)], locations)\n locations.each_with_index do |location, index|\n enter_auto_complete(location_input(index), location_options(index), location[UseOfCollections::LOCATION.name], 'Local Places')\n end\n end", "def verify_locations(test_data)\n locations = test_data[UseOfCollections::LOCATION_LIST.name] || [{ UseOfCollections::LOCATION.name => ''}]\n locations.each_with_index { |location, index| verify_values_match(location[UseOfCollections::LOCATION.name], element_value(location_input index)) }\n end", "def verify_locations(test_data)\n locations = test_data[CoreUseOfCollectionsData::LOCATION_LIST.name] || [{CoreUseOfCollectionsData::LOCATION.name => ''}]\n locations.each_with_index { |location, index| verify_values_match(location[CoreUseOfCollectionsData::LOCATION.name], element_value(location_input index)) }\n end", "def enter_pahma_locations(test_data)\n locations = test_data[CoreUseOfCollectionsData::LOCATION_LIST.name] || [{CoreUseOfCollectionsData::LOCATION.name => ''}]\n hide_notifications_bar\n prep_fieldsets_for_test_data([fieldset(CoreUseOfCollectionsData::LOCATION_LIST.name)], locations)\n locations.each_with_index do |location, index|\n logger.info \"Entering location data set at index #{index}: #{location}\"\n enter_auto_complete(location_input(index), location_options(index), location[CoreUseOfCollectionsData::LOCATION.name], 'PAHMA Places')\n end\n end", "def update_sample_locs\n self.samples.each do |k|\n k['storage_location_id'] = self.storage_location_id\n end\n end", "def extend_location_data\n (@data['locations'] or []).each do |loc|\n (loc['team'] || []).each do |member|\n full_member_record = @data['team'].find {|m| m['name'] == member['name']}\n member.update full_member_record\n end\n end\n end", "def locations=(value)\n @locations = value\n end", "def process_location_data\n regions = get_location_data\n \n regions.each do |region|\n area = save_area_data region\n \n prefectures = region[\"Prefecture\"].is_a?(Array) ? region[\"Prefecture\"] : [region[\"Prefecture\"]] \n prefectures.each do |prefecture|\n pref = save_prefecture_data area.id, prefecture\n \n large_areas = prefecture[\"LargeArea\"].is_a?(Array) ? prefecture[\"LargeArea\"] : [prefecture[\"LargeArea\"]] \n large_areas.each do |large_area|\n l_area = save_large_area_data pref.id, large_area\n \n places = large_area[\"SmallArea\"].is_a?(Array) ? 
large_area[\"SmallArea\"] : [large_area[\"SmallArea\"]]\n places.each do |place|\n save_place_data area.id, l_area.id, place\n end\n end\n end \n end\nend", "def test_all_locations\r\n\t\toakland = City::new\r\n\t\tfour_locations = [\"Museum\", \"Cathedral\", \"Hospital\", \"Hillman\"]\r\n\t\tassert_includes four_locations, oakland.all_locations[0], oakland.all_locations[1]\r\n\t\tassert_includes four_locations, oakland.all_locations[2], oakland.all_locations[3]\r\n\tend", "def init_paths(locations)\n locations['Enumerable Canyon'].add_paths_to(locations['Duck Type Beach'],\n locations['Monkey Patch City'])\n locations['Duck Type Beach'].add_paths_to(locations['Enumerable Canyon'],\n locations['Matzburg'])\n locations['Monkey Patch City'].add_paths_to(locations['Enumerable Canyon'],\n locations['Matzburg'],\n locations['Nil Town'])\n locations['Nil Town'].add_paths_to(locations['Monkey Patch City'],\n locations['Hash Crossing'])\n locations['Matzburg'].add_paths_to(locations['Duck Type Beach'],\n locations['Monkey Patch City'],\n locations['Hash Crossing'],\n locations['Dynamic Palisades'])\n locations['Hash Crossing'].add_paths_to(locations['Nil Town'],\n locations['Matzburg'],\n locations['Dynamic Palisades'])\n locations['Dynamic Palisades'].add_paths_to(locations['Matzburg'],\n locations['Hash Crossing'])\n rescue StandardError\n raise 'Locations not correctly initialized.'\n end", "def location\n location = gcmd.locations? ? gcmd.locations : []\n\n polar = {\"Location_Category\" => \"GEOGRAPHIC REGION\", \"Location_Type\" => \"POLAR\"}\n arctic = {\"Location_Category\" => \"GEOGRAPHIC REGION\", \"Location_Type\" => \"ARCTIC\"}\n\n (placenames||[]).each do | p |\n\n area = p.area\n if p.area? and p.area == \"\"\n area = p.placename\n end\n\n case area\n\n when /^(Svalbard|Jan Mayen)$/ then\n location << {\n \"Location_Category\" => \"OCEAN\",\n \"Location_Type\" => \"ATLANTIC OCEAN\",\n \"Location_Subregion1\" => \"NORTH ATLANTIC OCEAN\",\n \"Location_Subregion2\" => \"SVALBARD AND JAN MAYEN\",\n \"Detailed_Location\" => p.placename\n }\n location << polar\n location << arctic\n\n when /^(Dronning Maud Land|Antarctica|Antarktis)$/ then\n location << {\n \"Location_Category\" => \"CONTINENT\",\n \"Location_Type\" => \"ANTARCTICA\",\n \"Detailed_Location\" => p.placename\n }\n location << polar\n\n when /^(Bouvetøya|Bouvet Island)$/\n location << {\n \"Location_Category\" => \"OCEAN\",\n \"Location_Type\" => \"ATLANTIC OCEAN\",\n \"Location_Subregion1\" => \"SOUTH ATLANTIC OCEAN\",\n \"Location_Subregion2\" => \"BOUVET ISLAND\",\n \"Detailed_Location\" => p.placename\n }\n location << polar\n\n when \"Peter I Øy\" then\n locations << {\n \"Location_Category\" => \"OCEAN\",\n \"Location_Type\" => \"PACIFIC OCEAN\",\n \"Location_Subregion1\" => \"SOUTH PACIFIC OCEAN\",\n \"Detailed_Location\" => p.placename\n }\n locations << polar\n locations << {\n \"Location_Category\" => \"CONTINENT\",\n \"Location_Type\" => \"ANTARCTICA\",\n \"Detailed_Location\" => p.placename\n }\n end\n\n end\n location.uniq\n end", "def verify_location(test_data)\n verify_values_match(test_data[CoreUseOfCollectionsData::LOCATION.name], element_value(location_input))\n end", "def build_locations\n\n height.times do |h|\n width.times do |w|\n locations.build( x_coordinate: h,\n y_coordinate: w,\n state: 'covered',\n has_mine: false,\n mines: 0 )\n end\n end\n\n analyzer = Field::Analyzer.new self\n\n locations.sample(mines).each do |loc|\n loc.has_mine = true\n analyzer.locations_around(loc).each { |area| area.mines 
+= 1 }\n end\n\n self.state = 'ready'\n\n end", "def test_update_location_name_errors\n params = update_params_from_loc(locations(:albion))\n params[:location][:display_name] = \"Somewhere Dubious\"\n update_location_error(params)\n end", "def test_sitemap_geos_and_queries\n geos = ['ca', 'co', 'il', 'ny']\n\n geos.each do |loc|\n # Step 1\n params = {\n 'g' => loc,\n 'h_geos' => 0,\n 'h_queries' => 0\n }\n\n get '/site_map/geos_and_queries', params\n assert_response(@response, :success)\n assert_equal(0, @parsed_response['Geos']['NumFound'], @parsed_response)\n assert_equal(0, @parsed_response['Queries']['NumFound'], @parsed_response)\n\n # Step 2\n params = {\n 'g' => loc,\n 'h_geos' => 10,\n 'h_queries' => 30\n }\n\n get '/site_map/geos_and_queries', params\n assert_response(@response, :success)\n assert_equal(10, @parsed_response['Geos']['NumFound'], @parsed_response)\n assert_equal(30, @parsed_response['Queries']['NumFound'], @parsed_response)\n\n # Step 3\n params = {\n 'g' => loc,\n 'h_geos' => 50,\n 'h_queries' => 100\n }\n\n get '/site_map/geos_and_queries', params\n assert_response(@response, :success)\n assert_equal(50, @parsed_response['Geos']['NumFound'], @parsed_response)\n assert_equal(100, @parsed_response['Queries']['NumFound'], @parsed_response)\n\n # Step 4\n params = {\n 'g' => loc\n }\n\n get '/site_map/geos_and_queries', params\n assert_response(@response, :success)\n assert_equal(50, @parsed_response['Geos']['NumFound'], @parsed_response)\n assert_equal(50, @parsed_response['Queries']['NumFound'], @parsed_response)\n\n # Step 5\n params = {\n 'g' => loc,\n 'h_geos' => 100,\n 'h_queries' => 1000\n }\n\n get '/site_map/geos_and_queries', params\n assert_response(@response, :success)\n assert_equal(50, @parsed_response['Geos']['NumFound'], @parsed_response)\n assert_equal(100, @parsed_response['Queries']['NumFound'], @parsed_response)\n end\n end", "def locations\n collect\n end", "def test_initial_location_valid_location\r\n\t\tassert_includes [@my_sim.cathedral, @my_sim.hospital, @my_sim.hillman, @my_sim.museum], @my_sim.set_driver_location_initial\r\n\tend", "def test_fun_other_places\n\t\tdriver = @city.drivers[0]\n\t\tdriver.set_location @city.locations[1]\n\t\tassert_output(\"Driver 1 heading from Cathedral to Monroeville via Fourth Ave.\\n\") {@city.iterate driver, 1}\n\t\tdriver.set_location @city.locations[2]\n\t\tassert_output(\"Driver 1 heading from Hillman to Downtown via Fifth Ave.\\n\") {@city.iterate driver, 1}\n\tend", "def save_sample_locs\n self.samples.each do |k|\n @sample = Sample.find(k[:id])\n @sample.storage_location_id = self.storage_location['id'] \n @sample.save\n end\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Verifies locations match a given set of test data
def verify_locations(test_data)
  locations = test_data[CoreUseOfCollectionsData::LOCATION_LIST.name] || [{CoreUseOfCollectionsData::LOCATION.name => ''}]
  locations.each_with_index { |location, index| verify_values_match(location[CoreUseOfCollectionsData::LOCATION.name], element_value(location_input index)) }
end
[ "def verify_locations(test_data)\n locations = test_data[UseOfCollections::LOCATION_LIST.name] || [{ UseOfCollections::LOCATION.name => ''}]\n locations.each_with_index { |location, index| verify_values_match(location[UseOfCollections::LOCATION.name], element_value(location_input index)) }\n end", "def verify_location(test_data)\n verify_values_match(test_data[CoreUseOfCollectionsData::LOCATION.name], element_value(location_input))\n end", "def test_all_locations\r\n\t\toakland = City::new\r\n\t\tfour_locations = [\"Museum\", \"Cathedral\", \"Hospital\", \"Hillman\"]\r\n\t\tassert_includes four_locations, oakland.all_locations[0], oakland.all_locations[1]\r\n\t\tassert_includes four_locations, oakland.all_locations[2], oakland.all_locations[3]\r\n\tend", "def test_initial_location_valid_location\r\n\t\tassert_includes [@my_sim.cathedral, @my_sim.hospital, @my_sim.hillman, @my_sim.museum], @my_sim.set_driver_location_initial\r\n\tend", "def test_CheckIfAllFourLocationsAreDefined\n driver = Driver.new(1)\n assert_includes ['Hillman', 'Museum', 'Hospital', 'Cathedral'], driver.gets_location\n end", "def compare_locations?(locatin_from, location_to)\n\t\t# location_to.title == locatin_from.title &&\n\t\tlocation_to.city == locatin_from.city &&\t\t\n\t\tlocation_to.custom_address == locatin_from.custom_address &&\n\t\t# location_to.custom_address_use == locatin_from.custom_address_use &&\n\t\t# location_to.gmap_use == locatin_from.gmap_use &&\n\t\t# location_to.gmaps == locatin_from.gmaps &&\n\t\tlocation_to.latitude == locatin_from.latitude &&\n\t\tlocation_to.longitude == locatin_from.longitude\n\t\t# location_to.title == locatin_from.title\n end", "def test_identify_latlon_basic\n [\n \"50.06773 14.37742\",\n \"50.06773, 14.37742\",\n \"+50.06773 +14.37742\",\n \"+50.06773, +14.37742\"\n ].each do |code|\n latlon_check code, 50.06773, 14.37742\n end\n end", "def test_identify_latlon_se_d\n [\n \"S50.06773 E14.37742\",\n \"S50.06773, E14.37742\",\n \"50.06773S 14.37742E\",\n \"50.06773S, 14.37742E\"\n ].each do |code|\n latlon_check code, -50.06773, 14.37742\n end\n end", "def test_CheckIfAllFourLocationsAreDefined\n driver = Driver::new(1);\n assert_includes [\"Hillman\", \"Museum\", \"Hospital\", \"Cathedral\"], driver.getLocation;\n end", "def test_driver_checkLocation_mid\n\t\td = Driver::new(\"Driver 1\",1)\n\t\td.checkLocation\n\t\t# assert_equal d.classes, 2\n\t\td.setLocation 1 #from cathedral go to museum\n\t\td.checkLocation\n\t\tassert_equal [2,1], [d.classes,d.dinotoys]\n\t\t#assert_equal d.dinotoys, 1\n\tend", "def valid_locations\n if start_location.present?\n result1 = Geocoder.search(start_location.full_address)\n if result1.length == 0 || result1.first.data[\"partial_match\"] == true\n errors.add(:start_location, \"is invalid.\")\n end\n end\n\n if end_location.present?\n result2 = Geocoder.search(end_location.full_address)\n if result2.length == 0 || result2.first.data[\"partial_match\"] == true\n errors.add(:end_location, \"is invalid.\")\n end\n end\n end", "def test_identify_latlon_sw_d\n [\n \"S50.06773 W14.37742\",\n \"S50.06773, W14.37742\",\n \"50.06773S 14.37742W\",\n \"50.06773S, 14.37742W\"\n ].each do |code|\n latlon_check code, -50.06773, -14.37742\n end\n end", "def verify_foundation_place(test_data)\n errors = []\n text_values_match?(test_data[Org::FOUNDING_PLACE.name], element_value(foundation_place_input), errors)\n errors\n end", "def verify_foundation_place(test_data)\n errors = []\n text_values_match?(test_data[CoreOrgData::FOUNDING_PLACE.name], 
element_value(foundation_place_input), errors)\n errors\n end", "def verify_branches branch_table\n branch_table.map_headers!(&:downcase)\n branch_table.hashes.each do |branch_data|\n repository = branch_data['repository']\n expected_branches = Kappamaki.from_sentence branch_data['branches']\n expected_branches.map! { |branch_name| branch_name_for_location repository, branch_name }\n actual_branches = branches_for_repository repository\n expect(actual_branches).to match_array(expected_branches)\n end\nend", "def enter_locations(test_data)\n locations = test_data[UseOfCollections::LOCATION_LIST.name] || [{ UseOfCollections::LOCATION.name => ''}]\n prep_fieldsets_for_test_data([fieldset(UseOfCollections::LOCATION_LIST.name)], locations)\n locations.each_with_index do |location, index|\n enter_auto_complete(location_input(index), location_options(index), location[UseOfCollections::LOCATION.name], 'Local Places')\n end\n end", "def verify_city_location\n Country.all(:conditions => \"language='en'\").each do |country|\n location = MultiLanguage::Location.geocode(country.name)\n bounds = location.suggested_bounds\n\n City.with_location.all(:conditions => ['language=:lang and country_id=:country', {:lang => 'en', :country => country.id}]).each do |city|\n if City.find_within_bounds(bounds, :conditions => {:id => city.id}).count > 0 then\n #p \"#{city.name} - OK\"\n else\n p \"#{city.id} - #{city.name} - FAILED\"\n end\n end\n end\n end", "def test_identify_latlon_ne_d\n [\n \"N50.06773 E14.37742\",\n \"N50.06773, E14.37742\",\n \"50.06773N 14.37742E\",\n \"50.06773N, 14.37742E\"\n ].each do |code|\n latlon_check code, 50.06773, 14.37742\n end\n end", "def enter_locations(test_data)\n locations = test_data[CoreUseOfCollectionsData::LOCATION_LIST.name] || [{CoreUseOfCollectionsData::LOCATION.name => ''}]\n hide_notifications_bar\n prep_fieldsets_for_test_data([fieldset(CoreUseOfCollectionsData::LOCATION_LIST.name)], locations)\n locations.each_with_index do |location, index|\n logger.info \"Entering location data set at index #{index}: #{location}\"\n enter_auto_complete(location_input(index), location_options(index), location[CoreUseOfCollectionsData::LOCATION.name], 'Local Places')\n end\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Verifies fees match test data
def verify_fees(test_data)
  fees = test_data[CoreUseOfCollectionsData::FEE_GRP.name] || [CoreUseOfCollectionsData.empty_fee]
  fees.each_with_index do |fee, index|
    verify_values_match(fee[CoreUseOfCollectionsData::FEE_CURRENCY.name], element_value(fee_currency_input index))
    verify_values_match(fee[CoreUseOfCollectionsData::FEE_VALUE.name], element_value(fee_value_input index))
    verify_values_match(fee[CoreUseOfCollectionsData::FEE_NOTE.name], element_value(fee_note_input index))
  end
end
[ "def verify_fees(test_data)\n fees = test_data[UseOfCollections::FEE_GRP.name] || [UseOfCollections.empty_fee]\n fees.each_with_index do |fee, index|\n verify_values_match(fee[UseOfCollections::FEE_CURRENCY.name], element_value(fee_currency_input index))\n verify_values_match(fee[UseOfCollections::FEE_VALUE.name], element_value(fee_value_input index))\n verify_values_match(fee[UseOfCollections::FEE_NOTE.name], element_value(fee_note_input index))\n end\n end", "def fees\n total_input - total_output\n end", "def fees\n raise Sibit::NotSupportedError, 'Cryptoapis doesn\\'t provide recommended fees'\n end", "def test_validity_of_examples\n @dfa_examples.each do |e|\n assert_equal(true, e.deterministic?)\n end\n @nfa_examples.each do |e|\n assert_equal(false, e.deterministic?)\n end\n end", "def test_pay_application_sum_should_not_exceed_invoice_amount\n \n end", "def test_address_deposit_new_and_existing\n address = \"123456\"\n billcoins_sent = 11\n assert_equal [true, 11], @VERIFY.address_deposit(address, billcoins_sent)\n \n assert_equal [false, 22], @VERIFY.address_deposit(address, billcoins_sent)\n end", "def fees\n raise Sibit::NotSupportedError, 'Btc.com doesn\\'t provide recommended fees'\n end", "def verify_actual_vs_expected_production\n job.job_markings.each do |marking|\n puts marking.actual_production\n puts marking.amount\n if marking.actual_production > marking.amount\n SiteMailer.job_marking_production_over_expected(marking).deliver\n end\n end\n end", "def calculate_fees(data)\n\t# compute the enrolment fee\n\tenrollment_fee = data[0] * (data[1] == 1 ? $ENROLLMENT_FEE_NONCA_RESIDENTS\n\t : $ENROLLMENT_FEE_CA_RESIDENTS)\n \n # compute the student service fee\n\tstudent_service_fee = (data[2] % 2 == 1 ? $STUDENT_SERVICES_FEE_WINTER_SUMMER\n : $STUDENT_SERVICES_FEE_FALL_SPRING) +\n\t\t (data[3] == \"y\" ? $STUDENT_SERVICES_FEE_STICKER : 0) +\n\t\t (data[4] == \"y\" ? $STUDENT_SERVICES_ID_CARD : 0)\n\t\t\t\t\t\t\n # compute the cost of the parking decal\n\tparking_decal_fee = (data[2] % 2 == 1 ? $PARKING_DECAL_WINTER_SUMMER\n : $PARKING_DECAL_FALL_SPRING)\n\tparking_decal_fee = (data[5] == \"y\" ? parking_decal_fee : 0)\n\t\n\ttotal_fees = enrollment_fee + student_service_fee + parking_decal_fee\nend", "def amt_mismatched?\n amt_paid != amt_subm\n end", "def test_bar_accepts_fee()\n @bar.bar_accepts_fee(@room1.room_fee_per_guest)\n assert_equal(510,@bar.bar_cash)\n end", "def total_fee_is_valid\n errors.add(:total_fee,'The total fee is invalid.') unless total_fee_is_valid?\n end", "def test_verify_transactions_valid\n balances = verify_transactions(create_maps(@full))\n assert_kind_of Hash, balances\n balances.each do |addr, balance|\n if addr == \"SYSTEM\"\n assert balance < 0\n else\n assert balance >= 0\n end\n end\n end", "def test_pay_application_should_not_exceed_invoice_amount\n \n end", "def free?\n self.setupFee == 0 && self.laborFee == 0 && self.oneTimeFee == 0 && self.recurringFee == 0 && self.hourlyRecurringFee == 0\n end", "def test_guest_can_afford__not_enough_money()\n assert_equal(false, @guest1.can_afford?(500))\n end", "def test_guest_funds\n assert_equal(200, @guest3.return_guest_funds)\n end", "def fees\n resp = authenticated_post(\"account_fees\")\n resp.body\n end", "def fees\n @fees ||= {\n \"insurance_fee\" => (commission * ASSURANCE_SHARE).round(0),\n \"assistance_fee\" => (ASSISTANCE_COST * rental.duration).round(0)\n }.tap { |fees| fees[\"drivy_fee\"] = commission - fees.values.inject(:+) }\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Verifies that the note matches test data
def verify_note(test_data)
  verify_values_match(test_data[CoreUseOfCollectionsData::NOTE.name], element_value(note_text_area))
end
[ "def verify_note(test_data)\n verify_values_match(test_data[UseOfCollections::NOTE.name], element_value(note_text_area))\n end", "def verify_history_notes(test_data)\n test_histories = test_data[Org::HISTORY_NOTES.name]\n errors = []\n test_histories = [{ Org::HISTORY_NOTE.name => ''}] unless test_histories\n test_histories.each_with_index do |test_history, index|\n text_values_match?(test_history[Org::HISTORY_NOTE.name], element_value(history_input(index)), errors)\n end\n errors\n end", "def verify_history_notes(test_data)\n test_histories = test_data[CoreOrgData::HISTORY_NOTES.name]\n errors = []\n test_histories = [{CoreOrgData::HISTORY_NOTE.name => ''}] unless test_histories\n test_histories.each do |test_history|\n index = test_histories.index test_history\n text_values_match?(test_history[CoreOrgData::HISTORY_NOTE.name], element_value(history_input(index)), errors)\n end\n errors\n end", "def verify_note(note)\n logger.debug \"Verifying visible data for note ID #{note.id}\"\n\n # Verify data visible when note is collapsed\n collapsed_note_el(note).when_present Utils.medium_wait\n collapse_note note\n visible_data = visible_collapsed_note_data note\n expected_short_updated_date = \"Last updated on #{expected_note_short_date_format note.updated_date}\"\n wait_until(1, \"Expected '#{note.subject}', got #{visible_data[:subject]}\") { visible_data[:subject] == note.subject }\n wait_until(1, \"Expected '#{expected_short_updated_date}', got #{visible_data[:date]}\") { visible_data[:date] == expected_short_updated_date }\n\n # Verify data visible when note is expanded\n expand_note note\n visible_data.merge!(visible_expanded_note_data note)\n wait_until(1, \"Expected '#{note.body}', got '#{visible_data[:body]}'\") { visible_data[:body] == \"#{note.body}\" }\n wait_until(1, 'Expected non-blank advisor name') { !visible_data[:advisor].empty? }\n wait_until(1, 'Expected non-blank advisor role') { !visible_data[:advisor_role].empty? }\n wait_until(1, \"Expected '#{note.advisor.depts}', got #{visible_data[:advisor_depts]}\") { visible_data[:advisor_depts] == note.advisor.depts }\n\n # Topics\n note_topics = (note.topics.map { |t| t.name.upcase }).sort\n wait_until(1, \"Expected '#{note_topics}', got #{visible_data[:topics]}\") { visible_data[:topics] == note_topics }\n wait_until(1, \"Expected no remove-topic buttons, got #{visible_data[:remove_topics_btns].length}\") { visible_data[:remove_topics_btns].length.zero? 
}\n\n # Attachments\n non_deleted_attachments = note.attachments.reject &:deleted_at\n expected_file_names = non_deleted_attachments.map &:file_name\n wait_until(1, \"Expected '#{expected_file_names.sort}', got #{visible_data[:attachments].sort}\") { visible_data[:attachments].sort == expected_file_names.sort }\n\n # Check visible timestamps within 1 minute to avoid failures caused by a 1 second diff\n expected_long_created_date = \"Created on #{expected_note_long_date_format note.created_date}\"\n wait_until(1, \"Expected '#{expected_long_created_date}', got #{visible_data[:created_date]}\") do\n Time.parse(visible_data[:created_date]) <= Time.parse(expected_long_created_date) + 60\n Time.parse(visible_data[:created_date]) >= Time.parse(expected_long_created_date) - 60\n end\n expected_long_updated_date = \"Last updated on #{expected_note_long_date_format note.updated_date}\"\n wait_until(1, \"Expected '#{expected_long_updated_date}', got #{visible_data[:updated_date]}\") do\n Time.parse(visible_data[:updated_date]) <= Time.parse(expected_long_updated_date) + 60\n Time.parse(visible_data[:updated_date]) >= Time.parse(expected_long_updated_date) - 60\n end\n end", "def verify_authorization_note(test_data)\n verify_values_match(test_data[CoreUseOfCollectionsData::AUTHORIZATION_NOTE.name], element_value(authorization_note_input))\n end", "def valid_notes?\n return true if @version >= 8 # Easy out if we've already identified the version\n\n # Just because my fingerprinting isn't great yet, adding in a more manual check for the key tables we need\n expected_tables = [\"ZICCLOUDSYNCINGOBJECT\",\n \"ZICNOTEDATA\"]\n @database.execute(\"SELECT name FROM sqlite_master WHERE type='table'\") do |row|\n expected_tables.delete(row[\"name\"])\n end\n\n return (expected_tables.length == 0)\n end", "def valid_notes?\n return true if @version >= IOS_LEGACY_VERSION # Easy out if we've already identified the version\n\n # Just because my fingerprinting isn't great yet, adding in a more manual check for the key tables we need\n expected_tables = [\"ZICCLOUDSYNCINGOBJECT\",\n \"ZICNOTEDATA\"]\n @database.execute(\"SELECT name FROM sqlite_master WHERE type='table'\") do |row|\n expected_tables.delete(row[\"name\"])\n end\n\n return (expected_tables.length == 0)\n end", "def dataTest data\n failure = RubyUnit::AssertionFailure.new(data)\n assertEqual data, failure.data, 'Assertion data Hash is incorrect'\n end", "def test_validate_book_data\n b = Book.new('OL26412312M')\n assert_equal(b.title, 'The bazaar of bad dreams')\n assert_equal(b.author, 'Stephen King')\n assert_equal(true, b.cover?)\n\n b = Book.new('OL145191W')\n assert_equal(b.title, 'Picasso')\n assert_equal(true, b.subjects.include?('Amsterdam (Netherlands)'))\n assert_equal(b.cover_img_small, 'https://covers.openlibrary.org/b/id/2238306-S.jpg')\n\n b = Book.new('OL8141930M')\n assert_equal(b.isbn.to_s, '0786806931')\n assert_equal(true, b.rating > 3.5 && b.rating < 5) if set_goodreads_key\n assert_equal(b.cover_id, 544_129)\n assert_equal(b.publish_date, 'May 1, 2004')\n\n b = Book.new('OL24229110M')\n assert_equal(b.amazon_link, 'https://www.amazon.com/dp/0451149513')\n end", "def test_verify_correct_line\n assert_equal \"288d\", @VERIFY.verify_line(@split_line)\n end", "def check_midi_file_generated_correctly(xml_questions, chord_root, chord_quality, chord_inversion, correct_midi_note_events)\n midi_file_name = xml_questions.at_xpath(\"answer[chordRoot/text()='#{chord_root}' and chordQuality/text()='#{chord_quality}' and 
chordInversion/text()='#{chord_inversion}']/../questionMidiFileName\").text\n stored_midi_notes = xml_questions.at_xpath(\"answer[chordRoot/text()='#{chord_root}' and chordQuality/text()='#{chord_quality}' and chordInversion/text()='#{chord_inversion}']/../midiNotes\").text.split.map { |note| note.to_i }\n assert_equal(false, midi_file_name.strip.empty?)\n assert(File.exist?(\"#{TestUtils::GENERATE_QUESTION_FILES_DIR}/#{midi_file_name}\"))\n\n midi_sequence = MIDI::Sequence.new\n File.open(\"#{TestUtils::GENERATE_QUESTION_FILES_DIR}/#{midi_file_name}\", 'rb') { |file| midi_sequence.read(file) }\n generated_midi_note_events = Array.new\n midi_sequence.each do |track|\n track.each do |event|\n if MIDI::NoteEvent === event\n generated_midi_note_events << event.note\n end\n end\n end\n\n assert_equal(correct_midi_note_events, generated_midi_note_events)\n\n # Checking the MIDI notes stored in the XML file by only looking at the note\n # on event for the MIDI notes.\n if correct_midi_note_events.length.eql? 6\n # First/second inversion chord testing.\n assert_equal(correct_midi_note_events[0, 3], stored_midi_notes)\n else\n # Third inversion chord testing.\n assert_equal(correct_midi_note_events[0, 4], stored_midi_notes)\n end\n end", "def test_verify_correct_line2\n split_line = [\"0\", \"0\", \"SYSTEM>569274(100)\", \"1553184699.650330000\", \"f311\"]\n assert_equal \"f311\", @VERIFY.verify_line(split_line)\n end", "def descrips_match(expected, descrips)\n begin\n expected.zip(descrips).collect {\n |x, y| (x == :anything || x == y) }.inject(true) {\n |x, y| x && y }\n rescue TypeError => te\n puts \"TypeError: #{te}\"\n end\n end", "def test_notes_export_format\n assert_equal(\n \"\",\n observations(:minimal_unknown_obs).notes_export_formatted\n )\n\n assert_equal(\n \"Found in a strange place... & with śtrangè characters™\",\n observations(:detailed_unknown_obs).notes_export_formatted\n )\n assert_equal(\n \"substrate: soil\",\n observations(:substrate_notes_obs).notes_export_formatted\n )\n assert_equal(\n \"substrate: soil\\nOther: slimy\",\n observations(:substrate_and_other_notes_obs).notes_export_formatted\n )\n end", "def different_notes?\n if self.source_note.nil?\n return false\n end\n if self.target_note.nil?\n return false\n end\n return self.source_note != self.target_note\n end", "def test_invalid_duplicate_metaphor\n met = Metaphor.new(:metaphor => \" None can chain a mind / Whom this sweet chordage cannot bind.\")\n assert !met.save\n end", "def reliable_match?(record_metadata)\n return true unless (@record_id.nil? or @record_id.empty?)\n return true unless (@issn.nil? or @issn.empty?) and (@isbn.nil? or @isbn.empty?)\n return false if (record_metadata.nil? or record_metadata.empty? or record_metadata[:title].nil? or record_metadata[:title].empty?)\n # Titles must be equal\n return false unless record_metadata[:title].to_s.downcase.eql?(@title.downcase)\n # Author must be equal\n return false unless record_metadata[:author].to_s.downcase.eql?(@author.downcase)\n return true\n end", "def expect_note\n lambda { |cand_id, rendered_or_page, td_index|\n expect(rendered_or_page).to have_css \"td[id=tr#{cand_id}_td#{td_index}]\", text: I18n.t('label.sidebar.candidate_note')\n }\n end", "def audit_data_verify(a_data, v_data)\n found = nil\n a_data.each do |entry|\n if entry.text.match(v_data)\n found = true\n print_to_output(\"VERIFIED: Expected Audit data: '#{v_data}' is displayed.\")\n break\n end\n end\n raise \"Audit data #{v_data} was not found!\" unless found\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
PROVISOS Enters or removes provisos per a given set of test data
def enter_provisos(test_data)
  hide_notifications_bar
  logger.info "Entering provisos '#{test_data[CoreUseOfCollectionsData::PROVISOS.name]}'"
  wait_for_element_and_type(provisos_text_area, test_data[CoreUseOfCollectionsData::PROVISOS.name])
end
[ "def calculate_and_set_protease_volumes\n protease_samples.each do |protease|\n protease_ops = operations.select { |op| op.input(PROTEASE).sample == protease }\n stock_conc = stock_concentration(ops: protease_ops)\n protease_ops.each { |op| set_protease_volumes(op, stock_conc) }\n end\n end", "def verify_provisos(test_data)\n verify_values_match(test_data[CoreUseOfCollectionsData::PROVISOS.name], element_value(provisos_text_area))\n end", "def verify_provisos(test_data)\n verify_values_match(test_data[UseOfCollections::PROVISOS.name], element_value(provisos_text_area))\n end", "def test_016\n test_000\n login(\"root\",\"root\")\n all_analyze_config_pus_before = AnalyzeConfigsPus.find_all_by_pu_id(PU_ID)\n assert_not_equal all_analyze_config_pus_before, []\n delete_pu\n all_analyze_config_pus_after= AnalyzeConfigsPus.find_all_by_pu_id(PU_ID)\n assert_equal all_analyze_config_pus_after,[]\n logout\n end", "def delete_testds(testds)\n not_found = []\n testds.each do |testd_td|\n descriptor = Testd.where({ 'testd.name' => testd_td['name'],\n 'testd.vendor' => testd_td['vendor'],\n 'testd.version' => testd_td['version'] }).first\n if descriptor.nil?\n logger.error 'Test Descriptor not found ' + testd_td.to_s\n not_found << testd_td\n else\n if descriptor['pkg_ref'] == 1\n descriptor.destroy\n del_ent_dict(descriptor, :testd)\n else descriptor.update_attributes(pkg_ref: descriptor['pkg_ref'] - 1)\n end\n end\n end\n not_found\n end", "def test_export_to_pstore\r\n model=Model.new(2, 'pstore persistance')\r\n model.description = 'will be deleted between tests'\r\n model.commentary = 'and they are off'\r\n \r\n proc1 = model.node_list.add_node(1, 'process one', PROCESS)\r\n proc2 = model.node_list.add_node(2, 'process two', PROCESS)\r\n proc1.evidence = [0.2, 0.3]\r\n model.link_list.add_link(proc1, proc2)\r\n \r\n Persist.export_to_pstore(model, 'models/unittest1')\r\n end", "def actualizar_propietarios\n self.propietarios.each do |p|\n p.es_propietario = false if p.negocios_propios.count == 1\n p.save\n end\n end", "def update_with_certification_tests(params)\n add_measure_tests(params)\n save!\n add_filtering_tests if c4_test\n add_checklist_test if c1_test\n end", "def update_with_measure_tests(product_params)\n new_ids = product_params['measure_ids'] ? product_params['measure_ids'] : []\n old_ids = measure_ids ? 
measure_ids : []\n update_attributes(product_params)\n (new_ids - old_ids).each do |measure_id|\n m = bundle.measures.top_level.find_by(hqmf_id: measure_id)\n product_tests.build({ name: m.name, measure_ids: [measure_id], cms_id: m.cms_id }, MeasureTest)\n end\n (old_ids - new_ids).each { |measure_id| product_tests.in(measure_ids: measure_id).destroy }\n add_filtering_tests if c4_test\n end", "def update_with_measure_tests(product_params)\n add_measure_tests(product_params)\n save!\n add_filtering_tests if c4_test\n add_checklist_test if c1_test\n end", "def test_041\n test_000\n login(\"root\",\"root\")\n all_subtasks = Subtask.find_all_by_task_id(TASK_ID)\n all_subtasks_ids = all_subtasks.map { |a_subtask| a_subtask.id }\n all_analyze_config_subtask_before = AnalyzeConfigsSubtasks.find(:all, :conditions => [ \"subtask_id IN (?)\", all_subtasks_ids])\n assert_not_equal all_analyze_config_subtask_before, []\n delete_pj\n all_analyze_config_subtask_after= AnalyzeConfigsSubtasks.find(:all, :conditions => [ \"subtask_id IN (?)\", all_subtasks_ids])\n assert_equal all_analyze_config_subtask_after,[]\n logout\n end", "def test_022\n test_000\n login(\"root\",\"root\")\n all_subtasks = Subtask.find_all_by_task_id(TASK_ID)\n all_subtasks_ids = all_subtasks.map { |a_subtask| a_subtask.id }\n all_analyze_config_subtask_before = AnalyzeConfigsSubtasks.find(:all, :conditions => [ \"subtask_id IN (?)\", all_subtasks_ids])\n assert_not_equal all_analyze_config_subtask_before, []\n delete_pu\n all_analyze_config_subtask_after= AnalyzeConfigsSubtasks.find(:all, :conditions => [ \"subtask_id IN (?)\", all_subtasks_ids])\n assert_equal all_analyze_config_subtask_after,[]\n logout\n end", "def test_038\n test_000\n login(\"root\",\"root\")\n all_analyze_rule_config_pjs_before = AnalyzeRuleConfigsPjs.find_all_by_pj_id(PJ_ID)\n assert_not_equal all_analyze_rule_config_pjs_before, []\n delete_pj\n all_analyze_rule_config_pjs_after= AnalyzeRuleConfigsPjs.find_all_by_pj_id(PJ_ID)\n assert_equal all_analyze_rule_config_pjs_after,[]\n logout\n end", "def build_provisions(run_directory, data_directory, whitelist, blacklist)\n \n Scarcity::Submission.new do\n \n # declare where runs happen and where data comes from\n # and indicate that we gather default provisions accordingly \n # This will not need to change in almost all cases.\n runs_in run_directory\n pulls_from data_directory, :only => whitelist, :except => blacklist\n gathers_provisions :zip_data => true\n \n # Declare other goods that will be provided to each dataset at run time.\n provides :from => 'app/executables', :to => :each_dataset do\n file '<%= base_name %>.py', :chmod => 0755\n end\n \n provides :from => 'app/scripts', :to => :each_dataset do\n file 'prejob.py', :chmod => 0755\n file 'postjob.py', :chmod => 0755\n end\n \n provides :from => 'app/submits', :to => :each_dataset do\n file '<%= base_name %>.submit'\n end\n \n provides :from => 'lib/submits', :to => :each_dataset do\n file 'null.submit'\n end\n \n # You might also declare goods to be provided to the segment as a whole\n # provides :from => 'lib/submits', :to => :segment do\n # file 'null.submit'\n # end\n \n end\n \nend", "def enable_all_experiments!\n exps = Experiment.find(:all, :conditions => {\n :min_genes => self.min_genes,\n :predict_matrix_id => self.predict_matrix_id,\n :run_result => 0\n })\n exps.delete_if { |x| x.children.count == 0 }\n self.experiment_ids = exps.collect { |x| x.id }\n end", "def test_multiple_process\r\n linklist = @mymodel.link_list\r\n nodelist = 
@mymodel.node_list \r\n \r\n process1 = nodelist.add_node(1, 'process one', PROCESS) \r\n process2 = nodelist.add_node(2, 'process two', PROCESS)\r\n process3 = nodelist.add_node(3, 'process three', PROCESS) \r\n \r\n link1 = linklist.add_link(process1, process2, true)\r\n link2 = linklist.add_link(process1, process3, true)\r\n\r\n process1.evidence = [0.63, 0.64]\r\n \r\n assert_equal([0.63, 0.64], process2.evidence)\r\n assert_equal([0.63, 0.64], process3.evidence) \r\n \r\n #remove the callbacks for process 2 as a side affect of deleting the link\r\n linklist.remove_link(link1)\r\n \r\n process1.evidence = [0.44, 0.50]\r\n\r\n assert_equal(\"undefined\", process2.evidence)\r\n assert_equal([0.44, 0.50], process3.evidence) \r\n end", "def test_uts_at_m_001\n all = AnalyzeTool.find(:all)\n all.each do |tool|\n tool.in_use = true\n tool.save\n end unless all.blank?\n #\n analyze_tools = AnalyzeTool.all_analyze_tools\n assert !analyze_tools.blank?\n assert_equal all.size,analyze_tools.size\n end", "def test_del_multiple_non_profile_nodes\r\n\r\n @place_holder.login_goto_profile(@provider_url)\r\n\r\n # Switch the perspective to 'Graph'\r\n @place_holder.add_to_or_view_on_graph('initially form the profile view when the nodes are not yet added to the graph')\r\n\r\n # Delete multiple nodes\r\n begin\r\n\r\n # Delete NPI node\r\n npi_node_vertex_id = PropertiesReader.get_npi_node_vertex_id\r\n npi_node_message = PropertiesReader.get_npi_node_message\r\n @place_holder.delete_node(npi_node_vertex_id, npi_node_message, true)\r\n\r\n # Delete location node\r\n location1_node_vertex_id = PropertiesReader.get_location1_node_vertex_id\r\n location1_node_message = PropertiesReader.get_location1_node_message\r\n @place_holder.delete_node(location1_node_vertex_id, location1_node_message, true)\r\n\r\n # Until dependency on uids is reduced, these two nodes are excluded\r\n begin\r\n ## Delete phone node\r\n #phone_node_vertex_id = PropertiesReader.get_phone_node_vertex_id\r\n #phone_node_message = PropertiesReader.get_phone_node_message\r\n #@place_holder.delete_node(phone_node_vertex_id, phone_node_message, true)\r\n #\r\n ## Delete tax_id node\r\n #tax_id_node_vertex_id = PropertiesReader.get_tax_id_node_vertex_id\r\n #tax_id_node_message = PropertiesReader.get_tax_id_node_message\r\n #@place_holder.delete_node(tax_id_node_vertex_id, tax_id_node_message, true)\r\n\r\n end\r\n end\r\n\r\n # Switch the perspective to 'Profiles'\r\n @place_holder.go_to_profiles_view\r\n\r\n # Switch back to the graph view\r\n @place_holder.add_to_or_view_on_graph(\"after switching back to 'Profiles' view after the nodes are deleted from the graph\")\r\n\r\n # Verify that all the deleted nodes stay deleted\r\n begin\r\n\r\n # Verify that the NPI node stays deleted (i.e. doesn't appear back on the graph) after it was deleted and the view is switched to 'Profiles' view and then back to the graph view\"\r\n assert_message = \"The existence/persistence of the deleted (non-profile) node <#{npi_node_message}> after it was deleted and the view is switched to 'Profiles' view and then back to the graph view\"\r\n @place_holder.assert_node_existence_on_graph(npi_node_vertex_id, false, assert_message)\r\n\r\n # Verify that the location node stays deleted (i.e. 
doesn't appear back on the graph) after it was deleted and the view is switched to 'Profiles' view and then back to the graph view\"\r\n assert_message = \"The existence/persistence of the deleted (non-profile) node <#{location1_node_message}> after it was deleted and the view is switched to 'Profiles' view and then back to the graph view\"\r\n @place_holder.assert_node_existence_on_graph(location1_node_vertex_id, false, assert_message)\r\n\r\n # Until dependency on uids is reduced, these two nodes are excluded\r\n begin\r\n ## Verify that the tax_id node stays deleted (i.e. doesn't appear back on the graph) after it was deleted and the view is switched to 'Profiles' view and then back to the graph view\"\r\n #assert_message = \"The existence/persistence of the deleted (non-profile) node <#{tax_id_node_message}> after it was deleted and the view is switched to 'Profiles' view and then back to the graph view\"\r\n #@place_holder.assert_node_existence_on_graph(tax_id_node_vertex_id, false, assert_message)\r\n #\r\n ## Verify that the phone node stays deleted (i.e. doesn't appear back on the graph) after it was deleted and the view is switched to 'Profiles' view and then back to the graph view\"\r\n #assert_message = \"The existence/persistence of the deleted (non-profile) node <#{phone_node_message}> after it was deleted and the view is switched to 'Profiles' view and then back to the graph view\"\r\n #@place_holder.assert_node_existence_on_graph(phone_node_vertex_id, false, assert_message)\r\n end\r\n\r\n end\r\n\r\n end", "def clean_product_specs!\n existing_specs = Set.new\n product_specs.each do |spec|\n if existing_specs.include?(spec.value_for_comparison)\n puts \" - #{spec}\"\n spec.destroy\n else\n existing_specs << spec\n end\n end\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Verifies that the provisos match test data
def verify_provisos(test_data)
  verify_values_match(test_data[CoreUseOfCollectionsData::PROVISOS.name], element_value(provisos_text_area))
end
[ "def verify_provisos(test_data)\n verify_values_match(test_data[UseOfCollections::PROVISOS.name], element_value(provisos_text_area))\n end", "def run_verification\n verify_data_format # runs through what we have and makes sure teh values are in the correct format\n verify_dependencies # makes sure that any 'optional' columns have any dependency columns present\n verify_order # finally, all columns must be in a specific order \n end", "def verify_result(test_data)\n verify_values_match(test_data[CoreUseOfCollectionsData::RESULT.name], element_value(result_text_area))\n end", "def verify_result(test_data)\n verify_values_match(test_data[UseOfCollections::RESULT.name], element_value(result_text_area))\n end", "def test_validity_of_examples\n @dfa_examples.each do |e|\n assert_equal(true, e.deterministic?)\n end\n @nfa_examples.each do |e|\n assert_equal(false, e.deterministic?)\n end\n end", "def a_test_was_verified?\n\t\tself.a_test_was_verified\n\t\t## so how do we do this ?\n=begin\n\t\tself.tests.select{|c|\n\t\t\t((c.changed_attributes.include? \"verification_done\") && (c.verification_done == Diagnostics::Test::VERIFIED))\n\t\t}.size > 0\n=end\n\tend", "def verify_object_info_data(data_set)\n logger.debug \"Checking object number #{data_set[CoreObjectData::OBJECT_NUM.name]}\"\n object_data_errors = []\n text_values_match?(data_set[CoreObjectData::OBJECT_NUM.name], element_value(object_num_input), object_data_errors)\n\n other_nums = data_set[CoreObjectData::OTHER_NUM.name]\n other_nums && other_nums.each do |num|\n index = other_nums.index num\n text_values_match?(num[CoreObjectData::NUM_VALUE.name], element_value(other_num_num_input index), object_data_errors)\n text_values_match?(num[CoreObjectData::NUM_TYPE.name], element_value(other_num_type_input index), object_data_errors)\n end\n\n num_objects = data_set[CoreObjectData::NUM_OBJECTS.name]\n num_objects && text_values_match?(num_objects.to_s, element_value(num_objects_input), object_data_errors)\n\n collection = data_set[CoreObjectData::COLLECTION.name]\n collection && text_values_match?(collection, element_value(collection_input), object_data_errors)\n\n resp_depts = data_set[CoreObjectData::RESPONSIBLE_DEPTS.name]\n resp_depts && resp_depts.each { |dept| text_values_match?(dept[CoreObjectData::RESPONSIBLE_DEPT.name], element_value(resp_dept_input resp_depts.index(dept)), object_data_errors) }\n\n pub_to_list = data_set[CoreObjectData::PUBLISH_TO_LIST.name]\n pub_to_list && pub_to_list.each { |pub| text_values_match?(pub[CoreObjectData::PUBLISH_TO.name], element_value(publish_to_input pub_to_list.index(pub)), object_data_errors) }\n\n status = data_set[CoreObjectData::RECORD_STATUS.name]\n status && text_values_match?(status, element_value(record_status_input), object_data_errors)\n\n inv_statuses = data_set[CoreObjectData::INVENTORY_STATUS_LIST.name]\n inv_statuses && inv_statuses.each { |stat| text_values_match?(stat[CoreObjectData::INVENTORY_STATUS.name], element_value(inventory_status_input inv_statuses.index(stat)), object_data_errors) }\n\n brief_descrips = data_set[CoreObjectData::BRIEF_DESCRIPS.name]\n brief_descrips && brief_descrips.each { |descrip| text_values_match?(descrip[CoreObjectData::BRIEF_DESCRIP.name], element_value(brief_desc_text_area brief_descrips.index(descrip)), object_data_errors) }\n\n dist_feat = data_set[CoreObjectData::DISTINGUISHING_FEATURES.name]\n dist_feat && text_values_match?(dist_feat, element_value(dist_features_text_area), object_data_errors)\n\n comments = 
data_set[CoreObjectData::COMMENTS.name]\n comments && comments.each { |comment| text_values_match?(comment[CoreObjectData::COMMENT.name], element_value(comment_text_area comments.index(comment)), object_data_errors) }\n\n titles = data_set[CoreObjectData::TITLE_GRP.name]\n titles && titles.each do |title|\n index = titles.index title\n text_values_match?(title[CoreObjectData::TITLE.name], element_value(title_input index), object_data_errors)\n text_values_match?(title[CoreObjectData::TITLE_TYPE.name], element_value(title_type_input index), object_data_errors)\n text_values_match?(title[CoreObjectData::TITLE_LANG.name], element_value(title_lang_input index), object_data_errors)\n\n translations = title[CoreObjectData::TITLE_TRANSLATION_SUB_GRP.name]\n translations && translations.each do |trans|\n sub_index = translations.index trans\n text_values_match?(trans[CoreObjectData::TITLE_TRANSLATION.name], element_value(title_translation_input [index, sub_index]), object_data_errors)\n text_values_match?(trans[CoreObjectData::TITLE_TRANSLATION_LANG.name], element_value(title_translation_lang_input [index, sub_index]), object_data_errors)\n end\n end\n\n obj_names = data_set[CoreObjectData::OBJ_NAME_GRP.name]\n obj_names && obj_names.each do |name|\n index = obj_names.index name\n text_values_match?(name[CoreObjectData::OBJ_NAME_NAME.name], element_value(object_name_input index), object_data_errors)\n text_values_match?(name[CoreObjectData::OBJ_NAME_CURRENCY.name], element_value(object_name_currency_input index), object_data_errors)\n text_values_match?(name[CoreObjectData::OBJ_NAME_LEVEL.name], element_value(object_name_level_input index), object_data_errors)\n text_values_match?(name[CoreObjectData::OBJ_NAME_SYSTEM.name], element_value(object_name_system_input index), object_data_errors)\n text_values_match?(name[CoreObjectData::OBJ_NAME_TYPE.name], element_value(object_name_type_input index), object_data_errors)\n text_values_match?(name[CoreObjectData::OBJ_NAME_LANG.name], element_value(object_name_lang_input index), object_data_errors)\n text_values_match?(name[CoreObjectData::OBJ_NAME_NOTE.name], element_value(object_name_note_input index), object_data_errors)\n end\n\n object_data_errors\n end", "def run_verify(target_dir)\n cmd_ver_obj = commands[:verify_command]\n cmd_ver_obj.run({ :target_dir => target_dir })\n if cmd_ver_obj.findings.size == 0\n puts 'OK, system is conforming to model'.color(:green)\n else\n puts 'ERROR, detected inconsistencies/errors.'.color(:red)\n # cmd_ver_obj.findings.each do |val_error|\n # puts val_error.to_s\n # end\n end\n end", "def verify_result(expect, model)\n puts \"running query: #{build_query(model)}\"\n result = search(build_query(model))\n assert_equal(expect.size, result.hit.size)\n expect.each_with_index do |expected_sub_scores,i|\n jsf = result.hit[i].field['summaryfeatures']\n sub_scores = extract_subscores(jsf, model.size)\n assert_equal(expected_sub_scores, sub_scores,\n \"subscores differ for hit #{i}: #{expected_sub_scores} != #{sub_scores}\")\n end\n end", "def test_hash_match\n v = Verify.new\n return_code = v.verify_second_pipeset('as3', 'as3')\n assert_equal 0, return_code\n end", "def consistency_checks\n unless certificate_serial_number == voucher_serial_number\n error_report << \"serial number mismatch certificate '#{certificate_serial_number}' vs '#{voucher_serial_number}'\"\n return false\n end\n # other tests here.\n return true\n end", "def test_hash_mismatch\n v = Verify.new\n return_code = v.verify_second_pipeset('1as3', '2as3')\n 
assert_equal 2, return_code\n end", "def test_appellation_validity\n assert @valid_appellation_1.valid?\n assert @valid_appellation_2.valid?\n end", "def verify_actual_vs_expected_production\n job.job_markings.each do |marking|\n puts marking.actual_production\n puts marking.amount\n if marking.actual_production > marking.amount\n SiteMailer.job_marking_production_over_expected(marking).deliver\n end\n end\n end", "def passed?(expected_result)\n reported_result = reported_result(expected_result['key'])\n ['denominator', 'numerator', 'exclusions'].each do |component|\n if reported_result[component] != expected_result[component]\n #puts \"reported: #{reported_result[component]} , expected: #{expected_result[component]}\"\n return false\n end\n end\n \n return true\n end", "def verify\n recipes.each { |rcp| rcp.verify }\n end", "def matches_expected? pi\n pi.patient_identifier == expected.patient_identifier &&\n pi.affinity_domain == expected.affinity_domain\n end", "def dataTest data\n failure = RubyUnit::AssertionFailure.new(data)\n assertEqual data, failure.data, 'Assertion data Hash is incorrect'\n end", "def test_cmp_genes_identity()\n truth_p = IO.readlines(\"#{get_data_dir}/truth.tab\").map do |l|\n IgValve::LabelPrediction.new(l)\n end\n pred_p = IO.readlines(\"#{get_data_dir}/perf_preds.tab\").map do |l|\n IgValve::LabelPrediction.new(l)\n end\n (0..truth_p.size-1).each do |i|\n retval = cmp_genes(truth_p[i], pred_p[i])\n [:V, :D, :J].each{|seg| assert_equal(retval[seg], true)}\n end\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Verifies that the result matches test data
def verify_result(test_data)
  verify_values_match(test_data[CoreUseOfCollectionsData::RESULT.name], element_value(result_text_area))
end
[ "def verify_result(test_data)\n verify_values_match(test_data[UseOfCollections::RESULT.name], element_value(result_text_area))\n end", "def verify_result(expect, model)\n puts \"running query: #{build_query(model)}\"\n result = search(build_query(model))\n assert_equal(expect.size, result.hit.size)\n expect.each_with_index do |expected_sub_scores,i|\n jsf = result.hit[i].field['summaryfeatures']\n sub_scores = extract_subscores(jsf, model.size)\n assert_equal(expected_sub_scores, sub_scores,\n \"subscores differ for hit #{i}: #{expected_sub_scores} != #{sub_scores}\")\n end\n end", "def compare_result(test, result)\n if @per_test_insert\n expected = translate_column_names(test['expected'])\n else\n expected = test['expected']\n end\n expected.each do |key, value|\n if value == 'ignore'\n next\n end\n\n if !compare_values(value, result[key])\n @num_fails += 1\n @failed_tests.push(test['name'])\n\n puts \" Expected: \\\"#{key}\\\": #{value}\"\n puts \" Result: \\\"#{key}\\\": #{result[key]}\"\n puts \" Status: Failed\"\n return\n end\n end\n\n @num_successes += 1\n puts \" Status: Passed\"\n end", "def passed?(expected_result)\n reported_result = reported_result(expected_result['key'])\n ['denominator', 'numerator', 'exclusions'].each do |component|\n if reported_result[component] != expected_result[component]\n #puts \"reported: #{reported_result[component]} , expected: #{expected_result[component]}\"\n return false\n end\n end\n \n return true\n end", "def verify_operation_result(actual)\n expected = test_instance.outcome['result']\n if expected.is_a?(Array)\n if expected.empty?\n expect(actual).to be_empty\n else\n expected.each_with_index do |expected_elt, i|\n verify_result(expected_elt, actual[i])\n end\n end\n else\n verify_result(expected, actual)\n end\n end", "def validate_results(actual_result, expect_result)\n total = 0\n pass_count = 0\n fail_count = 0\n case_idx = 0\n case_ord = 0\n\n @cfg_env= get_testset_cfg(@cfg['testset'])\n\n @log.info \"************************************************************\"\n @log.info \"***** Test results validation for each test case begin *****\"\n @log.info \"************************************************************\"\n puts \"************************************************************\"\n puts \"***** Test results validation for each test case begin *****\"\n puts \"************************************************************\"\n if actual_result == nil or expect_result ==nil\n raise StandardError.new('Missing actual result or expected result!')\n end\n\n fp_expect = File.open(expect_result, \"rb\")\n exp_lines = fp_expect.readlines\n fp_expect.close\n\n fp_actual = File.open(actual_result, \"rb\")\n act_lines = fp_actual.readlines\n fp_actual.close\n\n if act_lines.length == exp_lines.length\n i=0\n while i < act_lines.length\n exp_line = JSON.parse(exp_lines[i])\n act_line = JSON.parse(act_lines[i])\n if exp_line == act_line\n# @log.result('pass')\n print \"\\e[32mPASS\\e[0m:\"\n case_title = nil\n exp_line.each { |key, val| case_title=key }\n if @aid_methods.include?(case_title)\n case_ord+=1\n @log.info \"PASS: #{act_line}\"\n puts \"#{act_line}\"\n else\n pass_count+=1; case_idx+=1\n @log.info \"PASS: #{act_line}\"\n puts \"#{act_line}\"\n end\n else\n print \"\\e[31mFAIL\\e[0m:\"\n case_title = nil\n exp_line.each { |key, val| case_title=key }\n if @aid_methods.include?(case_title)\n case_ord+=1\n @log.error \"FAIL: ord:#{case_ord} #{case_title}\"\n puts \" ord:#{case_ord} #{case_title}\"\n else\n fail_count+=1; case_idx+=1\n 
@log.error \"FAIL: idx:#{case_idx} #{case_title}:\"\n puts \" idx:#{case_idx} #{case_title}:\"\n end\n @log.error \" expect value is: #{exp_line}\"\n @log.error \" actual value is: #{act_line}\"\n puts \" expect value is: #{exp_line}\"\n puts \" actual value is: #{act_line}\"\n end\n i+=1\n end\n elsif @cfg['run_cases']!=[]\n\n act_lines.each do |act_line|\n\n act_line_parse = JSON.parse(act_line)\n\n exp_line = find_expect_line(act_line_parse, exp_lines)\n\n if exp_line == act_line_parse\n pass_count+=1 if act_line_parse.keys.include?('idx')\n print \"\\e[32mPASS\\e[0m:\"\n @log.info \"PASS: #{act_line_parse}\"\n puts \" #{act_line_parse}\"\n elsif exp_line == nil\n print \"\\e[31mFAIL\\e[0m:\"\n @log.error \"FAIL: #{act_line_parse}\"\n @log.error \" Cannot find expected value for the line: #{act_line_parse}\"\n puts \" #{act_line_parse}\"\n puts \" Cannot find expected value for the line: #{act_line_parse}\"\n else\n fail_count+=1 if act_line_parse.keys.include?('idx')\n print \"\\e[31mFAIL\\e[0m:\"\n @log.error \" #{act_line_parse}\"\n @log.error \" expect value is: #{exp_line}\"\n @log.error \" actual value is: #{act_line_parse}\"\n puts \" #{act_line_parse}\"\n puts \" expect value is: #{exp_line}\"\n puts \" actual value is: #{act_line_parse}\"\n end\n end\n else\n @log.error \"actual length: #{act_lines.length}; expect length:#{exp_lines.length}\"\n puts \"actual length: #{act_lines.length}; expect length:#{exp_lines.length}\"\n raise StandardError.new('Expected result lines are not equal to Actual result lines')\n end\n total = fail_count + pass_count\n @log.info \"RSLT: {\\\"total\\\"=>#{total}, \\\"pass\\\"=>#{pass_count}, \\\"fail\\\"=>#{fail_count}}\"\n @log.info \"TIME: #{ Time.at(Time.now - @start_time).utc.strftime(\"%H:%M:%S\") }\"\n @log.info \"************************************************************\"\n @log.info \"***** Test results validation for each test case end *****\"\n @log.info \"************************************************************\"\n puts \"RSLT: {\\\"total\\\"=>#{total}, \\\"pass\\\"=>#{pass_count}, \\\"fail\\\"=>#{fail_count}}\"\n puts \"TIME: #{ Time.at(Time.now - @start_time).utc.strftime(\"%H:%M:%S\") }\"\n puts \"************************************************************\"\n puts \"***** Test results validation for each test case end *****\"\n puts \"************************************************************\"\n end", "def verify_operation_result(actual)\n expected = test_instance.expected_outcome.result\n if expected.is_a?(Array)\n if expected.empty?\n expect(actual).to be_empty\n else\n expected.each_with_index do |expected_elt, i|\n verify_result(expected_elt, actual[i])\n end\n end\n else\n verify_result(expected, actual)\n end\n end", "def dataTest data\n failure = RubyUnit::AssertionFailure.new(data)\n assertEqual data, failure.data, 'Assertion data Hash is incorrect'\n end", "def verify_operation_result(expected, actual)\n if expected.is_a?(Array)\n if expected.empty?\n expect(actual).to be_empty\n else\n expected.each_with_index do |expected_elt, i|\n # If the YAML spec test does not define a result,\n # do not assert the operation's result - the operation may\n # have produced a result, the test just does not care what it is\n if expected_elt\n verify_result(expected_elt, actual[i])\n end\n end\n end\n else\n verify_result(expected, actual)\n end\n end", "def dataset_data_match?(domo_client, dataset_id, expected_data, should_fail=false)\n data = export_dataset(domo_client, dataset_id)\n\n if data.nil?\n unless expected_data.nil?\n 
puts \"Got no data back from Domo.\"\n puts \"Expected data: #{expected_data}\"\n return false\n end\n return true\n end\n\n if expected_data.is_a? Hash\n return false unless data.size == 1\n data = data[0]\n end\n\n # Sort the expected and actual data so we don't go chasing down row order differences.\n unless data.is_a? Hash\n data.sort! { |a,b| b[\"Event Name\"] <=> a[\"Event Name\"] }\n end\n unless expected_data.is_a? Hash\n expected_data.sort! { |a,b| b[\"Event Name\"] <=> a[\"Event Name\"] }\n end\n\n unless data == expected_data\n missing_data = Array.new\n expected_data.each do |d|\n unless data.include? d\n missing_data << d\n end\n end\n unless should_fail\n puts \"-----\"\n puts \"Actual data length: #{data.length}\"\n puts \"Expected data length: #{expected_data.length}\"\n puts \"-----\"\n puts \"Missing Data\"\n puts missing_data\n puts \"-----\"\n puts \"Actual Data\"\n puts data\n puts \"-----\"\n end\n return false\n end\n true\n end", "def quick_validate\n puts \"\\nDoing some validation\"\n expected_diff = '010700000000000000' # ?\n IMPORT_TABLES.each_key do |t|\n GeographicAreasGeographicItem.where(data_origin: t.to_s).limit(9).each do |i|\n if i.geographic_item.valid_geometry?\n a = \"SELECT St_AsBinary(geom) FROM #{i.data_origin} WHERE gid = #{i.origin_gid}\"\n b = \"SELECT St_AsBinary(multi_polygon) FROM geographic_items WHERE id = #{i.geographic_item_id}\"\n sql1 = \"SELECT St_SymDifference((#{a}), (#{b}));\"\n r = ApplicationRecord.connection.execute(sql1).first['St_SymDifference'].to_s\n if r == expected_diff\n puts \"#{i.data_origin} data matching\"\n else\n puts \"#{i.data_origin} data not matching\"\n puts a\n puts b\n end\n end\n end\n end\n end", "def equal(expected); end", "def test_verify_prev_hash_match_valid\n map = create_maps(@full)\n assert_equal true, verify_prev_hash_match(map)\n end", "def assert_results(expected, actual, message = nil)\n assert_equal(expected.size, actual.size, \"Size of results. #{message}\")\n expected.each_with_index do |result, index|\n assert_equal((index + 1).to_s, actual[index].place.to_s, \"place for #{result}. #{message}\")\n assert_equal(result.person, actual[index].person, \"person for #{result}. #{message}\")\n assert_equal(result.time, actual[index].time, \"time for #{result}. 
#{message}\")\n end\n end", "def test_validity_of_examples\n @dfa_examples.each do |e|\n assert_equal(true, e.deterministic?)\n end\n @nfa_examples.each do |e|\n assert_equal(false, e.deterministic?)\n end\n end", "def chk_test (correct_contact,info_request, expected_result = :match)\n # Get the number of records in contacts before we run our test\n num_existing_recs = Contact.all.size\n\n resulting_contact = Contact.match_contact_from_info_request(info_request)\n\n if expected_result == :new\n assert_not_nil resulting_contact, \"[#{expected_result}]: Expecting an instance of Contact \"\n assert_instance_of( Contact, resulting_contact, \"[#{expected_result}]: Results not an Contact class\" )\n assert_equal num_existing_recs + 1, Contact.all.size, \"[#{expected_result}]: Should create new row in contacts\"\n elsif expected_result == :match\n assert_not_nil resulting_contact, \"[#{expected_result}]: Expecting an instance of Contact\"\n assert_instance_of( Contact, resulting_contact, \"[#{expected_result}]: Results not an Contact class\" )\n assert_equal num_existing_recs, Contact.all.size, \"[#{expected_result}]: Should be no new rows in contacts\"\n else\n assert_nil resulting_contact, \"[#{expected_result}]: Should not return an instance of Contact\"\n assert_equal num_existing_recs, Contact.all.size, \"[#{expected_result}]: Should be no new rows in contacts\"\n end\n\n\n\n ## -- Test the Results -----------------------------------------------------\n if expected_result == :new || expected_result == :match\n assert_not_nil resulting_contact.id, \"match id should not be null\"\n assert_equal correct_contact.email_address, resulting_contact.email_address, \"unexpected email_address\"\n assert_equal correct_contact.phone, resulting_contact.phone, \"unexpected phone\"\n assert_equal correct_contact.company, resulting_contact.company, \"unexpected company\"\n assert_equal correct_contact.contact_method, resulting_contact.contact_method, \"unexpected contact_method\"\n assert_equal correct_contact.name_first, resulting_contact.name_first, \"unexpected name_first\"\n assert_equal correct_contact.name_last, resulting_contact.name_last, \"unexpected name_last\"\n assert_equal correct_contact.title, resulting_contact.title, \"unexpected title\"\n end\n\n end", "def assert_results(expected, actual, message = nil)\n assert_equal(expected.size, actual.size, \"Size of results. #{message}\")\n expected.each_with_index {|result, index|\n assert_equal((index + 1).to_s, actual[index].place.to_s, \"place for #{result}. #{message}\")\n assert_equal(result.racer, actual[index].racer, \"racer for #{result}. #{message}\")\n assert_equal(result.time, actual[index].time, \"time for #{result}. #{message}\")\n }\n end", "def assert_results(expected, actual, message = nil)\n assert_equal(expected.size, actual.size, \"Size of results. #{message}\")\n expected.each_with_index {|result, index|\n assert_equal((index + 1).to_s, actual[index].place.to_s, \"place for #{result}. #{message}\")\n assert_equal(result.person, actual[index].person, \"person for #{result}. #{message}\")\n assert_equal(result.time, actual[index].time, \"time for #{result}. #{message}\")\n }\n end", "def test_hash_mismatch\n v = Verify.new\n return_code = v.verify_second_pipeset('1as3', '2as3')\n assert_equal 2, return_code\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
OBLIGATIONS Clicks the obligations fulfilled checkbox, though it knows nothing about whether it is checking or unchecking
def click_obligations_fulfilled
  wait_for_element_and_click obligations_input
end
[ "def check\n click unless checked?\n end", "def check\r\n self.click unless checked?\r\n end", "def offenses_to_check; end", "def email_consol; det.checkbox(:id, 'enableEmailConsolidation'); end", "def click(event)\n status.present = event.target.checked\n post_update()\n end", "def withdraw\n self.if_status = 4\n self.active_agreements.each { |ia| ia.withdraw }\n end", "def uncheck\n click unless unchecked?\n end", "def enable_require_ingredient\n change_ingredient_status_link.click\n ingredient_status_select.select('Yes')\n submit_ingredient_status_btn.click\n wait_until{ !submit_ingredient_status_btn.visible? }\n end", "def make_insurance_choice\n self.student_choice = :insurance_choice\n save!\n CourseSelection.decline_all(self.application)\n end", "def test_FieldValidationQuestionAplicantTrue\n Common.login(Users::USER_EMAIL, Users::PASSWORD) \n Common.goToTab(HomePage::STANDARD_QUESTIONS_TAB_XPATH)\n $wait.until {\n $browser.find_element(:xpath, StandardQuestions::NEW_BUTTON_XPATH).displayed?\n }\n Common.click_and_load(StandardQuestions::NEW_BUTTON_XPATH)\n $wait.until {\n $browser.find_element(:xpath, NewStandardQuestion::SAVE_BUTTON_XPATH).displayed?\n }\n $browser.find_element(:xpath, NewStandardQuestion::REQUIRED_BY_APPLICANT_CHECKBOX_XPATH).click\n $browser.find_element(:xpath, NewStandardQuestion::SAVE_BUTTON_XPATH).click\n assert $wait.until {\n $browser.find_element(:xpath, NewStandardQuestion::ERROR_MESSAGE_XPATH).displayed?\n }\n end", "def uncheck\r\n self.click if checked?\r\n end", "def consent_checkbox\n # unit_test_no_generate: consent_checkbox, input.className(create_ats_regex_string(\"ats-consentbox\"))\n $tracer.trace(__method__)\n return ToolTag.new(input.className(create_ats_regex_string(\"ats-consentbox\")), __method__)\n end", "def data_com_checked!\n update_attributes(data_com_checked_c: true)\n end", "def check_show_only_if_condition\n frm.checkbox(:id=>\"cbCondition_0\")\n end", "def add_obligations\n \tself.trip.expenses.where(:is_loan => false).each do |e|\n \t\tuser.add_obligation(e, \"Expense Obligation\", e.cost / self.trip.members.size)\n user.add_obligation(e, \"Tip Obligation\", e.tip / self.trip.members.size, false) unless e.tip.nil? || e.tip.zero?\n e.reaverage_obligations\n \tend\n end", "def change_notcontacted_to_recommended\r\n\t self.check_first_task_checkbox\r\n\t self.choose_task_action_element.when_present.select \"Recommended\"\r\n\t self.update_task_status_element.when_present.click\r\n\t\t\tself.select_contacted_element.when_present.click\r\n\t\t\tself.next_btn_element.when_present.click\r\n self.select_email_to_element.when_present.value=\"test_km1@capvision.com\"\r\n self.send_keys(:enter)\r\n self.send_email_element.when_present.click\r\n self.recommend_continue_element.when_present.click\r\n\tend", "def set_eligibility\n update_attribute_without_callbacks(:eligible,\n mandatory ||\n (amount != 0 && eligible_for_originator?))\n end", "def toggle_button_checklist(name, opts) end", "def sponsor_all_agreed\n ethics == true and code_of_conduct == true and sponsor_terms == true\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
return the line and column of a given offset into this string; line and column are 1-based
def line_column(string, offset) return 1,1 if string.length==0 || offset==0 lines = (string[0..offset-1] + " ").split("\n") return lines.length, lines[-1].length end
[ "def get_line_and_column_from_chunk(offset)\n if offset.zero?\n return [@chunk_line, @chunk_column]\n end\n\n string =\n offset >= @chunk.size ? @chunk : @chunk[0..offset-1]\n\n line_count = string.count(\"\\n\")\n\n column = @chunk_column\n if line_count > 0\n lines = string.split(\"\\n\")\n column = lines.empty? ? 0 : lines[-1].size\n else\n column += string.size\n end\n\n [@chunk_line + line_count, column]\n end", "def line_offset(pos=pos())\n p = 0\n string.each_line do |line|\n len = line.length\n return (pos - p) if p + len >= pos\n p += len\n end\n 0\n end", "def line_offset(pos=pos)\n p = 0\n string.each_line do |line|\n len = line.length\n return (pos - p) if p + len >= pos\n p += len\n end\n 0\n end", "def line_char_to_offset(text, line, character); end", "def get_offset line, col\r\n CodeMap.get_offset @code, line, col\r\n end", "def location_of text, offset, file\n last_newline = text[0, offset].rindex \"\\n\"\n start_of_line = last_newline ? last_newline + 1 : 0\n\n line_text = text[start_of_line..offset]\n\n full_path = File.expand_path file.absolute_name, @source_dir\n\n file_content = RDoc::Encoding.read_file full_path, @encoding\n\n raise \"[bug] Unable to read #{full_path}\" unless file_content\n\n file_content.each_line.with_index do |line, index|\n if line =~ /#{Regexp.escape line_text}/ then\n column = $`.length + line_text.length\n return index, column\n end\n end\n\n # TODO typos in include file\n\n nil\n end", "def line_and_column(pos, io)\n @pos = pos\n io.pos = 0\n return line_and_column0(io)\n end", "def line_and_column(position = T.unsafe(nil)); end", "def to_offset(text, position); end", "def pos\n locatable.locator.pos_on_line(offset)\n end", "def line_for_offset(offset)\n if line_nbr = line_index.index {|x| x > offset}\n return line_nbr\n end\n # If not found it is after last\n return line_index.size\n end", "def token_pos offset\n [offset - @line_pos, @line]\n end", "def offset\n\t\tOffset.new(@ida, @str.ea)\n\tend", "def column_number()\n if line_number() == 1 then\n return @position + 1\n else\n return @position - @line_endings[0]\n end\n end", "def character_at(source_position, offset = 0)\n actual_line = source_position.line - 1\n actual_offset = source_position.offset + offset - 1\n\n # Return a newline if offset points at the very end of the line\n return \"\\n\" if actual_offset == engine.lines[actual_line].length\n\n engine.lines[actual_line][actual_offset]\n end", "def line_and_column\n if @__source_position__\n return @__source_position__.line, @__source_position__.column\n end\n end", "def ln_col_from_offset(newlines_array, offset)\n preceding_newlines = newlines_array\n .select { |newline| newline <= offset }\n ln = preceding_newlines.length + 1\n line_start_offset = preceding_newlines.last || -1\n col = offset - line_start_offset\n [ln, col]\n end", "def byte_offset(loc) loc.y * line_byte_size + loc.x * pixel_byte_size; end", "def position\n\t\t[@line, @column]\n\tend" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Takes an array of Strings and Regexps and generates a new Regexp that matches the or ("|") of all the strings and Regexps
def array_to_or_regexp_string(array) array = symbols_to_strings array.flatten array = sort_operator_patterns array array = regexp_and_strings_to_regexpstrings array array.collect {|op| "(#{op})"}.join('|') end
[ "def |(re)\n self.or(re)\n end", "def or(matcher); end", "def build_regex(fields)\n fields_or = fields.map { |field| \"#{field}(\\\\[\\\\])?\" }.join('|')\n\n Regexp.new(\"^#{fields_or}$\")\n end", "def build_string_regex(elements)\n\n starting_with = \"\"\n elements.each_with_index do |el, i|\n starting_with += el.starts_with.to_s\n if i < elements.size - 1\n starting_with += \"|\"\n end\n end\n\n string_regex = /\\A[^\\s]+?(?=(#{starting_with}))/\n end", "def bots_to_regexp\n Regexp.union(bots)\n end", "def array_to_regexp(arr)\n arr = arr.map { |x| \"(.{\" + x + \"}?) \" }\n arr.push('/').unshift('/')\n structure_reg = to_regexp(arr.join(\"\"))\n end", "def build_regexp(list)\n r = []\n list.each do |itm|\n r << \"#{ASFSVN.source}\\['#{itm}']\"\n end\n return Regexp.union(r)\nend", "def logicalOR(array, true_value, false_value)\n\t\tfor a in array\n\t\t\tif a == true_value # if value is true input value\n\t\t\t\treturn true_value\n\t\t\tend\n\t\tend\n\t\treturn false_value\n\tend", "def |( other )\n\t\treturn Regexp.new( \"(?:%s|%s)\" % [self.to_s, other.to_s] )\n\tend", "def split_by_or_token(tokens)\n expressions = [[]]\n tokens.each do |token|\n if token.or?\n expressions << []\n else\n expressions.last << token\n end\n end\n expressions.select { |e| e.size > 0 }\n end", "def array_or_one_literal_re(el = literal_re)\n /#{el} |\n \\[\\s* (?:#{el}\\s*,\\s*)* #{el} \\s*\\]/x\n end", "def array_or(a,b)\n c = []\n for i in 0...a.length\n if a[i] or b[i]\n c[i] = true\n else\n c[i] = false\n end\n end\n c\nend", "def |(matcher)\n OrMatcher.new([self,matcher])\n end", "def joinor(arr, delimiter = ', ', conjunction = 'or')\n array_of_strings = []\n if arr.size == 1\n arr[0].to_s\n elsif arr.size == 2\n arr.join(\" #{conjunction} \")\n else\n array_of_strings << arr[0..-3].join(\"#{delimiter}\")\n array_of_strings << arr[-2..-1].join(\"#{delimiter}#{conjunction} \")\n array_of_strings.join(\"#{delimiter}\")\n end\nend", "def regexify(words, ignore:[])\n if block_given?\n return Regexp.union(words.reject { |x| ignore.include?(x) || yield(x) })\n else\n return Regexp.union(words.reject { |x| ignore.include?(x) })\n end\n end", "def pattern_escape *patterns\n escaped_patterns = []\n patterns.each do |pattern|\n if (pattern.is_a?(String))\n pattern = Regexp.new(Regexp.escape(pattern))\n elsif (! 
pattern.is_a?(Regexp))\n raise \"Don't know how to match on a #{pattern.class}\"\n end\n escaped_patterns.push(pattern)\n end\n escaped_patterns\n end", "def create_reg_exp(sport)\n sport_regexp = \"\"\n sport_chars = sport.split \"\"\n sport_chars.each_index do |i|\n # regular expression contains substrings of various lengths\n sport_regexp += \"|\" + \"#{sport_chars[i]}#{sport_chars[i + 1]}#{sport_chars[i + 2]}#{sport_chars[i+3]}\" if i < sport.length - 3\n sport_regexp += \"|\" + \"#{sport_chars[i]}#{sport_chars[i + 1]}#{sport_chars[i + 2]}\" if i < sport.length - 2\n sport_regexp += \"|\" + \"#{sport_chars[i]}#{sport_chars[i + 1]}\" if i < sport.length - 1\n sport_regexp += \"|\" + \"#{sport_chars[i]}\" if i < sport.length\n end\n Regexp.new sport_regexp[1, sport_regexp.length]\nend", "def test_extended_patterns_no_flags\n [\n [ \".*\", \"abcd\\nefg\", \"abcd\" ],\n [ \"^a.\", \"abcd\\naefg\", \"ab\" ],\n [ \"^a.\", \"bacd\\naefg\", \"ae\" ],\n [ \".$\", \"bacd\\naefg\", \"d\" ]\n ].each do |reg, str, result|\n m = RustRegexp.new(reg).match(str)\n puts m.inspect\n unless m.nil?\n assert_equal result, m[0]\n end\n end\n end", "def buildStopwordRegExPattern words\n\t\tpattern = Array.new\n\t\twords.each do |word|\n\t\t\tpattern.push '\\\\b'+word+'\\\\b'\n\t\tend\n\t\treturn Regexp.new(pattern.join(\"|\"), Regexp::IGNORECASE)\n\tend" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
generates a tree of nodes of the specified node_class. The nodes have access to the following useful methods: self.left -> returns the left operand parse-tree node; self.right -> returns the right operand parse-tree node; self.operator_node -> returns the operator parse-tree node; self.operator -> returns the operator as a Ruby symbol
def generate_tree(operands, operators, parent_node) return operands[0] if operands.length==1 i = index_of_lowest_precedence(operators) operator = operators[i] new_operand = node_class.new(parent_node) new_operand.add_match generate_tree(operands[0..i], operators[0..i-1],new_operand), :left new_operand.add_match operators[i], :operator_node new_operand.add_match generate_tree(operands[i+1..-1], operators[i+1..-1],new_operand), :right new_operand end
[ "def generate_tree\n root =\tTreeNode.new(3)\n root.left =\tTreeNode.new(9)\n right = \t\tTreeNode.new(20)\n right.left = \tTreeNode.new(15)\n right.right = TreeNode.new(7)\n root.right = \tright\n root\nend", "def operators(node) #only want to print nodes that are operators (have children)\n return if node == nil\n operators(node.left)\n if node.left != nil || node.right != nil\n print node.value + \" \"\n end\n operators(node.right)\n end", "def get_tree\n n8 = Node.new(8)\n n9 = Node.new(9, n8, nil)\n n4 = Node.new(4)\n n7 = Node.new(7, n4, n9)\n n20 = Node.new(20)\n n11 = Node.new(11)\n n15 = Node.new(15, n11, n20)\n n10 = Node.new(10, n7, n15)\n end", "def terminalnode_class\n PTree::TerminalNode\n end", "def to_tree\n Node2.new(@val,@left,@right)\n end", "def build_node(*args)\n node_class.new(*args)\n end", "def build_node(*args)\n node_for(args[1]) || node_class.new(*args)\n end", "def create_tree(aRootNode)\n return Rley::PTree::ParseTree.new(aRootNode)\n end", "def create_node_class\n class_name = \"#{parser.module_name}_#{name}_node\".camelize\n parser.const_set class_name, Class.new(RuleNode)\n end", "def latex_qtree\n \"\\\\Tree \" + to_s(delim='[]', nonterm_prefix='.')\n end", "def gen\n expected_symbol = gen_expected_symbol\n fix_method_name = gen_fix_method_name\n\n # Skip node class generation if defined explicitly\n unless @node_class\n aliases = @aliases\n class_target = @class_target\n expected_len = @expected_len\n named = @named\n node_class_name = gen_node_class_name\n if const_defined? node_class_name\n # Add to existing node class\n cls = const_get node_class_name\n else\n # Completely new node class\n cls = Object.const_set node_class_name, Class.new(Node)\n end\n\n cls.class_eval do\n\n # Constructor\n define_method :initialize do\n |ruby_node, children|\n super()\n @ruby_node = ruby_node\n expect_len(expected_len, children) if expected_len\n if respond_to? :transform_children\n children = transform_children children\n end\n assign_children children\n end\n\n # For named methods add name to to_s()\n if named\n define_method :to_s do\n \"#{node_class_name}(#{name})\"\n end\n else\n define_method :to_s do\n node_class_name\n end\n end\n\n # Getters and setters for aliases\n aliases.each do\n |a|\n self.send :define_method, a.new_name do\n self.send a.old_name\n end\n self.send :define_method, (a.new_name.to_s + '=') do\n |value|\n self.send a.old_name.to_s + '=', value\n end\n end\n\n if class_target\n define_method :cls do\n self\n end\n end\n\n if named\n define_method :name do\n child.load_name\n end\n define_method :load_name do\n child.load_name\n end\n end\n end\n end\n\n node_class = gen_node_class\n\n # Generate the UnprocessedNode#fix_xyz()\n UnprocessedNode.send :define_method, fix_method_name do\n return self unless symbol? expected_symbol.to_s\n return node_class.new @ruby_node, children\n end\n end", "def make_node(type, *args)\n elem = type.new self, *args\n @nodes << elem\n self.core_node ||= elem.id\n elem.expand\n elem\n end", "def initialize(inputs,operators)\n self.root = Node.new\n self.root.build(inputs,operators)\n self.answer = nil\n end", "def instantiate_node(node_type,*args)\n if node_type.respond_to? 
:new \n node_type.new(*args)\n else\n SyntaxNode.new(*args).extend(node_type)\n end\n end", "def to_treenode\n tm_assert{ @tag }\n node = TreeNode.new( @tag, nil, @xml_attrs )\n node.__add_children__( @configure.__children__ )\n node\n end", "def build_tree(data_set=data)\n\t\tnode_params = get_optimal_params(data_set, entropy(data_set))\n\t\t\n\t\tif node_params[:best_gain] > 0\n\t\t\tcreate_branch_node(node_params)\n\t\telse\n\t\t\tcreate_leaf_node(data_set)\n\t\tend\n\n\tend", "def value\n case @token_type\n when :num \n @value\n when :plus\n @left.value + @right.value\n when :minus\n @left.value - @right.value\n when :mult\n @left.value * @right.value\n when :div\n @left.value / @right.value\n when :mod\n @left.value % @right.value\n when :neg\n @left.value * -1\n when :pow\n @left.value ** @right.value\n when :fact\n fact(@left.value.floor)\n else\n raise SyntaxError.new(\"Undefined operator in tree: #{self}\")\n end\n end", "def tree(data_path, options = {})\n\t\t\t\tresult = \"\"\n\n\t\t\t\t# Unique hash\n\t\t\t\t@hash = Digest::SHA1.hexdigest(data_path.to_s)\n\n\t\t\t\t# Options\n\t\t\t\t@options = options.nil? ? {} : options\n\n\t\t\t\t# Clipboard\n\t\t\t\tif @options[:clipboard_attrs]\n\t\t\t\t\tclipboard = true\n\t\t\t\t\t@options[:clipboard_attrs] = [@options[:clipboard_attrs]] if !@options[:clipboard_attrs].is_a?(Array)\n\t\t\t\t\tclipboard_attrs_js = \"[\" + @options[:clipboard_attrs].map { |item| \"'#{item}'\" }.join(\",\") + \"]\"\n\t\t\t\telse\n\t\t\t\t\tclipboard = false\n\t\t\t\t\tclipboard_attrs_js = \"[]\"\n\t\t\t\tend\n\n\t\t\t\t# Actions\n\t\t\t\tif @options[:actions]\n\t\t\t\t\tactions_js = \"[\"\n\t\t\t\t\toptions[:actions].each do |key, action|\n\t\t\t\t\t\tactions_js += %{\n\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\turl: '#{action[:path] ? @path_resolver.resolve(action[:path], \":id\") : \"\"}',\n\t\t\t\t\t\t\t\ticon: '#{action[:icon]}',\n\t\t\t\t\t\t\t\tlabel: '#{action[:label]}',\n\t\t\t\t\t\t\t\tcollapsed: #{action[:collapsed] == true ? \"true\" : \"false\"},\n\t\t\t\t\t\t\t\tstyle: '#{action[:style] ? action[:style] : \"default\"}',\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t}\n\t\t\t\t\tend\n\t\t\t\t\tactions_js += \"]\"\n\t\t\t\telse\n\t\t\t\t\tactions_js = \"[]\"\n\t\t\t\tend\n\n\t\t\t\t# Parent\n\t\t\t\tparent = (options[:parent] ? options[:parent] : nil)\n\n\t\t\t\t# Save state\n\t\t\t\tsave_state = (options[:save_state] ? options[:save_state] : :simple)\n\n\t\t\t\t# Application JS\n\t\t\t\tresult += @template.javascript_tag(%{\n\t\t\t\t\tvar rug_tree_#{@hash} = null;\n\t\t\t\t\t$(document).ready(function() {\n\t\t\t\t\t\trug_tree_#{@hash} = new RugTree('#{@hash}', {\n\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t// Model\n\t\t\t\t\t\t\tmodel: 'node',\n\n\t\t\t\t\t\t\t// State\n\t\t\t\t\t\t\tsaveState: '#{save_state.to_s}',\n\n\t\t\t\t\t\t\t// Parent element\n\t\t\t\t\t\t\tparent: '#{parent.to_s}',\n\n\t\t\t\t\t\t\t// Icons\n\t\t\t\t\t\t\tclosedIcon: '#{@icon_builder.render(@options[:closed_icon] ? @options[:closed_icon] : \"chevron-right\")}',\n\t\t\t\t\t\t\topenedIcon: '#{@icon_builder.render(@options[:opened_icon] ? @options[:opened_icon] : \"chevron-down\")}',\n\n\t\t\t\t\t\t\t// Show\n\t\t\t\t\t\t\tshow: #{check_show(@options) ? 'true' : 'false'},\n\t\t\t\t\t\t\tshowEvent: '#{@options[:show_event] && @options[:show_event].to_sym == :double_click ? \"dblclick\" : \"click\"}',\n\t\t\t\t\t\t\tshowUrl: '#{@path_resolver.resolve(@options[:paths][:show], \":id\")}',\n\n\t\t\t\t\t\t\t// Create\n\t\t\t\t\t\t\tcreate: #{check_create(@options) ? 
'true' : 'false'}, \n\t\t\t\t\t\t\tcreateUrl: '#{@path_resolver.resolve(@options[:paths][:create])}',\n\t\t\t\t\t\t\tcreateIcon: '#{@icon_builder.render(@options[:update_icon] ? @options[:update_icon] : \"plus\")}',\n\t\t\t\t\t\t\tcreateLabel: '#{I18n.t(\"general.action.create_child\").upcase_first}',\n\t\t\t\t\t\t\tcreateActionCollapsed: #{@options[:create_action_collapsed] == true ? 'true' : 'false'}, \n\t\t\t\t\t\t\tcreateSuccessMessage: '#{I18n.t(\"general.messages.create.success\")}',\n\n\t\t\t\t\t\t\t// Update\n\t\t\t\t\t\t\tupdate: #{check_update(@options) ? 'true' : 'false'}, \n\t\t\t\t\t\t\tupdateUrl: '#{@path_resolver.resolve(@options[:paths][:update], \":id\")}', \n\t\t\t\t\t\t\tupdateIcon: '#{@icon_builder.render(@options[:update_icon] ? @options[:update_icon] : \"pencil\")}',\n\t\t\t\t\t\t\tupdateLabel: '#{I18n.t(\"general.action.update\").upcase_first}',\n\t\t\t\t\t\t\tupdateActionCollapsed: #{@options[:update_action_collapsed] == true ? 'true' : 'false'}, \n\t\t\t\t\t\t\tupdateSuccessMessage: '#{I18n.t(\"general.messages.create.success\")}',\n\n\t\t\t\t\t\t\t// Destroy\n\t\t\t\t\t\t\tdestroy: #{check_destroy(@options) ? 'true' : 'false'}, \n\t\t\t\t\t\t\tdestroyUrl: '#{@path_resolver.resolve(@options[:paths][:destroy], \":id\")}', \n\t\t\t\t\t\t\tdestroyIcon: '#{@icon_builder.render(@options[:update_icon] ? @options[:update_icon] : \"trash\")}',\n\t\t\t\t\t\t\tdestroyLabel: '#{I18n.t(\"general.action.destroy\").upcase_first}',\n\t\t\t\t\t\t\tdestroyActionCollapsed: #{@options[:destroy_action_collapsed] == true ? 'true' : 'false'}, \n\t\t\t\t\t\t\tdestroyConfirmMessage: '#{I18n.t(\"general.are_you_sure\")}',\n\t\t\t\t\t\t\tdestroySuccessMessage: '#{I18n.t(\"general.messages.destroy.success\")}',\n\n\t\t\t\t\t\t\t// Moving\n\t\t\t\t\t\t\tmoving: #{check_moving(@options) ? 'true' : 'false'},\n\t\t\t\t\t\t\tmovingUrl: '#{@path_resolver.resolve(@options[:paths][:move], \":id\", \":relation\", \":destination_id\")}',\n\t\t\t\t\t\t\n\t\t\t\t\t\t\t// Type\n\t\t\t\t\t\t\ttypeIconTemplate: '#{@icon_builder.render(\":icon\", class: \"jqtree-icon\")}',\n\t\t\t\t\t\t\ttypeIconAttr: '#{@options[:type_icon_attr]}',\n\n\t\t\t\t\t\t\t// Actions\n\t\t\t\t\t\t\tactions: #{actions_js},\n\t\t\t\t\t\t\tactionsIconTemplate: '#{@icon_builder.render(\":icon\")}',\n\n\t\t\t\t\t\t\t// Clipboard\n\t\t\t\t\t\t\tclipboard: #{clipboard ? 'true' : 'false'},\n\t\t\t\t\t\t\tclipboardIcon: '#{@icon_builder.render(@options[:clipboard_icon] ? @options[:clipboard_icon] : \"clipboard\")}',\n\t\t\t\t\t\t\tclipboardTemplate: \"#{clipboard ? (@options[:clipboard_template] ? @options[:clipboard_template].gsub('\"', \"'\") : \":\" + @options[:clipboard_attrs].first) : \"\"}\",\n\t\t\t\t\t\t\tclipboardAttrs: #{clipboard_attrs_js},\n\t\t\t\t\t\t\tclipboardActionCollapsed: #{@options[:clipboard_action_collapsed] == true ? 'true' : 'false'}, \n\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t// Select\n\t\t\t\t\t\t\tselectByDefault: #{@options[:select_by_default] ? @options[:select_by_default].to_i : \"null\"},\n\n\t\t\t\t\t\t\t// Reload\n\t\t\t\t\t\t\treloadIcon: '#{@icon_builder.render(@options[:update_icon] ? 
@options[:update_icon] : \"refresh\")}',\n\t\t\t\t\t\t\treloadLabel: '#{I18n.t(\"general.action.reload\").upcase_first}',\n\t\t\t\t\t\t});\n\t\t\t\t\t\trug_tree_#{@hash}.ready();\n\t\t\t\t\t});\n\t\t\t\t\t$(document).on('turbolinks:load', function() {\n\t\t\t\t\t\trug_tree_#{@hash}.repair();\n\t\t\t\t\t});\n\t\t\t\t})\n\n\t\t\t\tresult += %{\n\t\t\t\t\t<div id=\"tree-#{@hash}\" data-url=\"#{data_path.to_s}\"></div>\n\t\t\t\t}\n\n\t\t\t\treturn result.html_safe\n\t\t\tend", "def new(*args)\n parsed_node = Treetop::Runtime::SyntaxNode.new(*args)\n\n node_name, node_class = @node_name, @node_class # local scope for the block below\n\n # so the node knows how to build itself:\n parsed_node.meta_def :build do\n node_class.new(self)\n end\n\n # so the node can be filtered based on what kind of AST node it will build\n parsed_node.meta_def :node_to_build do\n node_name\n end\n\n parsed_node\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
check getting checksums of a stream
def test_get_checksum_stream basechecksum = Cfruby::Checksum::Checksum.get_checksums(@tempfile.path) File.open(@tempfile.path) { |fp| streamchecksum = Cfruby::Checksum::Checksum.get_checksums(fp) assert_equal(basechecksum, streamchecksum) } end
[ "def checksum; end", "def verify_checksum!\n log.info(log_key) { \"Verifying checksum\" }\n\n expected = checksum\n actual = digest(downloaded_file, digest_type)\n\n if expected != actual\n raise ChecksumMismatch.new(self, expected, actual)\n end\n end", "def checksum\n trailer[:checksum]\n end", "def checksum\n end", "def validate_checksums\n outcome = true\n results = {}\n self.datastreams.each do |dsid, ds|\n next if ds.profile.empty?\n outcome &&= ds.dsChecksumValid\n results[dsid] = ds.profile\n end\n [outcome, results]\n end", "def validate_checksum\n raise Puppet::Error.new \"Inconsistent checksums. Checksum of fetched file is #{calculated_checksum}. You specified #{specified_checksum}\" if calculated_checksum != specified_checksum\n end", "def checksum\n fil_header[:checksum]\n end", "def corrupt?\n checksum != calculate_checksum\n end", "def checksum_invalid?\n !checksum_valid?\n end", "def checksum\n @checksum ||= calculate_checksum\n end", "def checksum\n calc_checksum = 0\n byte_bit_nbr = 0\n @fuse_data.each { |bit|\n calc_checksum += 2**byte_bit_nbr if bit == 1\n byte_bit_nbr += 1\n byte_bit_nbr = 0 if byte_bit_nbr == 8\n }\n calc_checksum = calc_checksum % 2**16\n calc_checksum\n end", "def validate_checksums\n [:header_cksum, :puzzle_cksum, :icheated_cksum].each do |cksum|\n unless send(cksum) == headers[cksum]\n raise InvalidChecksumError.new(\"#{cksum.to_s.sub(/_/, ' ')} is invalid\")\n end\n end\n end", "def checksum?# {{{\n self.csum == compute_checksum\n end", "def checksum\n\t\t@checksum ||= FileManager.checksum(@path)\n #\t\tif file?\n #\t\t\treturn FileManager.checksum(@path)\n #\t\tend\n end", "def checksum( io )\n\t\tdigest = Digest::SHA256.new\n\t\tbuf = String.new\n\n\t\twhile io.read( CHUNK_SIZE, buf )\n\t\t\tdigest.update( buf )\n\t\tend\n\n\t\tio.rewind\n\t\treturn digest.hexdigest\n\tend", "def checksum\n Digest::SHA1.hexdigest(@raw_source)\n end", "def checksum\n Digest::SHA256.digest payload\n end", "def valid_checksums?\n \n p = package_path\n files.each do |f|\n file_path = File.join(p, f[:path])\n \n if File.exist?(file_path)\n digest = Digest::SHA1.hexdigest(File.read(file_path))\n errors.add :checksum_validity, \"Digest for #{file_path} in AIP does \" +\n \"not match\" unless digest == f[:sha_1]\n end \n \n end\n \n errors.on(:checksum_validity).nil?\n \n end", "def verify_crc!(type, content, found_crc); end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
checks the permission, existence, and ownership controls on the database file
def test_database_checks() currentmode = File.stat(@databasefile).mode Cfruby::FileOps.chmod(@databasefile, 0700) assert_not_equal(currentmode, File.stat(@databasefile).mode) assert_raise(Cfruby::Checksum::ChecksumPermissionError) { @checksum.update_all } Cfruby::FileOps.chmod(@databasefile, 0600) if(Process.euid() == 0) assert_raise(Cfruby::Checksum::ChecksumOwnershipError) { Cfruby::FileOps.chown(@databasefile, 200, 200) @checksum.update_all } end end
[ "def check_mode()\n\t\t\t\tCfruby.controller.inform('debug', \"checking checksum database permisions for \\\"#{@databasefile}\\\"\")\n\n\t\t\t\t#if it does exist test the mode. Raise an exception if it fails the test or return true\n\t\t\t\tstat = File.stat(@databasefile)\n\t\t\t\t\n\t\t\t\tif(stat.mode != @statmode)\n\t\t\t\t\traise(ChecksumPermissionError, \"Checksum database \\\"#{@databasefile}\\\" does not have permissions of \\\"#{@mode.to_s(8)}\\\"\")\n\t\t\t\tend\n\t\t\t\t\n\t\t\t\tif(stat.uid != @user or stat.gid != @group)\n\t\t\t\t\traise(ChecksumOwnershipError, \"Checksum database \\\"#{@databasefile}\\\" is not owned by uid #{@user} and group #{@group}\")\n\t\t\t\tend\n\t\t\t\t\n\t\t\t\treturn(true)\n\t\t\tend", "def validate_database\n db = @database_path\n raise \"bad mode of #{db}\" unless File.world_readable?(db).nil?\n end", "def checkpermissions()\n unless File.stat('/etc/hosts').writable?\n # We found it, log error and exit successfully\n @log.error('Can not write to /etc, missing required permissions.')\n abort('Can not write to /etc, are you root?')\n end\n end", "def check_file_owner(options,file_name,uid)\n test_uid = File.stat(file_name).uid\n if test_uid != uid.to_i\n message = \"Information:\\tChanging ownership of \"+file_name+\" to \"+uid.to_s\n if file_name.to_s.match(/^\\/etc/)\n command = \"sudo chown #{uid.to_s} #{file_name}\"\n else\n command = \"chown #{uid.to_s} #{file_name}\"\n end\n execute_command(options,message,command)\n end\n return\nend", "def create_access_check\n permission_check('create')\n end", "def validate_permissions\n return if Gem.win_platform?\n\n @specification.files.each do |file|\n next unless File.file?(file)\n next if File.stat(file).mode & 0444 == 0444\n warning \"#{file} is not world-readable\"\n end\n\n @specification.executables.each do |name|\n exec = File.join @specification.bindir, name\n next unless File.file?(exec)\n next if File.stat(exec).executable?\n warning \"#{exec} is not executable\"\n end\n end", "def existing_file\n if sqlite3 && database && !File.exist?(database)\n self.errors.add(:database, :not_exist)\n end\n end", "def compare_ownership(file, uid, gid)\n if File.exist?(file)\n st = File.lstat(file)\n if st.uid == uid && st.gid == gid\n return true\n end\n end\n false\n end", "def permissionsExist(file)\n\n fAccess = File.absolute_path(file)+\".access\"\n return !File.exist?(fAccess)\n end", "def checkFileAccess(userName, fToAccess, fAccess)\n\n #return false if fAccess.end_with?(\".access\")\n if File.exist?(fAccess) && userExists(userName)\n\n fileProps = YAML.load_file(fAccess)\n fileUsers = fileProps[\"users\"]\n fileGroups = fileProps[\"groups\"]\n if fileGroups.index(\";guests;\") != nil\n return true\n end\n\n if userName == nil\n return false\n end\n\n user = loadUser(userName)\n groups = Array.new\n groupFound = true\n groupAt = 0\n while groupFound\n\n if user.key?(\"group#{groupAt}\")\n groupTmp = user[\"group#{groupAt}\"]\n groups << groupTmp\n groupAt = groupAt.next\n else\n groupFound = false\n end\n end\n\n if(fileUsers.index(\";#{userName};\") != nil || fileUsers.index(\";guest;\") != nil)\n return true\n end\n if fileGroups.index(\";guests;\") != nil\n return true\n end\n for i in 0..groups.size\n\n if(fileGroups.index(\";#{groups[i]};\") != nil)\n return true\n end\n end\n return false\n end\n return false\n end", "def validate_directory_structure\n return false unless File.exists? 
@root\n Find.find(@root) do |path|\n stat = File::Stat.new path\n return false unless stat.uid == OWNER and stat.gid == GROUP\n if File.directory? path\n return false unless stat.mode == DIRMODE\n else\n return false unless stat.mode == FILEMODE\n end\n end\n true\n end", "def check_directory_perms\n access = get_acl_access_data\n \n files_modified = `git diff-index --cached --name-only HEAD`\n files_modified.select do |path|\n has_file_access = false\n next if path.size == 0\n access[$user].each do |access_path|\n has_file_access = true if !access_path || path.include?(access_path)\n end\n if !has_file_access\n puts \"[POLICY] You do not have access to push to #{path}\"\n exit 1\n end\n end\nend", "def svn_auth_file_checks\n if !@repos_admin # if we are not admin, check if files exist\n if !File.file?(@repos_auth_file)\n raise FileDoesNotExist.new(\"'#{@repos_auth_file}' not a file or not existent\")\n end\n if !File.readable?(@repos_auth_file)\n raise \"File '#{@repos_auth_file}' not readable\"\n end\n end\n return true\n end", "def verify_file_security(path)\n # Not requiring security? File doesn't exist? Then everything is fine...\n return unless (File.exists?(path) && @secure)\n \n # Root can read all files, useful for backups and of course no security lost\n return if Process.uid == 0\n \n # Check file ownership\n stat = File::Stat.new(path)\n raise RuntimeError.new(\"Cannot load settings file #{path} - file must be owned by the user this program is running as (UID #{Process.uid})\") unless stat.owned?\n raise RuntimeError.new(\"Cannot load settings file #{path} - file cannot be world-writable\") if stat.world_writable?\n end", "def integrity_check\n execute( \"PRAGMA integrity_check\" ) do |row|\n raise Exception, row[0] if row[0] != \"ok\"\n end\n end", "def integrity_check\n execute( \"PRAGMA integrity_check\" ) do |row|\n raise DatabaseException, row[0] if row[0] != \"ok\"\n end\n end", "def integrity_check\n execute( \"PRAGMA integrity_check\" ) do |row|\n raise Exceptions::DatabaseException, row[0] if row[0] != \"ok\"\n end\n end", "def check_executable_by_user(file,user)\n file_info = ::File.stat(file)\n perms = get_hash_permissions(file,file_info)\n return if user == \"root\" and perms.values.include?(true)\n #check if the user owns the file\n file_user = Etc.getpwuid(file_info.uid).name\n return if perms[:user] and user == file_user\n #check if the user is in the proper group\n file_group = Etc.getgrgid(file_info.gid)\n return if perms[:group] and file_group.mem.include?(user)\n #check if anyone can execute\n return if perms[:other]\n #otherwise, we fail\n false.should be_true\n end", "def has_access_to_file(file_id, user_id)\n if get_all_user_data(user_id).first[\"rank\"] >= 1\n return true\n end\n \n owner_id = $db.execute(\"SELECT owner_id FROM files WHERE file_id = ?\", file_id).first[\"owner_id\"]\n if owner_id == user_id\n return true\n end\n\n shared_users = $db.execute(\"SELECT user_id FROM shared_files WHERE file_id = ?\", file_id)\n shared_users.each do |user|\n if user[\"user_id\"] == user_id\n return true\n end\n end\n return false\nend" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Runs before the build begins
def prebuild(build, listener) # do any setup that needs to be done before this build runs. end
[ "def preRun\n end", "def prebuild(build, listener)\n # do any setup that needs to be done before this build runs.\n end", "def execute_pre_build(software); end", "def before_setup\n # do nothing by default\n end", "def pre_install; end", "def pre_task\n end", "def before_run(ant)\n end", "def preReady()\n\t\t\t#does nothing. extend in subclasses\n\t\tend", "def install_pre_hook\n end", "def run_pre_assembly\n log \"\\nstarting run_pre_assembly(#{info_for_log})\"\n pre_assemble_objects\n log \"\\nfinishing run_pre_assembly(#{info_for_log})\"\n end", "def before(*args)\n require_libs\n require_ing_file\n end", "def begin_build\n build_stack.push(true)\n end", "def pre_execute(&block)\n @hooks[:pre_execute] = block\n end", "def run_before_hooks\n run_hooks(self.class.before_hooks)\n end", "def run_before_scripts\n return if @config['before_script'].empty?\n\n puts 'INFO: Running pre test scripts'\n @config['before_script'].each do |cmd|\n run cmd\n end\n end", "def post_init\n end", "def before_setup(&block)\n pre_setup_actions.unshift block\n end", "def start\n unless @build_config.skip?(:start)\n @env.start\n end\n end", "def setup\n setup! unless setup?\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Generate a password if no password is defined. The password is generated by randexp, a gem required by dm-sweatshop. If the password is not then sent by mail, it cannot be obtained again.
def generate_password if new_record? self.password = self.password_confirmation = /\w{0,10}/.gen unless password end end
[ "def generate_password\n if self.password.blank?\n pass = SecureRandom.urlsafe_base64\n self.password = pass\n self.password_confirmation = pass\n end\n end", "def generate_password( length_of_pass, special_char )\n chars = []\n (\"a\"..\"z\").each {|ele| chars << ele}\n (\"A\"..\"Z\").each {|ele| chars << ele}\n (\"0\"..\"9\").each {|ele| chars << ele}\n if(special_char)\n [\"@\", \"!\", \"_\",].each {|ele| chars << ele}\n end\n newpass = \"\"\n 1.upto(length_of_pass) { |i| newpass << chars[rand(chars.size-1)] }\n #self.password\n self.unencrypted_password = newpass\n self.password_confirmation = newpass\n @unencrypted = true\n end", "def generate_password\n self.password = SecureRandom.urlsafe_base64(10)\n self.password_confirmation = self.password\n end", "def auto_generate_password\n if password.nil?\n rand_password = rand(10 ** 8).to_s\n update(password: rand_password, password_confirmation: rand_password)\n end\n end", "def generate_and_send_new_password\n new_password = \"\"\n chars = \"abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890!#,.;:?%&\".split(//)\n (0..8).each do |i|\n new_password += chars.at(rand(chars.length))\n end\n self.update_attributes(:password => new_password)\n save!\n AdminMailer.password_reset(self, new_password).deliver\n new_password\n end", "def generate_password_reminder\n create_password_reminder!(:token=>SecureRandom.hex(32)) unless password_reminder\n end", "def auto_generate_password\n if password.nil?\n rand_password = rand(10 ** 8)\n update(password: rand_password, password_confirmation: rand_password)\n end\n end", "def generate_password(**args)\n request_get('/utils/pwgen', **args)\n end", "def generate_password( random_chars = false, num = 16 )\n password = ''\n \n # Should it be generated with random characters\n if ( random_chars )\n # The characters to use\n cs = [*'0'..'9', *'a'..'z', *'A'..'Z','_','-','!','.','$']\n # Randomize\n password = num.times.map { cs.sample }.join\n # Otherwise use the \"subroutine\" genpass to generate a password with random words\n else\n # Call the \"genpass subroutine\"\n password = `generate_password`.downcase\n end\n \n return password\n end", "def auto_generate_password\n case @generate_option\n when :never_generate\n return nil\n when :generate_no_query\n # Normally, :generate_no_query goes hand-in-hand with skipping\n # the query. However, if the Item's value was pre-assigned\n # and invalid, @skip_query will be set to false. This is so we\n # give the user an opportunity to fix the problem via a query.\n if @skip_query\n return Simp::Cli::Utils.generate_password\n else\n auto_default = 'yes'\n end\n when :generate_as_default\n auto_default = 'yes'\n when :no_generate_as_default\n auto_default = 'no'\n end\n\n if @minimize_queries\n # skip the 'Auto-generate the password?' query\n if auto_default == 'no'\n # assume auto-generation is not appropriate\n return nil\n else\n # assume auto-generation is appropriate\n password = generate_and_print_password\n end\n else\n password = nil\n @password_name = @key if @password_name.nil? or @password_name.empty?\n if agree( \"Auto-generate the #{@password_name} password? 
\" ){ |q| q.default = auto_default }\n password = generate_and_print_password\n end\n end\n password\n end", "def generate_password(length = nil)\n if length.nil?\n SecureRandom.hex(6) # result length = 12\n else\n chars = (('a'..'z').to_a + ('0'..'9').to_a) - %w(i o 0 1 l 0)\n (1..length).collect{|a| chars[rand(chars.length)] }.join\n end\n end", "def generatePassword\n ('a'..'z').to_a.shuffle[0,8].join\n end", "def generate_and_print_password\n password = Simp::Cli::Utils.generate_password\n logger.say ('~'*80).green + \"\\n\"\n logger.say 'NOTE: '.green.bold + \" The generated password is: \\n\\n\"\n logger.say ' ' + password.yellow.bold + \"\\n\\n\"\n logger.say ' >>>> Please remember this password! <<<<'.bold\n logger.say ' It will ' + '**NOT**'.bold + ' be written to the log or hieradata.'\n logger.say ('~'*80).green + \"\\n\"\n logger.say '*** Press enter to continue ***'.cyan.bold.blink\n ask ''\n password\n end", "def generate_password\n pass = \"\"\n 20.times { pass += (rand(74) + 48).chr }\n\n begin\n Puppet.settings.write(:capass) { |f| f.print pass }\n rescue Errno::EACCES => detail\n raise Puppet::Error, \"Could not write CA password: %s\" % detail.to_s\n end\n\n @password = pass\n\n return pass\n end", "def generate_password\n pass = \"\"\n 20.times { pass += (rand(74) + 48).chr }\n\n begin\n Puppet.settings.write(:capass) { |f| f.print pass }\n rescue Errno::EACCES => detail\n raise Puppet::Error, \"Could not write CA password: #{detail}\"\n end\n\n @password = pass\n\n pass\n end", "def check_or_set_password\n\n if self.password.blank?\n self.password =\n ::Milia::Password.generate(\n 8, Password::ONE_DIGIT | Password::ONE_CASE\n )\n\n self.password_confirmation = self.password\n else\n # if a password is being supplied, then ok to skip\n # setting up a password upon confirm\n self.skip_confirm_change_password = true if ::Milia.use_invite_member\n end\n\n end", "def standard_password\n \"!@Cd5678\"\n end", "def generate_password_hash\n Base64.strict_encode64(Digest::SHA1.hexdigest(Digest::MD5.hexdigest(BirjaKreditov.password) + @nonce + @time))\n end", "def generate_password\n ['test', 'guest'].sample\nend" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
GET /trip_routes GET /trip_routes.json
def index @trip_routes = TripRoute.all end
[ "def route_info\n routes = RestClient.get('http://svc.metrotransit.org/NexTrip/Routes?format=json')\n JSON.parse(routes)\nend", "def index\n @driver_trip_routes = DriverTripRoute.all\n end", "def index\n @truck_routes = TruckRoute.all\n render json: @truck_routes\n end", "def routes\n routes = get('/gtfs/routes')\n routes.map do |route|\n Route.from_json(route)\n end\n end", "def trips\n get '/gtfs/trips'\n end", "def trips_by_route_id(route_id)\n get \"/gtfs/trips/routeid/#{route_id}\"\n end", "def index\n @touring_routes = TouringRoute.all\n end", "def list\n parsed = get_request '/api/route.aspx', query: { cmd: :routes }\n parsed['root']['routes']['route'].map { |route| Route.new route }\n end", "def get_route\n data = {\n visits: visits,\n fleet: fleet\n }\n\n data[:options] = options if options\n result = Util.send_request(\"POST\", \"vrp\", Routific.token, data)\n RoutificApi::Route.parse(result)\n end", "def get_routes\n Route.fetch(filter: { type: SUBWAY_TYPES })\n end", "def all\n get('routes')\n end", "def index\n\n begin\n @pathways = Pathway.order(:name).to_a\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @pathways }\n end\n rescue\n handle_unspecified_error\n end\n\n end", "def routes_for_agency(id)\n response = request \"routes-for-agency/#{id}\"\n Route.collect response.dig('data', 'list')\n end", "def index\n trips = Trip.all\n respond_with trips\n end", "def index\n @traditional_routes = TraditionalRoute.all\n end", "def stop_info(route, direction)\n stops = RestClient.get(\"http://svc.metrotransit.org/NexTrip/Stops/#{route}/#{direction}?format=json\")\n JSON.parse(stops)\nend", "def index\n @favorite_routes = FavoriteRoute.all\n render json: @favorite_routes\n end", "def route_entries\n json_array('routes.json').collect do |entry|\n entry.start_time = Time.parse(entry.start_time)\n entry.end_time = Time.parse(entry.end_time)\n entry\n end\n end", "def index\n @route_stops = RouteStop.all\n render json: @route_stops\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
POST /trip_routes POST /trip_routes.json
def create @trip_route = TripRoute.new(trip_route_params) respond_to do |format| if @trip_route.save format.html { redirect_to @trip_route, notice: 'Trip route was successfully created.' } format.json { render :show, status: :created, location: @trip_route } else format.html { render :new } format.json { render json: @trip_route.errors, status: :unprocessable_entity } end end end
[ "def post_route(route, message)\n raise TypeError unless route.is_a? Route\n @changeset = @api.create_changeset(message, tags={'created_by'=>'ITCR'})\n ways_list = []\n nodes_list = create_node_list(route.path)\n\n until nodes_list.empty? # For node's maximum limit of a way\n way_nodes = nodes_list.take(MAX_NODES)\n nodes_list = nodes_list.drop(MAX_NODES)\n way_id = create_way(way_nodes)\n ways_list << way_id\n end\n\n relation = create_relation(ways_list) # Link ways to relation\n relation = add_stops(relation, route.stops) # Add bus stops to relation\n\n @api.save(relation, @changeset) # Save relation using the API\n puts 'Relation created succesfuly.'\n @api.close_changeset(@changeset)\n @changeset.id\n end", "def create\n @truck_route = TruckRoute.new(truck_route_params)\n\n if @truck_route.save\n render json: @truck_route, status: :created\n else\n render json: @truck_route.errors, status: :unprocessable_entity\n end\n end", "def create\n @driver_trip_route = DriverTripRoute.new(driver_trip_route_params)\n\n respond_to do |format|\n if @driver_trip_route.save\n format.html { redirect_to @driver_trip_route, notice: 'Driver trip route was successfully created.' }\n format.json { render :show, status: :created, location: @driver_trip_route }\n else\n format.html { render :new }\n format.json { render json: @driver_trip_route.errors, status: :unprocessable_entity }\n end\n end\n end", "def index\n @trip_routes = TripRoute.all\n end", "def planner_output\n if current_user != nil\n @trip = Trip.find(params[:id])\n else\n redirect_to new_user_session_path\n end\n\n @trip.routes.each { |r| r.destroy }\n\n traveller_matching = TravellerMatching.new(@trip.to_object_container_no_routes)\n @trip.create_routes_from_trip_object(traveller_matching.trip)\n\n trip_json = traveller_matching.trip.to_json\n\n\n @trip.trip_json = trip_json\n @trip.save\n end", "def create\n @train_route = TrainRoute.new(params[:train_route])\n\n respond_to do |format|\n if @train_route.save\n format.html { redirect_to @train_route, notice: 'Train route was successfully created.' 
}\n format.json { render json: @train_route, status: :created, location: @train_route }\n else\n format.html { render action: \"new\" }\n format.json { render json: @train_route.errors, status: :unprocessable_entity }\n end\n end\n end", "def create_routes_from_trip_object(trip_object)\n trip_object.routes.each do |r|\n driver = Driver.find(r.driver.id)\n route = Route.create(driver: driver)\n r.id = route.id\n r.passengers.each do |p|\n route.travellers << Traveller.find(p.id)\n end\n self.routes << route\n end\n end", "def create\n @trip_waypoint = TripWaypoint.new\n @trip_waypoint.place_id = params[:place_id]\n @trip_waypoint.trip_id = params[:trip_id]\n @trip_waypoint.save\n\n respond_to do |format|\n if @trip_waypoint.save\n format.html { redirect_to edit_admin_trip_path(@trip), notice: \"Le lieu a été ajouté à l'itinéraire\" }\n # format.json { render :show, status: :created, location: @trip_waypoint }\n else\n format.html { render :new }\n # format.json { render json: @trip_waypoint.errors, status: :unprocessable_entity }\n end\n end\n end", "def parse_and_add_routes\n\n routes_list = @json_dict['routes']\n\n routes_list.each do |route|\n source = route['ports'][0]\n destination = route['ports'][1]\n @json_graph_api.add_edge(source,destination,route)\n end\n\n end", "def index\n @driver_trip_routes = DriverTripRoute.all\n end", "def create\n @train_route = TrainRoute.new(train_route_params)\n \n respond_to do |format|\n if @train_route.save\n format.html { redirect_to @train_route, notice: 'TrainRoute was successfully created.' }\n format.json { render :show, status: :created, location: @train_route }\n else\n format.html { render :new }\n format.json { render json: @train_route.errors, status: :unprocessable_entity }\n end\n end\n end", "def post_route(payload)\n with_rescue do\n payload = payload.to_json if payload.is_a?(Hash)\n connection.post do |request|\n request.url routes_path\n request.body = payload\n request.headers['Content-Type'] = 'application/json'\n end\n end\n end", "def create\n @favorite_route = FavoriteRoute.new(favorite_route_params)\n if @favorite_route.save\n render json: @favorite_route, status: :created, location: @favorite_route\n else\n render json: @favorite_route.errors, status: :unprocessable_entity\n end \n end", "def create_route\n return unless request.post?\n @routerinfo = RouterInfo.new(params[:routerinfo])\n respond_to do |format|\n if @routerinfo.save\n map_routes()\n flash[:notice] = 'Commute was successfully created.'\n format.html { redirect_to :action=>'list'}\n format.xml { render :xml => @routerinfo, :status => :created, :location => @routerinfo}\n else\n format.html { render :action => \"create_route\" }\n format.xml { render :xml => @routerinfo.errors, :status => :unprocessable_entity }\n end\n end\n end", "def create\n @trainroute = current_user.trainroutes.build(trainroute_params)\n\n respond_to do |format|\n if @trainroute.save\n format.html { redirect_to @trainroute, notice: 'Trainroute was successfully created.' 
}\n format.json { render :show, status: :created, location: @trainroute }\n else\n format.html { render :new }\n format.json { render json: @trainroute.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @trip_waypoint = TripWaypoint.new\n @trip_waypoint.place_id = params[:place_id]\n @trip_waypoint.trip_id = params[:trip_id]\n @trip_waypoint.save\n\n respond_to do |format|\n if @trip_waypoint.save\n format.html { redirect_to edit_partner_trip_path(params[:trip_id]), notice: \"Le lieu a été ajouté à l'itinéraire\" }\n # format.json { render :show, status: :created, location: @trip_waypoint }\n else\n format.html { render :new }\n # format.json { render json: @trip_waypoint.errors, status: :unprocessable_entity }\n end\n end\n end", "def create_route(route_list)\n @route_gen.info_list = route_list\n id = @route_gen.gen_id\n path = @route_gen.gen_path\n stops = gen_stops\n path = @route_gen.insert_stops_in_path(path, stops)\n\n #Insert start and end point as stops\n stops.insert(0,Stop.new(\"Punto inicial de la ruta\",path.first))\n #stops.append(Stop.new(\"Punto final de la ruta\",path.last))\n\n route = Route.new(id, path, stops)\n route\n end", "def create\n @trip = Trip.new(trip_params.merge(user_id: current_user.id))\n\n respond_to do |format|\n format.any(:trips_json, :json) do\n if @trip.save\n render json: @trip, status: :created, location: @trip\n else\n render json: @trip.errors, status: :unprocessable_entity\n end\n end\n end\n end", "def route_info\n routes = RestClient.get('http://svc.metrotransit.org/NexTrip/Routes?format=json')\n JSON.parse(routes)\nend" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
PATCH/PUT /trip_routes/1 PATCH/PUT /trip_routes/1.json
def update respond_to do |format| if @trip_route.update(trip_route_params) format.html { redirect_to @trip_route, notice: 'Trip route was successfully updated.' } format.json { render :show, status: :ok, location: @trip_route } else format.html { render :edit } format.json { render json: @trip_route.errors, status: :unprocessable_entity } end end end
[ "def update\n if @truck_route.update(truck_route_params)\n render json: @truck_route, status: :ok\n else\n render json: @truck_route.errors, status: :unprocessable_entity\n end\n end", "def update\n respond_to do |format|\n format.any(:trips_json, :json) do\n if @trip.update(trip_params)\n head :no_content\n else\n render json: @trip.errors, status: :unprocessable_entity\n end\n end\n end\n end", "def update\n @trip = Trip.find(params[:id])\n\n if @trip.update(trip_params)\n render :json => {:success => true}\n else\n render :json => {:success => false, :errors => [\"Trip update failed.\"]}\n end\n end", "def update\n respond_to do |format|\n if @driver_trip_route.update(driver_trip_route_params)\n format.html { redirect_to @driver_trip_route, notice: 'Driver trip route was successfully updated.' }\n format.json { render :show, status: :ok, location: @driver_trip_route }\n else\n format.html { render :edit }\n format.json { render json: @driver_trip_route.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @trip = Trip.find(params[:id])\n\n respond_to do |format|\n if @trip.update_attributes(params[:trip])\n format.html { redirect_to @trip, notice: 'Trip was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n end\n end\n end", "def update(id, params)\n put(\"routes/#{id}\", params)\n end", "def update\n Rails.logger.info('👻 Disraptor: Updating route.')\n\n route_id = params.require(:route_id)\n payload = params.require('disraptor/route')\n source_path = normalize_path(payload['sourcePath'])\n target_url = normalize_uri(payload['targetUrl'])\n request_method = normalize_request_method(payload['requestMethod'])\n\n if !@@allowed_methods.include?(request_method)\n error_message = \"Route request method was #{request_method} but expected one of these: #{@@allowed_methods.join(', ')}.\"\n Rails.logger.error('❌ Disraptor: Error: ' + error_message)\n\n return render json: { error: error_message }, status: 400\n end\n\n if source_path != '/' and source_path.end_with?('/')\n error_message = \"Route source path was #{source_path} but it must not end in a slash.\"\n Rails.logger.error('❌ Disraptor: Error: ' + error_message)\n\n return render json: { error: error_message }, status: 400\n end\n\n route = Disraptor::Route.edit(route_id, source_path, target_url, request_method)\n\n Rails.application.reload_routes!\n\n render json: { 'disraptor/route': route }\n end", "def put(*a) route 'PUT', *a end", "def update number, routes\r\n # the base uri for api requests\r\n query_builder = Configuration.BASE_URI.dup\r\n\r\n # prepare query string for API call\r\n query_builder << \"/tns/{number}\"\r\n\r\n # process optional query parameters\r\n query_builder = APIHelper.append_url_with_template_parameters query_builder, {\r\n \"number\" => number,\r\n }\r\n\r\n # validate and preprocess url\r\n query_url = APIHelper.clean_url query_builder\r\n\r\n # prepare headers\r\n headers = {\r\n \"user-agent\" => \"Flowroute SDK 1.0\",\r\n \"content-type\" => \"application/json; charset=utf-8\"\r\n }\r\n\r\n response = CustomAuthUtility.append_custom_auth_params method:'PATCH',\r\n query_url:query_url,\r\n body:\"{\\\"routes\\\": #{routes.to_json}}\",\r\n headers:headers\r\n\r\n # Error handling using HTTP status codes\r\n if response.code == 401\r\n raise APIException.new \"NOT AUTHORIZED\", 401, response.raw_body\r\n elsif response.code == 500\r\n raise APIException.new \"APPLICATION/SERVER ERROR\", 500, response.raw_body\r\n elsif 
!(response.code.between?(200,206)) # [200,206] = HTTP OK\r\n raise APIException.new \"HTTP Response Not OK\", response.code, response.raw_body\r\n end\r\n\r\n response.body\r\n end", "def update\n @train_route = TrainRoute.find(params[:id])\n\n respond_to do |format|\n if @train_route.update_attributes(params[:train_route])\n format.html { redirect_to @train_route, notice: 'Train route was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @train_route.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @intended_trip = IntendedTrip.get(params[:id])\n @intended_trip.similiar_trips( @intended_trip )\n respond_to do |format|\n if @intended_trip.save\n format.html { redirect_to @intended_trip, notice: 'Your trip has been updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @intended_trip.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @pathway = Pathway.find(params[:id])\n\n respond_to do |format|\n if @pathway.update_attributes(params[:pathway])\n format.html { redirect_to @pathway, notice: 'Pathway was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @pathway.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @vehicle_route.update(vehicle_route_params)\n format.html { redirect_to vehicle_routes_path, notice: 'Vehicle route was successfully updated.' }\n format.json { render :show, status: :ok, location: @vehicle_route }\n else\n format.html { render :edit }\n format.json { render json: @vehicle_route.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @transport_route = TransportRoute.find(params[:id])\n\n respond_to do |format|\n if @transport_route.update_attributes(params[:transport_route])\n format.html { redirect_to @transport_route, notice: 'Transport route was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @transport_route.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @one_way_trip = OneWayTrip.find(params[:id])\n flash[:notice] = 'OneWayTrip was successfully updated.' if @one_way_trip.update_attributes(params[:one_way_trip])\n respond_with(@one_way_trip)\n end", "def update\n @trip = current_user.trips.find(params[:id])\n\n respond_to do |format|\n if @trip.update_attributes(params[:trip])\n format.html { redirect_to @trip, notice: 'Trip was successfully updated.' 
}\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @trip.errors, status: :unprocessable_entity }\n end\n end\n end", "def updatetrip\n if params[:employee_id] == '0'\n employee = Employee.first\n else\n employee = Employee.find(params[:employee_id])\n end\n lovedone = Lovedone.find(params[:lovedone_id])\n\n if trip_params[:status] == 'started'\n lovedone.employee_id = employee.id\n lovedone.save\n employee.service_status = \"PickUp\" if employee.company.provider_type == \"Transport\"\n employee.service_status = \"Arrival\" if employee.company.provider_type == \"Home_Health\"\n elsif trip_params[:status] == 'completed'\n lovedone.employee_id = ''\n lovedone.save\n employee.service_status = \"DropOff\" if employee.company.provider_type == \"Transport\"\n employee.service_status = \"Departure\" if employee.company.provider_type == \"Home_Health\"\n end\n if employee.save\n send_emailer(employee, lovedone)\n end\n @trip = Trip.find(params[:id]) \n if @trip.update(trip_params)\n render json: @trip, status: :ok\n else\n render json: @trip.errors, status: :unprocessable_entity\n end\n end", "def update\n @gtfs_route = GtfsRoute.find(params[:id])\n\n respond_to do |format|\n if @gtfs_route.update_attributes(params[:gtfs_route])\n format.html { redirect_to(@gtfs_route, :notice => 'Gtfs route was successfully updated.') }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @gtfs_route.errors, :status => :unprocessable_entity }\n end\n end\n end", "def update\n @ticket_route = TicketRoute.find(params[:id])\n\n respond_to do |format|\n if @ticket_route.update_attributes(params[:ticket_route])\n format.html { redirect_to @ticket_route, notice: 'Ticket route was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @ticket_route.errors, status: :unprocessable_entity }\n end\n end\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
DELETE /trip_routes/1 DELETE /trip_routes/1.json
def destroy @trip_route.destroy respond_to do |format| format.html { redirect_to trip_routes_url, notice: 'Trip route was successfully destroyed.' } format.json { head :no_content } end end
[ "def destroy\n @route = Line.find_by_slug(params[:line_id]).routes.find(params[:id])\n @route.destroy\n\n respond_to do |format|\n format.html { redirect_to routes_url }\n format.json { head :ok }\n end\n end", "def destroy\n @truck_route.destroy\n render json: @truck_route, status: :ok \n end", "def destroy\n @trip.destroy\n\n respond_to do |format|\n format.any(:trips_json, :json) do\n head :no_content\n end\n end\n end", "def destroy\n @train_route = TrainRoute.find(params[:id])\n @train_route.destroy\n\n respond_to do |format|\n format.html { redirect_to train_routes_url }\n format.json { head :no_content }\n end\n end", "def destroy\n Rails.logger.info('👻 Disraptor: Destroying route.')\n\n if Disraptor::Route.remove(params[:route_id])\n Rails.application.reload_routes!\n\n render json: success_json\n else\n render json: failed_json\n end\n end", "def destroy\n @driver_trip_route.destroy\n respond_to do |format|\n format.html { redirect_to driver_trip_routes_url, notice: 'Driver trip route was successfully destroyed.' }\n format.json { head :no_content }\n end\n end", "def destroy\n @pathway = Pathway.find(params[:id])\n @pathway.destroy\n\n respond_to do |format|\n format.html { redirect_to pathways_url }\n format.json { head :no_content }\n end\n end", "def delete_routes(db, id)\r\n\tdb.execute(\"DELETE FROM route_options WHERE id = ?\", [id])\r\nend", "def destroy\n @trip.destroy\n respond_to do |format|\n format.html { redirect_to tripes_url }\n format.json { head :no_content }\n end\n end", "def delete\n @trip = Trips.find_trip( params[ :id ])\n end", "def destroy\n @route = Route.find(params[:id])\n @route.destroy\n\n respond_to do |format|\n format.html { redirect_to routes_url }\n format.json { head :ok }\n end\n end", "def destroy\n @trip = Trip.find(params[:id])\n @trip.destroy\n\n respond_to do |format|\n format.html { redirect_to find_trips_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @route = Route.find(params[:id])\n @route.destroy\n\n respond_to do |format|\n format.html { redirect_to routes_url }\n format.json { head :no_content }\n end\n end", "def delete(path, opts={})\n add_route(DELETE, path, opts)\n end", "def destroy\n @route.delete\n respond_to do |format|\n format.html { redirect_to routes_url, notice: t(\"routes.deleted\") }\n format.json { head :no_content }\n end\n end", "def destroy\n @transport_route = TransportRoute.find(params[:id])\n @transport_route.destroy\n\n respond_to do |format|\n format.html { redirect_to transport_routes_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @trip_request = TripRequest.find(params[:id])\n @trip_request.destroy\n\n respond_to do |format|\n format.html { redirect_to trip_requests_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @roundtrip = Roundtrip.find(params[:id])\n @roundtrip.destroy\n\n respond_to do |format|\n format.html { redirect_to(roundtrips_url) }\n format.xml { head :ok }\n end\n end", "def destroy\n @trip_type = TripType.find(params[:id])\n @trip_type.destroy\n\n respond_to do |format|\n format.html { redirect_to trip_types_url }\n format.json { head :no_content }\n end\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
GET /ped_programs GET /ped_programs.json
def index @ped_programs = PedProgram.all respond_to do |format| format.html # index.html.erb format.json { render json: @ped_programs } end end
[ "def get_all_programs\n\t\tdo_get_request('/system/programs/list')\n\tend", "def index\n @programs = Program.order(\"name\").all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @programs }\n end\n end", "def index\n @programs = Program.where(:user_id => current_user.id)\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @programs }\n end\n end", "def show\n @program_app = ProgramApp.find(params[:id])\n\n respond_to do |format|\n format.json do\n render json: @program_app\n end\n end\n end", "def show\n @program = Program.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @program }\n end\n end", "def index\n @ped_program_definitions = PedProgramDefinition.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @ped_program_definitions }\n end\n end", "def get_client_programs(client_id:, page: 1, programs: [])\n response = get(\"clients/#{client_id}/programs?page=#{page}\")\n\n return PhysitrackApi::Response.from(response) unless response.success?\n\n programs += response['programs']\n\n if response['programs'].try(:size) == 200\n get_client_programs(client_id: client_id, page: page + 1, programs: programs)\n else\n PhysitrackApi::Response.from(response, payload: { 'programs' => programs })\n end\n end", "def index\n @priority_programs = PriorityProgram.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @priority_programs }\n end\n end", "def list_loyalty_programs\n warn 'Endpoint list_loyalty_programs in LoyaltyApi is deprecated'\n new_api_call_builder\n .request(new_request_builder(HttpMethodEnum::GET,\n '/v2/loyalty/programs',\n 'default')\n .header_param(new_parameter('application/json', key: 'accept'))\n .auth(Single.new('global')))\n .response(new_response_handler\n .deserializer(APIHelper.method(:json_deserialize))\n .is_api_response(true)\n .convertor(ApiResponse.method(:create)))\n .execute\n end", "def program(id)\n get(\"/catalog/titles/programs/#{id.to_s}\")\n end", "def show\n @raw_program = RawProgram.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @raw_program }\n end\n end", "def show\n @programset = Programset.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @programset }\n end\n end", "def index\n @related_programs = RelatedProgram.all\n end", "def show\n @ped_program_definition = PedProgramDefinition.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @ped_program_definition }\n end\n end", "def scrape_programs\n doc = Nokogiri::XML(open('http://api.npr.org/list?id=3004'))\n programs = doc.xpath('*//item')\n programs.each do |program|\n new_program = NprStories::Program.new \n new_program.program_title = program.search('title').text\n new_program.search_id = program.attributes[\"id\"].value.to_i\n new_program.additional_info = program.search('additionalInfo').text\n end\n end", "def show\n @program_manifest = ProgramManifest.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @program_manifest }\n end\n end", "def index\n @vendor_programs = VendorProgram.all\n end", "def programs\n @organization = Organization.find(params[:id])\n end", "def index\n @programas = Programa.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: 
@programas }\n end\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
POST /ped_programs POST /ped_programs.json
def create @ped_program = PedProgram.new(params[:ped_program]) respond_to do |format| if @ped_program.save format.html { redirect_to @ped_program, notice: 'Ped program was successfully created.' } format.json { render json: @ped_program, status: :created, location: @ped_program } else format.html { render action: "new" } format.json { render json: @ped_program.errors, status: :unprocessable_entity } end end end
[ "def create\n @program = Program.new(params[:program])\n\n respond_to do |format|\n if @program.save\n format.html { redirect_to @program, :notice => 'Program was successfully created.' }\n format.json { render :json => @program, :status => :created, :location => @program }\n else\n format.html { render :action => \"new\" }\n format.json { render :json => @program.errors, :status => :unprocessable_entity }\n end\n end\n end", "def create\n @program = current_codabra.programs.new(params[:program])\n\n respond_to do |format|\n if @program.save\n format.html { redirect_to @program, notice: 'Program was successfully created.' }\n format.json { render json: @program, status: :created, location: @program }\n else\n format.html { render :new }\n format.json { render json: @program.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @program = Program.new(program_params)\n\n respond_to do |format|\n if @program.save\n format.html { redirect_to programs_path, notice: 'Program was successfully created.' }\n format.json { render :show, status: :created, location: @program }\n else\n format.html { render :new }\n format.json { render json: @program.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @postulate_collection_program = PostulateCollectionProgram.new(postulate_collection_program_params)\n\n respond_to do |format|\n if @postulate_collection_program.save\n format.html { redirect_to @postulate_collection_program }\n format.json { render action: 'show', status: :created, location: @postulate_collection_program }\n else\n format.html { render action: 'new' }\n format.json { render json: @postulate_collection_program.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @event = Event.find(params[:event_id])\n @program = @event.programs.build(params[:program])\n\n respond_to do |format|\n if @program.save\n format.html {\n redirect_to @event,\n notice: 'Program was successfully created.'\n }\n format.json {\n render json: @program,\n status: :created,\n location: @program\n }\n else\n format.html { render action: \"new\" }\n format.json {\n render json: @program.errors, status: :unprocessable_entity\n }\n end\n end\n end", "def create\n @related_program = RelatedProgram.new(related_program_params)\n\n respond_to do |format|\n if @related_program.save\n format.html { redirect_to @related_program, notice: 'Related program was successfully created.' }\n format.json { render :show, status: :created, location: @related_program }\n else\n format.html { render :new }\n format.json { render json: @related_program.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @entrepreneur_program = EntrepreneurProgram.new(entrepreneur_program_params)\n\n respond_to do |format|\n if @entrepreneur_program.save\n format.html { redirect_to @entrepreneur_program, notice: 'Entrepreneur program was successfully created.' }\n format.json { render :show, status: :created, location: @entrepreneur_program }\n else\n format.html { render :new }\n format.json { render json: @entrepreneur_program.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @raw_program = RawProgram.new(params[:raw_program])\n\n respond_to do |format|\n if @raw_program.save\n format.html { redirect_to @raw_program, notice: 'Raw program was successfully created.' 
}\n format.json { render json: @raw_program, status: :created, location: @raw_program }\n else\n format.html { render action: \"new\" }\n format.json { render json: @raw_program.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @action_program = ActionProgram.new(params[:action_program])\n\n respond_to do |format|\n if @action_program.save\n format.html { redirect_to @action_program, notice: 'Action program was successfully created.' }\n format.json { render json: @action_program, status: :created, location: @action_program }\n else\n format.html { render action: \"new\" }\n format.json { render json: @action_program.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @programm = Programm.new(programm_params)\n\n respond_to do |format|\n if @programm.save\n format.html { redirect_to action: :index, notice: 'Programm was successfully created.' }\n format.json { render :index, status: :created, location: @programm }\n else\n format.html { render :new }\n format.json { render json: @programm.errors, status: :unprocessable_entity }\n end\n end\n end", "def create_program_params\n params.require(:program).permit(:name)\n end", "def create\n puts('Program: ' + program_dummy_params)\n @program_dummy = ProgramDummy.new(program_dummy_params)\n\n respond_to do |format|\n if @program_dummy.save\n format.html { redirect_to @program_dummy, notice: 'Program dummy was successfully created.' }\n format.json { render :show, status: :created, location: @program_dummy }\n else\n format.html { render :new }\n format.json { render json: @program_dummy.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @user_program = UserProgram.new(params[:user_program])\n\n respond_to do |format|\n if @user_program.save\n format.html { redirect_to @user_program, notice: 'User program was successfully created.' }\n format.json { render json: @user_program, status: :created, location: @user_program }\n else\n format.html { render action: \"new\" }\n format.json { render json: @user_program.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @collection_program = CollectionProgram.new(collection_program_params)\n\n respond_to do |format|\n if @collection_program.save\n format.html { redirect_to @collection_program }\n format.json { render action: 'show', status: :created, location: @collection_program }\n else\n format.html { render action: 'new' }\n format.json { render json: @collection_program.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @ped_program_definition = PedProgramDefinition.new(params[:ped_program_definition])\n\n respond_to do |format|\n if @ped_program_definition.save\n format.html { redirect_to @ped_program_definition, notice: 'Ped program definition was successfully created.' }\n format.json { render json: @ped_program_definition, status: :created, location: @ped_program_definition }\n else\n format.html { render action: \"new\" }\n format.json { render json: @ped_program_definition.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @program_development = ProgramDevelopment.new(program_development_params)\n\n respond_to do |format|\n if @program_development.save\n format.html { redirect_to @program_development, notice: 'Program development was successfully created.' 
}\n format.json { render :show, status: :created, location: @program_development }\n else\n format.html { render :new }\n format.json { render json: @program_development.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @live_program = LiveProgram.new(live_program_params)\n\n respond_to do |format|\n if @live_program.save\n format.html { redirect_to @live_program, notice: 'Live program was successfully created.' }\n format.json { render :show, status: :created, location: @live_program }\n else\n format.html { render :new }\n format.json { render json: @live_program.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @odeme_program = Odeme::Program.new(params[:odeme_program])\n # 1.times { @odeme_program.aprograms.build }\n # 1.times { @odeme_program.koordinators.build }\n # 1.times { @odeme_program.lectures.build }\n\n\n respond_to do |format|\n if @odeme_program.save\n format.html { redirect_to @odeme_program, notice: 'Program was successfully created.' }\n format.json { render json: @odeme_program, status: :created, location: @odeme_program }\n else\n format.html { render action: \"new\" }\n format.json { render json: @odeme_program.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @admin_pay_program = Admin::PayProgram.new(admin_pay_program_params)\n\n respond_to do |format|\n if @admin_pay_program.save\n format.html { redirect_to @admin_pay_program, notice: 'Pay program was successfully created.' }\n format.json { render :show, status: :created, location: @admin_pay_program }\n else\n format.html { render :new }\n format.json { render json: @admin_pay_program.errors, status: :unprocessable_entity }\n end\n end\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Calculates the row / column of an adjoining tile, based on the side. Sides are numbered 0 through 5, with 0 being upper left, and moving clockwise. Returns a tuple of [row, column] for the resulting tile.
def adjoiningTile(row, column, side) if( row.is_a?(Integer) && column.is_a?(Integer) && side.is_a?(Integer) ) case side when 0 return [row - 1, column - 1] when 1 return [row - 1, column] when 2 return [row, column + 1] when 3 return [row + 1, column] when 4 return [row + 1, column - 1] when 5 return [row, column - 1] else raise "Invalid side (" + side.to_s + ")!" end else raise "Expected a (row, column, side) as (int, int, int)" end end
[ "def get_column tile\n\ttile[1].to_i - 1\nend", "def tile_leftward\n num_rising_columns, num_summit_clients = calculate_right_triangle\n heights = (1..num_rising_columns).to_a.push(num_summit_clients).reverse\n arrange_columns heights\n end", "def get_other_row_or_col(position, size)\n size == 1 ? 0 : position % (size - 1)\nend", "def inner_up\n # looking up at the bottom of the inner grid\n return 0 if inner.nil?\n # sum bottom edge\n (0..@size-1).map { |x| inner.get_loc(x, @size - 1) ? 1 : 0 }.sum\n end", "def get_adjacent_walls tile\n adjacent_tiles = self.get_adjacent_tiles tile\n invalid_tiles = Array.new\n adjacent_tiles.each do |adj_tile|\n if (adj_tile.is_floor || adj_tile.x==0 || adj_tile.y==0 || adj_tile.x==@width-1 || adj_tile.y==@height-1)\n invalid_tiles.push adj_tile\n end\n end\n adjacent_tiles-invalid_tiles\n end", "def get_surrounding_tiles(row, column, cell)\n surrounding_tiles = []\n index = GAME_BOARD[column-1].index(cell)\n if column == 1 \n if index == 0\n surrounding_tiles << GAME_BOARD[column-1][index+1]\n surrounding_tiles << GAME_BOARD[column][index]\n elsif index == 8\n surrounding_tiles << GAME_BOARD[column-1][index-1]\n surrounding_tiles << GAME_BOARD[column][index]\n else\n surrounding_tiles << GAME_BOARD[column-1][index-1] \n surrounding_tiles << GAME_BOARD[column-1][index+1] \n surrounding_tiles << GAME_BOARD[column][index]\n end\n elsif column == 12\n if index == 0\n surrounding_tiles << GAME_BOARD[column-1][index+1]\n surrounding_tiles << GAME_BOARD[column-2][index] \n elsif index == 8\n surrounding_tiles << GAME_BOARD[column-1][index-1]\n surrounding_tiles << GAME_BOARD[column-2][index] \n else\n surrounding_tiles << GAME_BOARD[column-1][index-1] \n surrounding_tiles << GAME_BOARD[column-1][index+1] \n surrounding_tiles << GAME_BOARD[column-2][index]\n end\n elsif index == 0\n surrounding_tiles << GAME_BOARD[column-1][index+1] \n surrounding_tiles << GAME_BOARD[column][index]\n surrounding_tiles << GAME_BOARD[column-2][index]\n elsif index == 8\n surrounding_tiles << GAME_BOARD[column-1][index-1] \n surrounding_tiles << GAME_BOARD[column][index]\n surrounding_tiles << GAME_BOARD[column-2][index]\n else\n surrounding_tiles << GAME_BOARD[column-1][index+1] \n surrounding_tiles << GAME_BOARD[column-1][index-1] \n surrounding_tiles << GAME_BOARD[column][index]\n surrounding_tiles << GAME_BOARD[column-2][index]\n end\n\n surrounding_tiles\n end", "def adjacent_tiles row, col, arr\n arr.select do |tile|\n next (row - tile[0]).abs <= 1 && (col - tile[1]).abs <= 1\n end\n end", "def addHorizontal(player, num)\n col = -1 \n sum = 0\n @board.each do |row|#separates board into rows\n sum = 0\n col = 0\n gap = 0\n row.each do |index|#separates board into indexes\n if index == player\n sum += 1\n else\n sum = 0\n end\n if sum == num #Checks if sum combination has been reached and which side to add it on\n if col + 1 < 7 && row[col+1] == nil\n addDisc(player,col+1)\n return col + 1\n end\n if col - 3 > -1 && row[col-3] == nil\n addDisc(player,col-3)\n return col -3\n end\n end\n col +=1\n end\n end\n return -1 \n end", "def calculateAdjacent\r\n #Loops through every tile in the grid.\r\n (0..@rows-1).each do |row|\r\n (0..@columns-1).each do |col|\r\n count = getTier(col+1, row+1) + getTier(col+1, row) + getTier(col+1, row-1)\r\n count += getTier(col, row+1) + getTier(col, row-1) + getTier(col-1, row+1)\r\n count += getTier(col-1, row) + getTier(col-1, row-1)\r\n @tile[col][row].adjacent = count #Totals the tier number in the adjacent tiles.\r\n end\r\n end\r\nend", 
"def get_adjacent_tiles tile\n x = tile.x\n y = tile.y\n [self.get_tile(x,y+1), self.get_tile(x+1,y), self.get_tile(x,y-1), self.get_tile(x-1,y)]\n end", "def tile_inward\n rising, num_summit_clients, falling = calculate_equilateral_triangle\n\n # distribute extra clients in the middle\n summit = []\n if num_summit_clients > 0\n split = num_summit_clients / 2\n carry = num_summit_clients % 2\n summit = [split, carry, split].reject(&:zero?)\n\n # one client per column cannot be considered as \"tiling\" so squeeze\n # these singular columns together to create one giant middle column\n if summit.length == num_summit_clients\n summit = [num_summit_clients]\n end\n end\n\n arrange_columns rising + summit + falling\n end", "def gen_diagonal_up(row,col)\n array_diagonals =[]\n 0.upto(5).each do |num|\n if ( row - num < 0 || col + num > 6)\n break\n end\n\n array_diagonals << [row-num, col+num] \n end\n array_diagonals.map{|coordinates| @grid[coordinates[1]][coordinates[0]]}\n end", "def diagonals\n [board.map.with_index { |row, i| row[i] },\n board.map.with_index { |row, i| row[row.size - i - 1] }]\n end", "def seatsInTheater(nCols, nRows, col, row)\n (nCols - col + 1) * (nRows - row)\nend", "def part_of_col_down(starting, ending)\n col(starting[0])[(ending[1] + 1)..(starting[1] - 1)]\n end", "def pixels_to_tile (px, py)\n tx = ( px / @tile_size.to_f ).ceil.to_i - 1\n ty = ( py / @tile_size.to_f ).ceil.to_i - 1\n return tx, ty\n end", "def adjacent_rooms(map, y, x)\n \n north = map[y-1][x]\n south = map[y+1][x]\n west = map[y][x-1]\n east = map[y][x+1]\n\n [north, south, west, east]\nend", "def col(p, r)\n x = p * (p - 1)\n r = p - r - 1 if p.odd?\n cell(x - r)\n end", "def pixels_to_tile (px, py)\n tx = (( px / @tile_size.to_f ).ceil - 1 ).to_i\n ty = (( py / @tile_size.to_f ).ceil - 1 ).to_i\n return tx, ty\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Returns the PublicEarth::Db::Collection, rather than an Atlas::Collection. TODO: Finish conversion of PublicEarth::Db::Collection to Atlas::Collection.
def collection @collection ||= PublicEarth::Db::Collection.find_by_id!(collection_id) end
[ "def collection\n @collection ||= self.db.collection(self.collection_name)\n end", "def collection\n Jrodb::Collection::Basic.new(self)\n end", "def collections\n @collections ||= PublicEarth::Db::Collection.containing(self.id)\n end", "def collection\n return nil if embedded?\n @collection_name = self.to_s.demodulize.tableize\n @collection ||= Mongoid.database.collection(@collection_name)\n end", "def collection\n return nil unless collection_member?\n @collection ||= SolrDocument.new(\n Blacklight.solr.select(\n :params => {\n :fq => \"#{SolrDocument.unique_key}:\\\"#{self[blacklight_config.collection_member_identifying_field].first}\\\"\"\n }\n )[\"response\"][\"docs\"].first\n )\n end", "def collection\n raise Errors::InvalidCollection.new(self) if embedded\n self._collection ||= Mongoid::Collection.new(self, self.collection_name)\n add_indexes; self._collection\n end", "def collection\n @collection ||= mongo.collection(Boom.config.attributes[\"mongodb\"][\"collection\"])\n end", "def collection\n if @collection.kind_of? Proc\n @collection = @collection.call\n ensure_indices if @collection.kind_of? Mongo::Collection\n end\n return @collection\n end", "def collection\n @collection ||= build_collection\n end", "def collection\n root = _base._root\n root.collection unless root.embedded?\n end", "def collection\n root = base._root\n root.collection unless root.embedded?\n end", "def getcollection(path)\n path = check_slashes(path)\n col = Collection.new(@client, path)\n return col\n rescue => e\n raise e \n end", "def collection\n mongo_client[collection_name]\n end", "def collection(name)\n return Collection.new(self, name) if !strict? || collection_names.include?(name)\n raise \"Collection #{name} doesn't exist. Currently in strict mode.\"\n end", "def collection\n Rails.logger.info \"XXXXX COLLECTION NAME #{collection_hash['collection name']}\"\n\n @collection ||= CollectionCreator.find_or_create_collection(\n collection_hash['collection name'],\n collection_hash['unit name'],\n collection_hash['collection description'],\n submitter_user_key\n )\n end", "def collection\n if opts = persistence_options\n coll = mongo_session.with(opts)[opts[:collection] || collection_name]\n clear_persistence_options unless validating_with_query? || _loading_revision?\n coll\n else\n mongo_session[collection_name]\n end\n end", "def db\n collection.db\n end", "def collection_for_query(query)\n collection_name = collection_name_for_query(query)\n unless @db.collection_names.include?(collection_name)\n setup_collection(collection_name)\n end\n @db[collection_name]\n end", "def collections() @collections ||= Muddyit::Collections.new(self) end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
We rewrite the provided canvas ids to: (1) ensure the canvas id exists in the manifest (if it doesn't, Mirador puts the user into a weird initial state), and (2) rewrite precocina canvas ids to postcocina canvas ids as appropriate (to avoid breaking embeds that used to work). rubocop:disable Metrics/AbcSize
def canvas_id return if request.canvas_id.blank? if canvases.any? { |canvas| canvas['@id'] == request.canvas_id } request.canvas_id elsif cocinafied_canvases? && request.canvas_id.exclude?('cocina-fileSet') cocinafied_canvas_id else Honeybadger.notify( "Unable to find requested canvas id '#{request.canvas_id}' in manifest for #{purl_object.druid}" ) nil end end
[ "def testGetCanvasIDs\n canvasIDs = Canvas.getCanvasIDs()\n assert_equal(\"1|2|3\", canvasIDs, \"Canvas ids were not correctly retrieved/output\")\n Canvas.create(bitmap_id: 1, user_username: 'mylittlelucas888', active: false)\n assert_equal(canvasIDs, Canvas.getCanvasIDs, \"An inactive Canvas record's id was incorrectly retrieved/output\")\n Canvas.last.destroy\n end", "def canvas_images\n @canvas_images ||= manifest_canvases.map do |x|\n Canvas.new(x)\n end\n end", "def save_canvas_params\n save_canvas_objects\n\n if canvas_options\n update_canvas_options(canvas_options, board_owner_id)\n end\n end", "def manifest_canvases\n first_manifest_sequence[\"canvases\"]\n end", "def replace_canvas!(new_width, new_height, new_pixels); end", "def merge_canvases\n x_range.count.times do |x|\n y_range.count.times do |y|\n tile = tile_canvas[x][y]\n if tile\n tiles[x][y].type = tile.type if tile.type\n tiles[x][y].rotation = tile.rotation if tile.rotation\n tiles[x][y].color = tile.color if tile.color\n end\n end\n end\n\n x_range.count.times do |x|\n y_range.count.times do |y|\n z_range.count.times do |z|\n block = block_canvas[x][y][z]\n if block\n blocks[x][y][z].type = block.type if block.type\n blocks[x][y][z].rotation = block.rotation if block.rotation\n blocks[x][y][z].color = block.color if block.color\n end\n end\n end\n end\n end", "def remove_local_courses_missing_from_canvas(canvas_courses)\n canvas_courses.each do |course_id|\n local_course = CanvasCourse.where(sis_course_id: course_id).first\n local_course.remove_all_canvas_events\n local_course.delete\n end\nend", "def old_platform_ids\n begin\n campaign = Campaign.find(params[:id])\n @old_platform_ids = campaign.platforms.map{|p| p.id}\n rescue\n @old_platform_ids = []\n end\n end", "def delete_canvas_events(course)\n course.canvas_events.each do |event|\n delete_canvas_event(event)\n end\nend", "def getAnnoWOriginalCanvas inputAnno\n return if inputAnno.nil?\n p \"in getAnnoWOriginalCanvas: current anno_id: #{inputAnno.annotation_id} and current canvas: #{inputAnno.canvas}\"\n return(inputAnno) if (inputAnno.canvas.to_s.include?('/canvas/'))\n\n #targetAnnotation = Annotation.where(canvas:inputAnno.canvas).first\n targetAnnotation = Annotation.where(annotation_id:inputAnno.canvas).first\n\n getAnnoWOriginalCanvas targetAnnotation\n end", "def attachment_ids!(aids)\n @nbuild.AttachmentIds {\n @nbuild.parent.default_namespace = @default_ns\n aids.each do |aid|\n attachment_id!(aid)\n end\n }\n end", "def canvas_from_id(image_id, label, document)\n image_id_suffix = image_id.gsub(/\\A[\\w-]+:/, '/')\n annotation = image_annotation_from_image_id(image_id, document)\n canvas = IIIF::Presentation::Canvas.new('@id' => \"#{document[:identifier_uri_ss]}/canvas#{image_id_suffix}\")\n canvas.label = label\n canvas.width = annotation.resource['width']\n canvas.height = annotation.resource['height']\n canvas.images << annotation\n # Mirador has a bug where Canvas thumbnails crash the viewer\n # https://github.com/ProjectMirador/mirador/issues/1452\n # uncomment below if this gets fixed at some point\n # canvas_thumb_svc = canvas.images.first.resource.service\n # canvas.thumbnail = {'@id' => document[:identifier_uri_ss].gsub(/\\/[\\w]+\\z/, image_id_suffix) + \"/thumbnail\",\n # 'service' => canvas_thumb_svc}\n canvas\n end", "def regenerate_applicability_by_ids(ids)\n criteria = {\n 'consumer_criteria' => { 'filters' => { 'id' => { '$in' => ids } } }\n }\n regenerate_applicability(criteria)\n end", "def white_canvas_composite\n trans = []\n 
trans << \"convert \" <<\n trans\n end", "def adam7_merge_pass(pass, canvas, subcanvas); end", "def canvas_uri_for_annotation\n \"#{base_url}/manifest/canvas/#{file_set_id}#{coordinates}\"\n end", "def in_canvas?\n !!env['facebook.in_canvas']\n end", "def getTargetingAnnosCanvas inputAnno\n return if inputAnno.nil?\n p \"in getTargetingAnnosCanvas: current anno_id: #{inputAnno.annotation_id} and current canvas: #{inputAnno.canvas}\"\n return(inputAnno.canvas) if (inputAnno.canvas.to_s.include?('/canvas/'))\n\n #targetAnnotation = Annotation.where(canvas:inputAnno.canvas).first\n targetAnnotation = Annotation.where(annotation_id:inputAnno.canvas).first\n\n getTargetingAnnosCanvas targetAnnotation\n end", "def manage_doc_ids\n # Register the docid preprocessor hook\n Giblish.register_docid_extension\n\n # Make sure that no prior docid's are hangning around\n DocidCollector.clear_cache\n DocidCollector.clear_deps\n idc = DocidCollector.new\n\n # traverse the src file tree and collect ids from all\n # .adoc or .ADOC files\n if @paths.src_root_abs.directory?\n Find.find(@paths.src_root_abs) do |path|\n p = Pathname.new(path)\n idc.parse_file(p) if adocfile? p\n end\n end\n idc\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
true if the message with this message_id has already been seen
def message_seen?(message_id) self.seen_messages.include?(message_id) end
[ "def check_for_duplicate_message_id\n old_mail_log = nil\n user_thread.schedule do\n old_mail_log = user.mail_logs.find_by_message_id(message_id)\n end\n\n if old_mail_log\n Log.librato(:count, \"system.process_uid.duplicate_message_id\", 1)\n return false\n else\n return true\n end\n end", "def key_exists?\n old_message = !@store.msetnx(msg_id, :status =>\"incomplete\", :expires => @expires_at.to_i, :timeout => (now + timeout).to_i)\n if old_message\n logger.debug \"Beetle: received duplicate message: #{msg_id} on queue: #{@queue}\"\n end\n old_message\n end", "def found_duplicate?(msg_data)\n return false unless msg_data && msg_data[:queue]\n timeout = self.queue_timeout(msg_data)\n begin\n !@memcached_client.add(msg_data[:id], \"1\",timeout)\n rescue StandardError => e\n if @dupe_on_cache_failure\n Chore.logger.error \"Error accessing duplicate cache server. Assuming message is a duplicate. #{e}\\n#{e.backtrace * \"\\n\"}\"\n true\n else\n Chore.logger.error \"Error accessing duplicate cache server. Assuming message is not a duplicate. #{e}\\n#{e.backtrace * \"\\n\"}\"\n false\n end\n end\n end", "def message_in_session?(message)\n session_message_ids.include?(message.id)\n end", "def user_exists_in_thread(message_id)\n it = Message.find(message_id)\n\n begin\n return true if it.to_user_id == current_user.id\n it = it.previous_message\n end while !it.previous_message.nil?\n\n false\n rescue\n # No message found, new thread, allow reply (new message in this case)\n true\n end", "def marked_as_duplicate?\n !self.duplicate_of_id.blank?\n end", "def has_message?\n has_message\n # && messages.count > 0\n end", "def seen?(object)\n @seen.key?(object.__id__)\n end", "def has_seen\n if self.users.include?(user)\n return true\n else \n return false\n end\n end", "def has_message_id?\n header.has_message_id?\n end", "def has_message?(mess_id)\r\n found = false\r\n @messages.each {|m| found = true if m.message_id.to_s == mess_id.to_s}\r\n return found\r\n end", "def already_received?(email, raw_email_data)\n message_id = email.message_id\n if message_id.nil?\n raise \"No message id for this message\"\n end\n\n for im in self.incoming_messages\n other_message_id = im.mail.message_id\n if message_id == other_message_id\n return true\n end\n end\n\n return false\n end", "def seen?(event)\n res = seen_events.include? event.id\n seen_events << event.id\n res\n end", "def recorded?(message)\n @recorded.has_key? message.id\n end", "def unique_hash_already_exists?\n return Entry.where(feed_id: self.feed_id, unique_hash: self.unique_hash).exists?\n end", "def has_message_id?; end", "def has_message_id?\n !fields.select { |f| f.responsible_for?('Message-ID') }.empty?\n end", "def scrub_duplicate_message(message)\n messages do |collection|\n idx = collection[message.connection.identifier].index do |msg|\n msg.message_id == message.message_id\n end\n if(idx)\n msg = collection[message.connection.identifier].delete_at(idx)\n if(removal_callback)\n consumer.send(removal_callback, [message])\n end\n true\n else\n false\n end\n end\n end", "def guid_already_exists?\n return Entry.where(feed_id: self.feed_id, guid: self.guid).exists?\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
store the remembered message ids in a yaml file
def store_remembered_messages if configuration.single_delivery File.open(seen_messages_file, 'w') do |file| file.write seen_messages.to_yaml end end end
[ "def restore_remembered_messages\n if configuration.single_delivery\n yaml = default_yaml = \"--- []\"\n begin\n File.open(seen_messages_file, 'r') do |file|\n yaml = file.read\n end\n rescue Object\n yaml = default_yaml\n end\n buffer = []\n begin\n buffer = YAML.load(yaml)\n if !buffer.kind_of?(Array) or !buffer.all?{ |x| x.kind_of?(String)}\n raise Exception, \"Invalid seen_messages.yml file\"\n end\n rescue Object\n buffer = []\n end\n self.seen_messages = buffer\n end\n end", "def write_data\n # Convert from Message objects to raw hashes\n hashes = []\n @messages.each {|k, v| hashes.push v.to_hash}\n File.open(@file, \"w\") {|f| f.puts hashes.to_yaml}\n end", "def init_uid_tracker\n uid_saves = {}\n @conf['mail_jobs'].each { |k, _v| uid_saves[k] = { last_uid: 1 } }\n File.open(@conf['uid_file'], 'w') do |f|\n f.write(JSON.pretty_generate(uid_saves))\n end\nend", "def store_download_history()\n file = File.open(HISTORY_FILE, \"w\")\n file.write $download_history.to_yaml\n file.close\nend", "def save_message(preamble_hash,msg,id)\n # Use @msg_contents instead of msg.body because body may have\n # already been mutated by this point.\n mess = {'body'=>@msg_contents, 'preamble'=>preamble_hash}\n mess = mess.merge(msg[:attributes]) unless msg.kind_of?(String)\n File.write(\"#{id}.message\",mess.to_yaml)\n end", "def save_messages(*keys)\n session[:messages].merge(keys)\n end", "def last_seen_message_ids\n @last_seen_message_ids ||= Hash.new(0)\n end", "def save_tokens(tokens)\n f = File.open(TOKEN_PATH, 'w')\n YAML::dump(tokens, f)\n f.close\n end", "def store_original_payload(payload)\n key = File.join('command-control', payload[:message_id])\n asset_store.put(key, MultiJson.dump(payload))\n key\n end", "def save_ids(id, file)\n entry = \"#{id},#{file}\"\n return if File.exists? IDS and not File.read(IDS).scan(entry).empty?\n File.open(IDS, 'a') {|f| f.puts entry}\n end", "def save_keys\n key_file = File.open(key_file_path, 'w')\n\n key_file.write(YAML::dump(@@keys))\n\n key_file.close\n end", "def create hash\n File.open(\"#{ENV['HOME']}/.aem.yaml\", 'w') {|f| f.write contents.to_yaml }\n end", "def save_hash\n File.open(\"configs/hermithash.yaml\", \"w\") {|fi| YAML.dump(@hermithash, fi)}\n warn(\"Saved hermit tracker hash\")\n end", "def save_yml_test_case_map()\n new_test_case_map_path = self.class.test_case_map_path + '.new'\n yaml_content = @tc_id_map.to_yaml\n File.open(new_test_case_map_path, \"w:UTF-8\"){ |file| file.write(yaml_content) } if yaml_content\n FileUtils.mv(self.class.test_case_map_path, self.class.test_case_map_path + \".bak\")\n FileUtils.mv(new_test_case_map_path, self.class.test_case_map_path)\n end", "def save\r\n File.open(\"./storage/books.yml\", \"w\") do |f|\r\n YAML.dump(@available_books,f)\r\n f.close\r\n end\r\n File.open(\"./storage/users.yml\", \"w\") do |f|\r\n YAML.dump(@users,f)\r\n f.close\r\n end\r\n end", "def save_todos\n File.open(@filename,'w') do |file|\n YAML.dump(@todos,file)\n end\n end", "def save(file)\n serialized_vars = []\n vars.each do |k, v|\n if marked_for_save.include?(k)\n serialized_vars << { 'name' => k, 'value' => v }\n end\n end\n File.open(file, 'w') do |out|\n YAML.dump(serialized_vars, out)\n end\n end", "def store ( yml, name )\n storefile = File.new( name, \"w\")\n storefile.puts(yml)\n storefile.close\n return [yml]\n end", "def save\n open @config_path, 'w' do |io|\n io.write({'files' => @files.collect(&:to_hash)}.to_yaml)\n end\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
reload remembered message ids from a yaml file
def restore_remembered_messages if configuration.single_delivery yaml = default_yaml = "--- []" begin File.open(seen_messages_file, 'r') do |file| yaml = file.read end rescue Object yaml = default_yaml end buffer = [] begin buffer = YAML.load(yaml) if !buffer.kind_of?(Array) or !buffer.all?{ |x| x.kind_of?(String)} raise Exception, "Invalid seen_messages.yml file" end rescue Object buffer = [] end self.seen_messages = buffer end end
[ "def store_remembered_messages\n if configuration.single_delivery\n File.open(seen_messages_file, 'w') do |file|\n file.write seen_messages.to_yaml\n end\n end\n end", "def regenerate_ids\n @regenerate_ids\n end", "def refresh_ids\n\t\t# Find all puzzle IDs retrieved\n\t\tpuzzle_files = Dir[\"./puzzles/*.pdf\"]\n\t\tids = puzzle_files.map do |path|\n\t\t\tpath.split(\"/puzzle-\").last.split(\".\").first\n\t\tend\n\t\t@puzzle_ids = Set.new(ids)\n\n\t\t# Find all puzzle IDs mailed in packets\n\t\tpuzzle_packet_files = Dir[\"./puzzles/packets/*.pdf\"]\n\t\tid_groups = puzzle_packet_files.map do |path|\n\t\t\tpath.split(\"/\").last.split(\".\").first.split(\"-\")\n\t\tend\n\t\t@puzzle_packet_ids = Set.new(id_groups.flatten)\n\tend", "def restore_or_generate_from_file\n existing_data = load_from_file\n if existing_data && existing_data['myid']\n existing_data['myid']\n else\n myid = generate_myid\n save_to_file({ 'myid' => myid })\n\n myid\n end\n end", "def load(filename)\n update! YAML.load_file(filename)\n end", "def reload_responses\n self.config = YAML::load(File.open(RESPONSES_FILENAME))\n end", "def student_id_mapping\n if @student_ids.empty? && File.exist?(MAPPING_FILE)\n puts \"Loading existing ID mappings from #{MAPPING_FILE}\"\n @student_ids = YAML.load_file(MAPPING_FILE)\n end\n @student_ids\nend", "def fill_from_yaml_file(path)\n fill_from_hash read_from_yaml(path)\n end", "def modify_and_load_yamlfile(filepath)\n content = File.read(filepath).split(/[\\r\\n]+/)\n first_line = content.shift\n unless first_line == '--- !ruby/object:Puppet::Node::Facts'\n raise HieraSimulator::FactSourceError, \"#{filepath} was not in expected Puppet::Node::Facts format\"\n end\n content.unshift '---'\n fixed_yaml = content.map { |line| line.sub(/^ /, '') }\n data = YAML.load(fixed_yaml.join(\"\\n\"))\n raise HieraSimulator::FactLookupError, \"Invalid content in node fact file #{filepath}\" unless data.key?('values')\n result = {}\n data['values'].each { |k, v| result['::' + k] = v }\n result\n end", "def reload\n @@languages.each do |lang_code, lang_file|\n lang_file.reload\n end\n end", "def reload\n old_id = @id\n @id = nil\n\n if @data_dir\n # Read the id file from the data directory if it exists as the\n # ID for the pre-existing physical representation of this machine.\n id_file = @data_dir.join(\"id\")\n id_content = id_file.read.strip if id_file.file?\n if !id_content.to_s.empty?\n @id = id_content\n end\n end\n\n if @id != old_id && @provider\n # It changed, notify the provider\n @provider.machine_id_changed\n end\n\n @id\n end", "def update_unique_ids(env, content); end", "def parse\n return unless @mbox\n mbox = YAML.load_file(yaml_file) || {} rescue {}\n return if mbox[:mtime] == File.mtime(@mbox)\n\n # open the YAML file for real (locking it this time)\n self.update do |mbox|\n mbox[:mtime] = File.mtime(@mbox)\n\n # process each message in the mailbox\n self.each do |message|\n # compute id, skip if already processed\n id = Message.hash(message)\n mbox[id] ||= Message.parse(message)\n end\n end\n end", "def deserialize(file, crossword_store)\n s = YAML.load file\n\n # When they haven't picked a crossword yet, serialize() outputs an\n # empty file. 
YAML.load parses this file as boolean false.\n return unless s\n\n name = s['crossword_name']\n if not name\n puts \"Session is missing crossword name; not restoring\"\n return\n end\n\n ce = crossword_store.get_entry name\n if not ce\n puts \"Couldn't find crossword '#{name}' to restore session\"\n return\n end\n load_crossword ce, name\n\n s['cell_rows'].each_with_index do |r, y|\n r.split('').each_with_index do |ch, x|\n cell = @cells[x][y]\n cell.letter = ch.upcase\n cell.guess = ('a'..'z').include? ch\n cell.uid = nil\n end\n end\n @letters_version += 1\n end", "def yml_to_po\r\nDir.glob(Rails.root.join('po', self.locale_code, '*.po')).each do |t|\r\n @trans_file = File.basename(t)\r\n my_file = Rails.root.join('po', self.locale_code, \"#{@trans_file}_trans.yml\")\r\n msgid\t = { @trans_file => {} }\r\n if File.exist? my_file\r\n msgid_temp = YAML.load(File.open(my_file, 'r'))\r\n if msgid_temp\r\n msgid_temp.each do |l, ldata|\r\n mmsgid = l.gsub(/\\n/, \"\\\\n\").gsub(/\\r/,'')\r\n msgid[@trans_file][mmsgid] = ldata.gsub(/\\r/,'')\r\n end\r\n end\r\n self.generate_po_files(msgid)\r\n end\r\nend\r\nend", "def load_notes(ids)\n if ids.empty?\n []\n else\n $redis.mget(*ids.map { |id| \"note-#{id}\" }).map { |raw| Marshal.load(raw) }\n end\nend", "def load(filename)\n files= filename.include?(',') ? filename.split(',') : [filename]\n @yml = files.inject({}) do |total_merge, file|\n data = yml_key_include(::YAML.load(include_yml(file)))\n total_merge.merge!(data) if data\n end\n end", "def get_message\n File.open @config.data do |file|\n self[:message] = YAML.load(file).sample\n end\n end", "def load_yml(filename); end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
acknowledge message (if headers["ack"] == "client")
def ack(subscription_headers, message) #p [:ack, message.headers["message-id"]] if message.headers["message-id"].to_s.strip != "" && subscription_headers["ack"].to_s == "client" SMQueue.dbg { [:smqueue, :ack, :message, message].inspect } connection.ack message.headers["message-id"], { } else SMQueue.dbg { [:smqueue, :ack, :not_acknowledging, message].inspect } end if ENV['PAUSE_SMQUEUE'] $stderr.print "press enter to continue> " $stderr.flush $stdin.gets end end
[ "def client_ack?(message)\n headers = @subscriptions[message.headers[:destination]]\n !headers.nil? && headers[:ack] == \"client\"\n end", "def send_ack(message)\n @client.ack(message)\n end", "def ack\n @mq.callback do\n @mq.send Protocol::Basic::Ack.new({ :delivery_tag => @header_obj.properties[:delivery_tag] })\n end\n end", "def acknowledge\n # this is a stub because Adyen does not support this feature, as of 2009-10-12\n true\n end", "def send_ack\n sock.put('+')\n vprint_status('Sending ack...')\n end", "def ack\n @server.send(Protocol::Basic::Ack.new(:delivery_tag => properties[:delivery_tag]))\n end", "def acknowledge(authcode = nil)\n payload = raw\n\n uri = URI.parse(Sinopac.notification_confirmation_url)\n\n request = Net::HTTP::Post.new(uri.path)\n\n request[\"Content-Length\"] = payload.size.to_s\n request[\"User-Agent\"] = \"Active Merchant -- http://activemerchant.org/\"\n request[\"Content-Type\"] = \"application/x-www-form-urlencoded\"\n\n http = Net::HTTP.new(uri.host, uri.port)\n http.verify_mode = OpenSSL::SSL::VERIFY_NONE unless @ssl_strict\n http.use_ssl = true\n\n response = http.request(request, payload)\n\n # Replace with the appropriate codes\n raise StandardError.new(\"Faulty Sinopac result: #{response.body}\") unless [\"AUTHORISED\", \"DECLINED\"].include?(response.body)\n response.body == \"AUTHORISED\"\n end", "def send_nack(message)\n @client.nack(message)\n end", "def ack?\n @bytes.first == Message::ACK\n end", "def acknowledge\n api_call(\"acknowledge\")\n self\n end", "def acknowledge\n payload = @raw\n noti_confirm = Alipay.notification_confirmation_url\n noti_confirm += (\"partner=\" + Alipay::ACCOUNT.to_s + \"&notify_id=\" + notify_id)\n PAYMENT_LOG.info \"===============acknowledge init by jumaimai ============\"\n PAYMENT_LOG.info noti_confirm \n uri = URI.parse(noti_confirm)\n result = Net::HTTP.get(uri)\n PAYMENT_LOG.info \"---------acknowledge result #{result} ----------------\"\n result == \"true\"\n end", "def sendAck\n $LOG.debug \"Sending ACK\"\n send_msg ack Messages::EndType::ACK\n end", "def ack!\n #:doc:\n logger.debug \"Beetle: ack! for message #{msg_id}\"\n header.ack\n return if simple?\n if !redundant? 
|| @store.incr(msg_id, :ack_count) >= 2\n # we test for >= 2 here, because we retry increments in the\n # dedup store so the counter could be greater than two\n @store.del_keys(msg_id)\n end\n end", "def acknowledge( message )\n @queue.acknowledge( message )\n end", "def acknowledge!\n ensure_subscription!\n subscription.acknowledge ack_id\n end", "def acknowledge(authcode = nil)\n payload = raw\n\n response = ssl_post(Nochex.notification_confirmation_url, payload,\n 'Content-Length' => \"#{payload.size}\",\n 'User-Agent' => \"Active Merchant -- http://activemerchant.org\",\n 'Content-Type' => \"application/x-www-form-urlencoded\"\n )\n\n raise StandardError.new(\"Faulty Nochex result: #{response}\") unless [\"AUTHORISED\", \"DECLINED\"].include?(response)\n\n response == \"AUTHORISED\"\n end", "def acknowledge(args = {})\n sync = args[:sync] || false\n message = args[:message] if args[:message]\n\n unless message.nil?\n @session_impl.acknowledge message.message_impl, sync\n else\n @session_impl.acknowledge sync\n end\n end", "def ack(opts={})\n\t\t\t# Set delivery tag\n\t\t\tdelivery_tag = opts.delete(:delivery_tag)\n\t\t\tdelivery_tag ||= self.delivery_tag\n\t\t\traise Bunny::AcknowledgementError, \"No delivery tag received\" unless delivery_tag\n\t\t\t\n client.send_frame(\n Qrack::Protocol09::Basic::Ack.new({:delivery_tag => delivery_tag, :multiple => false}.merge(opts))\n )\n\n\t\t\t# reset delivery tag\n\t\t\tself.delivery_tag = nil if self.delivery_tag == delivery_tag\n end", "def test_0010_ack_send_receive\n ack_client_test(:ackparm => \"ack\", :times => 1, :mod => \"0010\")\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
get message from queue. If block supplied, loop forever and yield(message) for each message received. Default headers are: :ack => "client", :client_id => configuration.client_id, :subscription_name => configuration.subscription_name
def get(headers = {}, &block) self.connect SMQueue.dbg { [:smqueue, :get, headers].inspect } subscription_headers = {"ack" => "client", "activemq.prefetchSize" => 1 } if client_id = configuration.client_id subscription_headers["client_id"] = client_id end if sub_name = configuration.subscription_name subscription_headers["subscription_name"] = sub_name end # if a client_id is supplied, then user wants a durable subscription # N.B. client_id must be unique for broker subscription_headers.update(headers) #p [:subscription_headers_before, subscription_headers] subscription_headers = normalize_keys(subscription_headers) if configuration.durable and client_id = configuration.client_id || subscription_headers["client_id"] subscription_name = configuration.subscription_name || subscription_headers["subscription_name"] || client_id # activemq only subscription_headers["activemq.subscriptionName"] = subscription_name # JMS subscription_headers["durable-subscriber-name"] = subscription_name end #p [:subscription_headers_after, subscription_headers] destination = configuration.name SMQueue.dbg { [:smqueue, :get, :subscribing, destination, :subscription_headers, subscription_headers].inspect } connection.subscribe destination, subscription_headers message = nil SMQueue.dbg { [:smqueue, :get, :subscription_headers, subscription_headers].inspect } begin # TODO: refactor this if block_given? SMQueue.dbg { [:smqueue, :get, :block_given].inspect } # todo: change to @running - (and set to false from exception handler) # also should check to see if anything left to receive on connection before bailing out while true SMQueue.dbg { [:smqueue, :get, :receive].inspect } # block until message ready message = connection.receive SMQueue.dbg { [:smqueue, :get, :received, message].inspect } case message.command when "ERROR" SMQueue.dbg { [:smqueue, :get, :ERROR, message].inspect } when "RECEIPT" SMQueue.dbg { [:smqueue, :get, :RECEIPT, message].inspect } else SMQueue.dbg { [:smqueue, :get, :yielding].inspect } if !message_seen?(message.headers["message-id"]) yield(message) end SMQueue.dbg { [:smqueue, :get, :message_seen, message.headers["message-id"]].inspect } message_seen message.headers["message-id"] SMQueue.dbg { [:smqueue, :get, :returned_from_yield_now_calling_ack].inspect } ack(subscription_headers, message) SMQueue.dbg { [:smqueue, :get, :returned_from_ack].inspect } end end else SMQueue.dbg { [:smqueue, :get, :single_shot].inspect } message = connection.receive SMQueue.dbg { [:smqueue, :get, :received, message].inspect } if !(message.command == "ERROR" or message.command == "RECEIPT") SMQueue.dbg { [:smqueue, :get, :message_seen, message.headers["message-id"]].inspect } message_seen message.headers["message-id"] SMQueue.dbg { [:smqueue, :get, :ack, message].inspect } ack(subscription_headers, message) SMQueue.dbg { [:smqueue, :get, :returned_from_ack].inspect } end end rescue Object => e SMQueue.dbg { [:smqueue, :get, :exception, e].inspect } handle_error e, "Exception in SMQueue#get: #{e.message}", caller ensure SMQueue.dbg { [:smqueue, :get, :ensure].inspect } SMQueue.dbg { [:smqueue, :unsubscribe, destination, subscription_headers].inspect } connection.unsubscribe destination, subscription_headers SMQueue.dbg { [:smqueue, :disconnect].inspect } connection.disconnect end SMQueue.dbg { [:smqueue, :get, :return].inspect } message end
[ "def process_single_message(queue)\n queue.subscribe(:ack => true) do |h, m|\n return if AMQP.closing?\n Minion.info(\"Received: #{queue_name}:#{m}, #{h}\")\n msg = Message.new(m, h)\n msg.content = block.call(msg)\n h.ack\n msg.callback\n Minion.execute_handlers\n end\n rescue Object => e\n Minion.alert(e)\n end", "def fetch queue_name, source_uri\n ReliableMsg::Queue.new(queue_name, :drb_uri => source_uri).get { |m|\n begin\n tx = Thread.current[ReliableMsg::Client::THREAD_CURRENT_TX]\n Thread.current[ReliableMsg::Client::THREAD_CURRENT_TX] = nil\n\n @logger.info { \"message fetched - <#{m.id}>\" }\n yield m\n\n ensure\n Thread.current[ReliableMsg::Client::THREAD_CURRENT_TX] = tx\n end\n }\n end", "def handle_messages(&block)\n msg = queue.receive_messages(:max_number_of_messages => sqs_polling_amount, :attribute_names => ['ApproximateReceiveCount'])\n messages = *msg\n\n messages.each do |message|\n unless duplicate_message?(message.message_id, message.queue_url, queue_timeout)\n block.call(message.message_id, message.receipt_handle, queue_name, queue_timeout, message.body, message.attributes['ApproximateReceiveCount'].to_i - 1)\n end\n Chore.run_hooks_for(:on_fetch, message.receipt_handle, message.body)\n end\n\n messages\n end", "def with_next(&block)\n \n messages = @queue.shift || []\n yield(messages) unless block.nil?\n messages\n end", "def get_message(queue, options={}, &block)\n if @queues[queue.to_sym]\n @queues[queue.to_sym].receive_message(options) do |header,message,pass_through|\n begin\n if AMQP.closing?\n logger.error(\"Message ignored; it will be redelivered later\")\n else\n block.call(header, message, pass_through)\n end\n rescue Exception => e\n logger.error(\"Error in agent #{@agent_name}: #{e}\")\n logger.error(e)\n logger.error(\"Stopping EM\")\n #FIXME surely this should run run_signal_handlers. Maybe.\n AMQP.stop { EM.stop }\n# ensure\n# run_signal_handlers\n end\n end\n else\n raise RuntimeError, \"No such Queue: #{queue} for #{@agent_name}\"\n end\n end", "def handle_messages\n messages = *disque.fetch(from: queue_name,timeout: 100,count: batch_size)\n messages.each do |queue,id,data|\n Chore.logger.debug \"Received #{id.inspect} from #{queue.inspect} with #{data.inspect}\"\n yield(id, queue, nil, data, 0)\n Chore.run_hooks_for(:on_fetch, id, data)\n end\n messages\n end", "def receive\n\n if @subscriptions.size > 0\n @retrier.do_work(@retry_policy[:config]) do\n queues_to_check = @subscriptions.size > 1 ? @subscriptions.keys.sort_by{rand} : @subscriptions.keys\n queues_to_check.each do |queue|\n if item = @kestrel.get(normalize(queue))\n # TODO: ActiveMessaging ought to provide a way to do messaging\n # without having to wrap the messages in another object\n #logger.debug(\"Got message from queue #{queue}: #{item}\") if logger && logger.debug?\n return Message.new({'destination' => queue}, item, queue)\n end\n end\n end\n end\n # Sleep a little to avoid a spin loop (ActiveMessaging Gateway ought to do this)\n sleep(@empty_queues_delay) if @empty_queues_delay && @empty_queues_delay > 0\n return nil\n end", "def each\n while message = read_message\n yield message\n end\n end", "def fetch_messages\n if !@worker_pool.worker_available? && @state.run?\n @worker_available.wait\n end\n return unless @worker_pool.worker_available? 
&& @state.run?\n\n begin\n args = [self.signals_redis_key, self.queue_redis_keys.shuffle, 0].flatten\n redis_key, encoded_payload = @client.block_dequeue(*args)\n if redis_key != @signals_redis_key\n @worker_pool.push(QueueItem.new(redis_key, encoded_payload))\n end\n rescue RuntimeError => exception\n log \"Error occurred while dequeueing\", :error\n log \"#{exception.class}: #{exception.message}\", :error\n (exception.backtrace || []).each{ |l| log(l, :error) }\n sleep 1\n end\n end", "def dequeue_each(&block)\n until @messages.empty?\n message = @messages.first\n\n yield message\n\n @size -= 1\n @bytesize -= message.bytesize\n @messages.shift\n end\n end", "def consume(&handler)\n while running?\n begin\n messages = handle_messages(&handler)\n sleep (Chore.config.consumer_sleep_interval) if messages.empty?\n rescue Aws::SQS::Errors::NonExistentQueue => e\n Chore.logger.error \"You specified a queue '#{queue_name}' that does not exist. You must create the queue before starting Chore. Shutting down...\"\n raise Chore::TerribleMistake\n rescue => e\n Chore.logger.error { \"SQSConsumer#Consume: #{e.inspect} #{e.backtrace * \"\\n\"}\" }\n end\n end\n end", "def receive(&block)\n print \"Receiving messages\"\n raise \"Missing mandatory block\" unless block\n begin\n while reply = @consumer.receive\n block.call(reply)\n reply.acknowledge\n end\n rescue Exception => exc\n p exc\n end\n end", "def next_message_from(queue)\n delivery_info, properties, body = queue.pop\n return nil if body.nil?\n Bunny::Message.new(delivery_info, properties, body)\n end", "def sharedque_sub(&blk)\n puts \"here sharedque_sub is called!\"\n consumer1=AMQP::Consumer.new(@sharedque_channel,@sharedque_queue) \n consumer1.consume.on_delivery do |metadata,payload| \n metadata.ack\n puts \"Received a message: #{payload}. \"\n blk.call(payload)\n end\n end", "def request_loop\n @request_queue.receive_messages.each do |msg|\n yield(get_request(msg))\n end\n end", "def fetch_loop\n send(\n @route.batch_mode ? :consume_each_batch : :consume_each_message\n ) do |message|\n yield(message)\n end\n end", "def each\n fail 'a block is required' unless block_given?\n until @client.llen(@queue_name) <= 0\n job = pop\n break if job.nil?\n Proc.new.call(job)\n end\n end", "def process_next_message\n @subscriptions.each_pair do |channel, session|\n queue = session[:queue]\n message = next_message_from(queue)\n\n process(channel, message) if message\n end\n end", "def receive_messages_from_subscribed_topics \n\t\t\tif block_given?\n\t\t\t\t\t#@client.get do |topic, message|\n\t\t\t\t\tget do |topic, message|\t\n\t\t\t\t\t\tyield(topic, message)\n\t\t\t\t\tend\n\t\t\telse \n\t\t\t\t#topic, message = @client.get\n\t\t\t\ttopic, message = get\n\t\t\t\treturn topic, message\n\t\t\tend\n\t\tend" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
GET /heights GET /heights.json
def index @heights = Height.all respond_to do |format| format.html # index.html.erb format.json { render json: @heights } end end
[ "def show\n @height = Height.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @height }\n end\n end", "def index\n @heights = Height.all\n end", "def getFullHeightFromInfoJson (url)\n data = open(url).read\n results = JSON.parse(data)\n puts results[\"height\"]\n return results[\"height\"]\nend", "def blocks_get_height\n height = @client.query_get \"blocks/getHeight\"\n end", "def index\n @actor_heights = ActorHeight.all\n end", "def heights\n object.heights.split(\",\").map(&:to_f)\n end", "def parse_heights\n \n begin\n @heights = YAML.load cached_heights\n rescue\n end\n \n end", "def show\n @height = Height.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @height }\n end\n end", "def new\n @height = Height.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @height }\n end\n end", "def height(latitude, longitude)\n get_lookup(:height).search(latitude, longitude)\n end", "def create\n @height = Height.new(height_params)\n\n respond_to do |format|\n if @height.save\n format.html { redirect_to @height, notice: 'Height was successfully created.' }\n format.json { render :show, status: :created, location: @height }\n else\n format.html { render :new }\n format.json { render json: @height.errors, status: :unprocessable_entity }\n end\n end\n end", "def height\n\t\treturn @api[\"height\"]\n\tend", "def destroy\n @height = Height.find(params[:id])\n @height.destroy\n\n respond_to do |format|\n format.html { redirect_to admin_heights_url }\n format.json { head :ok }\n end\n end", "def index\n @height_searches = HeightSearch.all\n end", "def index\n @attack_heights = AttackHeight.all\n end", "def height()\n\t\t@pokemon_api[\"height\"]\n\tend", "def destroy\n @height = Height.find(params[:id])\n @height.destroy\n\n respond_to do |format|\n format.html { redirect_to(heights_url) }\n format.xml { head :ok }\n end\n end", "def index\n @heightweights = Heightweight.all(:order=>'name')\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @heightweights }\n end\n end", "def update\n respond_to do |format|\n if @height.update(height_params)\n format.html { redirect_to @height, notice: 'Height was successfully updated.' }\n format.json { render :show, status: :ok, location: @height }\n else\n format.html { render :edit }\n format.json { render json: @height.errors, status: :unprocessable_entity }\n end\n end\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
GET /heights/1 GET /heights/1.json
def show @height = Height.find(params[:id]) respond_to do |format| format.html # show.html.erb format.json { render json: @height } end end
[ "def index\n @heights = Height.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @heights }\n end\n end", "def index\n @heights = Height.all\n end", "def getFullHeightFromInfoJson (url)\n data = open(url).read\n results = JSON.parse(data)\n puts results[\"height\"]\n return results[\"height\"]\nend", "def new\n @height = Height.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @height }\n end\n end", "def show\n @height = Height.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @height }\n end\n end", "def blocks_get_height\n height = @client.query_get \"blocks/getHeight\"\n end", "def create\n @height = Height.new(height_params)\n\n respond_to do |format|\n if @height.save\n format.html { redirect_to @height, notice: 'Height was successfully created.' }\n format.json { render :show, status: :created, location: @height }\n else\n format.html { render :new }\n format.json { render json: @height.errors, status: :unprocessable_entity }\n end\n end\n end", "def destroy\n @height = Height.find(params[:id])\n @height.destroy\n\n respond_to do |format|\n format.html { redirect_to admin_heights_url }\n format.json { head :ok }\n end\n end", "def height\n\t\treturn @api[\"height\"]\n\tend", "def update\n respond_to do |format|\n if @height.update(height_params)\n format.html { redirect_to @height, notice: 'Height was successfully updated.' }\n format.json { render :show, status: :ok, location: @height }\n else\n format.html { render :edit }\n format.json { render json: @height.errors, status: :unprocessable_entity }\n end\n end\n end", "def index\n @actor_heights = ActorHeight.all\n end", "def parse_heights\n \n begin\n @heights = YAML.load cached_heights\n rescue\n end\n \n end", "def height()\n\t\t@pokemon_api[\"height\"]\n\tend", "def destroy\n @height = Height.find(params[:id])\n @height.destroy\n\n respond_to do |format|\n format.html { redirect_to(heights_url) }\n format.xml { head :ok }\n end\n end", "def heights\n object.heights.split(\",\").map(&:to_f)\n end", "def create\n @height = Height.new(params[:height])\n\n respond_to do |format|\n if @height.save\n format.html { redirect_to(@height, :notice => 'Height was successfully created.') }\n format.xml { render :xml => @height, :status => :created, :location => @height }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @height.errors, :status => :unprocessable_entity }\n end\n end\n end", "def destroy\n @height.destroy\n respond_to do |format|\n format.html { redirect_to heights_url, notice: 'Height was successfully destroyed.' }\n format.json { head :no_content }\n end\n end", "def height(latitude, longitude)\n get_lookup(:height).search(latitude, longitude)\n end", "def create\n @height = Height.new(params[:height])\n\n respond_to do |format|\n if @height.save\n format.html { redirect_to [:admin,@height], notice: 'Height was successfully created.' }\n format.json { render json: @height, status: :created, location: @height }\n else\n format.html { render action: \"new\" }\n format.json { render json: @height.errors, status: :unprocessable_entity }\n end\n end\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
GET /heights/new GET /heights/new.json
def new @height = Height.new respond_to do |format| format.html # new.html.erb format.json { render json: @height } end end
[ "def create\n @height = Height.new(height_params)\n\n respond_to do |format|\n if @height.save\n format.html { redirect_to @height, notice: 'Height was successfully created.' }\n format.json { render :show, status: :created, location: @height }\n else\n format.html { render :new }\n format.json { render json: @height.errors, status: :unprocessable_entity }\n end\n end\n end", "def new\n @height = Height.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @height }\n end\n end", "def create\n @height = Height.new(params[:height])\n\n respond_to do |format|\n if @height.save\n format.html { redirect_to(@height, :notice => 'Height was successfully created.') }\n format.xml { render :xml => @height, :status => :created, :location => @height }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @height.errors, :status => :unprocessable_entity }\n end\n end\n end", "def create\n @height = Height.new(params[:height])\n\n respond_to do |format|\n if @height.save\n format.html { redirect_to [:admin,@height], notice: 'Height was successfully created.' }\n format.json { render json: @height, status: :created, location: @height }\n else\n format.html { render action: \"new\" }\n format.json { render json: @height.errors, status: :unprocessable_entity }\n end\n end\n end", "def new\n @sizing = Sizing.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @sizing }\n end\n end", "def new\n @stage_length_weight = StageLengthWeight.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @stage_length_weight }\n end\n end", "def new\n @boat_height = BoatHeight.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @boat_height }\n end\n end", "def new\n @hood = Hood.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @hood }\n end\n end", "def new\n @hoge = Hoge.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @hoge }\n end\n end", "def new\n @life_jacket_age = LifeJacketAge.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @life_jacket_age }\n end\n end", "def new\n @housing = Housing.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @housing }\n end\n end", "def new\n @boot_size = BootSize.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @boot_size }\n end\n end", "def new\n @size = Size.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @size }\n end\n end", "def new\n @age_population = AgePopulation.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @age_population }\n end\n end", "def new\n @cloth = Cloth.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @cloth }\n end\n end", "def new\n @width = Width.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @width }\n end\n end", "def new\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @life_jacket }\n end\n end", "def new\n\n @herb = Herb.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @herb }\n end\n end", "def new\n @neighborhood = Neighborhood.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @neighborhood }\n end\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
POST /heights POST /heights.json
def create @height = Height.new(params[:height]) respond_to do |format| if @height.save format.html { redirect_to [:admin,@height], notice: 'Height was successfully created.' } format.json { render json: @height, status: :created, location: @height } else format.html { render action: "new" } format.json { render json: @height.errors, status: :unprocessable_entity } end end end
[ "def create\n @height = Height.new(height_params)\n\n respond_to do |format|\n if @height.save\n format.html { redirect_to @height, notice: 'Height was successfully created.' }\n format.json { render :show, status: :created, location: @height }\n else\n format.html { render :new }\n format.json { render json: @height.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @height = Height.new(params[:height])\n\n respond_to do |format|\n if @height.save\n format.html { redirect_to(@height, :notice => 'Height was successfully created.') }\n format.xml { render :xml => @height, :status => :created, :location => @height }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @height.errors, :status => :unprocessable_entity }\n end\n end\n end", "def index\n @heights = Height.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @heights }\n end\n end", "def create\n @actor_height = ActorHeight.new(actor_height_params)\n\n respond_to do |format|\n if @actor_height.save\n format.html { redirect_to @actor_height, notice: 'Actor height was successfully created.' }\n format.json { render :show, status: :created, location: @actor_height }\n else\n format.html { render :new }\n format.json { render json: @actor_height.errors, status: :unprocessable_entity }\n end\n end\n end", "def new\n @height = Height.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @height }\n end\n end", "def create\n @attack_height = AttackHeight.new(attack_height_params)\n\n respond_to do |format|\n if @attack_height.save\n format.html { redirect_to @attack_height, notice: 'Attack height was successfully created.' }\n format.json { render action: 'show', status: :created, location: @attack_height }\n else\n format.html { render action: 'new' }\n format.json { render json: @attack_height.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @height_search = HeightSearch.new(height_search_params)\n\n respond_to do |format|\n if @height_search.save\n format.html { redirect_to @height_search, notice: 'Height search was successfully created.' }\n format.json { render :show, status: :created, location: @height_search }\n else\n format.html { render :new }\n format.json { render json: @height_search.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @trailer_height = TrailerHeight.new(trailer_height_params)\n\n respond_to do |format|\n if @trailer_height.save\n format.html { redirect_to trailer_heights_path, notice: 'Trailer height was successfully created.' }\n format.json { render :show, status: :created, location: @trailer_height }\n else\n format.html { render :new }\n format.json { render json: @trailer_height.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @height.update(height_params)\n format.html { redirect_to @height, notice: 'Height was successfully updated.' 
}\n format.json { render :show, status: :ok, location: @height }\n else\n format.html { render :edit }\n format.json { render json: @height.errors, status: :unprocessable_entity }\n end\n end\n end", "def index\n @heights = Height.all\n end", "def create\n @boat_height = BoatHeight.new(params[:boat_height])\n\n respond_to do |format|\n if @boat_height.save\n format.html { redirect_to(@boat_height, :notice => 'Boat height was successfully created.') }\n format.xml { render :xml => @boat_height, :status => :created, :location => @boat_height }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @boat_height.errors, :status => :unprocessable_entity }\n end\n end\n end", "def destroy\n @height = Height.find(params[:id])\n @height.destroy\n\n respond_to do |format|\n format.html { redirect_to admin_heights_url }\n format.json { head :ok }\n end\n end", "def create\n @heightweight = Heightweight.new(params[:heightweight])\n\n respond_to do |format|\n if @heightweight.save\n format.html { redirect_to(@heightweight, :notice => 'Heightweight was successfully created.') }\n format.xml { render :xml => @heightweight, :status => :created, :location => @heightweight }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @heightweight.errors, :status => :unprocessable_entity }\n end\n end\n end", "def destroy\n @height.destroy\n respond_to do |format|\n format.html { redirect_to heights_url, notice: 'Height was successfully destroyed.' }\n format.json { head :no_content }\n end\n end", "def create\n megam_rest.post_billedhistories(to_hash)\n end", "def destroy\n @height = Height.find(params[:id])\n @height.destroy\n\n respond_to do |format|\n format.html { redirect_to(heights_url) }\n format.xml { head :ok }\n end\n end", "def show\n @height = Height.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @height }\n end\n end", "def create\n @heightweightvalue = Heightweightvalue.new(params[:heightweightvalue])\n\n respond_to do |format|\n if @heightweightvalue.save\n format.html { redirect_to(@heightweightvalue, :notice => 'Heightweightvalue was successfully created.') }\n format.xml { render :xml => @heightweightvalue, :status => :created, :location => @heightweightvalue }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @heightweightvalue.errors, :status => :unprocessable_entity }\n end\n end\n end", "def new\n @height = Height.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @height }\n end\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
PUT /heights/1 PUT /heights/1.json
def update @height = Height.find(params[:id]) respond_to do |format| if @height.update_attributes(params[:height]) format.html { redirect_to [:admin,@height], notice: 'Height was successfully updated.' } format.json { head :ok } else format.html { render action: "edit" } format.json { render json: @height.errors, status: :unprocessable_entity } end end end
[ "def update\n respond_to do |format|\n if @height.update(height_params)\n format.html { redirect_to @height, notice: 'Height was successfully updated.' }\n format.json { render :show, status: :ok, location: @height }\n else\n format.html { render :edit }\n format.json { render json: @height.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @height = Height.find(params[:id])\n\n respond_to do |format|\n if @height.update_attributes(params[:height])\n format.html { redirect_to(@height, :notice => 'Height was successfully updated.') }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @height.errors, :status => :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @actor_height.update(actor_height_params)\n format.html { redirect_to @actor_height, notice: 'Actor height was successfully updated.' }\n format.json { render :show, status: :ok, location: @actor_height }\n else\n format.html { render :edit }\n format.json { render json: @actor_height.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @boat_height = BoatHeight.find(params[:id])\n\n respond_to do |format|\n if @boat_height.update_attributes(params[:boat_height])\n format.html { redirect_to(@boat_height, :notice => 'Boat height was successfully updated.') }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @boat_height.errors, :status => :unprocessable_entity }\n end\n end\n end", "def create\n @height = Height.new(height_params)\n\n respond_to do |format|\n if @height.save\n format.html { redirect_to @height, notice: 'Height was successfully created.' }\n format.json { render :show, status: :created, location: @height }\n else\n format.html { render :new }\n format.json { render json: @height.errors, status: :unprocessable_entity }\n end\n end\n end", "def destroy\n @height = Height.find(params[:id])\n @height.destroy\n\n respond_to do |format|\n format.html { redirect_to admin_heights_url }\n format.json { head :ok }\n end\n end", "def update\n respond_to do |format|\n if @attack_height.update(attack_height_params)\n format.html { redirect_to @attack_height, notice: 'Attack height was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @attack_height.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @height = Height.new(params[:height])\n\n respond_to do |format|\n if @height.save\n format.html { redirect_to [:admin,@height], notice: 'Height was successfully created.' }\n format.json { render json: @height, status: :created, location: @height }\n else\n format.html { render action: \"new\" }\n format.json { render json: @height.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @trailer_height.update(trailer_height_params)\n format.html { redirect_to trailer_heights_path, notice: 'Trailer height was successfully updated.' 
}\n format.json { render :show, status: :ok, location: @trailer_height }\n else\n format.html { render :edit }\n format.json { render json: @trailer_height.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @heightweight = Heightweight.find(params[:id])\n\n respond_to do |format|\n if @heightweight.update_attributes(params[:heightweight])\n format.html { redirect_to(@heightweight, :notice => 'Heightweight was successfully updated.') }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @heightweight.errors, :status => :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @height_search.update(height_search_params)\n format.html { redirect_to @height_search, notice: 'Height search was successfully updated.' }\n format.json { render :show, status: :ok, location: @height_search }\n else\n format.html { render :edit }\n format.json { render json: @height_search.errors, status: :unprocessable_entity }\n end\n end\n end", "def destroy\n @height = Height.find(params[:id])\n @height.destroy\n\n respond_to do |format|\n format.html { redirect_to(heights_url) }\n format.xml { head :ok }\n end\n end", "def create\n @height = Height.new(params[:height])\n\n respond_to do |format|\n if @height.save\n format.html { redirect_to(@height, :notice => 'Height was successfully created.') }\n format.xml { render :xml => @height, :status => :created, :location => @height }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @height.errors, :status => :unprocessable_entity }\n end\n end\n end", "def show\n @height = Height.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @height }\n end\n end", "def height=(height)\n metadata.height = height\n end", "def height=(value)\n @height = value\n end", "def new\n @height = Height.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @height }\n end\n end", "def destroy\n @height.destroy\n respond_to do |format|\n format.html { redirect_to heights_url, notice: 'Height was successfully destroyed.' }\n format.json { head :no_content }\n end\n end", "def height=(new_height)\n self.dirty = true\n @s3obj.metadata[:imgheight] = new_height\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
DELETE /heights/1 DELETE /heights/1.json
def destroy @height = Height.find(params[:id]) @height.destroy respond_to do |format| format.html { redirect_to admin_heights_url } format.json { head :ok } end end
[ "def destroy\n @height = Height.find(params[:id])\n @height.destroy\n\n respond_to do |format|\n format.html { redirect_to(heights_url) }\n format.xml { head :ok }\n end\n end", "def destroy\n @height.destroy\n respond_to do |format|\n format.html { redirect_to heights_url, notice: 'Height was successfully destroyed.' }\n format.json { head :no_content }\n end\n end", "def destroy\n @boat_height = BoatHeight.find(params[:id])\n @boat_height.destroy\n\n respond_to do |format|\n format.html { redirect_to(boat_heights_url) }\n format.xml { head :ok }\n end\n end", "def destroy\n @heightweight = Heightweight.find(params[:id])\n @heightweight.destroy\n\n respond_to do |format|\n format.html { redirect_to(heightweights_url) }\n format.xml { head :ok }\n end\n end", "def destroy\n @attack_height.destroy\n respond_to do |format|\n format.html { redirect_to attack_heights_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @actor_height.destroy\n respond_to do |format|\n format.html { redirect_to actor_heights_url, notice: 'Actor height was successfully destroyed.' }\n format.json { head :no_content }\n end\n end", "def destroy\n @river_height_record.destroy\n respond_to do |format|\n format.html { redirect_to river_height_records_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @height_percentile = HeightPercentile.find(params[:id])\n @height_percentile.destroy\n\n respond_to do |format|\n format.html { redirect_to(height_percentiles_url) }\n format.xml { head :ok }\n end\n end", "def destroy\n @height_search.destroy\n respond_to do |format|\n format.html { redirect_to height_searches_url, notice: 'Height search was successfully destroyed.' }\n format.json { head :no_content }\n end\n end", "def destroy\n @hoge = Hoge.find(params[:id])\n @hoge.destroy\n\n respond_to do |format|\n format.html { redirect_to hoges_url }\n format.json { head :ok }\n end\n end", "def destroy\n @json.destroy\n\n head :no_content\n end", "def destroy\n @dust_bath.destroy\n respond_to do |format|\n format.html { redirect_to dust_baths_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @heightweightvalue = Heightweightvalue.find(params[:id])\n @heightweightvalue.destroy\n\n respond_to do |format|\n format.html { redirect_to(heightweightvalues_url) }\n format.xml { head :ok }\n end\n end", "def destroy\n @thickness.destroy\n respond_to do |format|\n format.html { redirect_to thicknesses_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @drumy = Drumy.find(params[:id])\n @drumy.destroy\n\n respond_to do |format|\n format.html { redirect_to drumies_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @trailer_height.destroy\n respond_to do |format|\n format.html { redirect_to trailer_heights_url, notice: 'Trailer height was successfully destroyed.' }\n format.json { head :no_content }\n end\n end", "def destroy\n @herb = Herb.find(params[:id])\n @herb.destroy\n\n respond_to do |format|\n format.html { redirect_to root_path }\n format.json { head :no_content }\n end\n end", "def destroy\n @bilder = Bilder.find(params[:id])\n @bilder.destroy\n\n respond_to do |format|\n format.html { redirect_to bilders_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @bath.destroy\n respond_to do |format|\n format.html { redirect_to baths_url, notice: 'Bath was successfully destroyed.' }\n format.json { head :no_content }\n end\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
update target craft sync attrs after moving a sync'd craft
def update_sync_targets old_campaign_id, new_campaign_id sync_with_campaigns = sync_targets return if sync_with_campaigns.blank? target_craft = Craft.where(:campaign_id => sync_with_campaigns, :name => self.name) target_craft.each do |c| next if c.sync[:with].blank? c.sync = {:with => c.sync[:with].map{|id| id.eql?(old_campaign_id) ? new_campaign_id : id } } c.save end end
[ "def update_removed_from_list \n if @removed_from_sync_list && !@removed_from_sync_list.blank?\n rem_craft = Craft.where(:name => self.name, :campaign_id => @removed_from_sync_list)\n rem_craft.each do |c|\n c.sync = {:with => c.sync[:with].select{|id| ![self.campaign_id, self.sync[:with]].flatten.include?(id) } }\n c.save\n end\n end\n end", "def update_transfer_player\n if !Layy_Meta.active\n update_transfer_player_mgc_lm\n elsif $game_player.transfer?\n Layy_Meta.check_translation_end(true)\n $game_player.update_lm_position\n $game_player.followers.each {|follower| follower.update_lm_position}\n update_transfer_player_mgc_lm\n end\n end", "def sync_volume_attributes\n step(\" updating volume attributes\")\n end", "def sync\n sync_attributes.each do |field, value|\n set field, value\n end\n end", "def update_attributes_and_sync(attributes)\n if update_attributes(attributes)\n update_salesforce_attributes(attributes)\n end\n end", "def sync_mirrors_with_regulated_box\n if respond_to?(:mirrors) && is_mirrored_with_regulated_box\n mirrors.each { |mirror| mirror.update_attributes(regulated: self.regulated) }\n end\n end", "def sync!(desired)\n # Ensure that the telegraf.conf item is always mounted\n items = [\n {\n key: 'telegraf.conf',\n path: 'telegraf.conf'\n },\n desired\n ].sort_by { |item| item[:path] }\n\n cm_volume = {\n name: @volume,\n configMap: {\n name: @configmap,\n defaultMode: 420,\n items: items\n }\n }\n\n patch = {\n spec: {\n template: {\n spec: {\n volumes: [cm_volume]\n }\n }\n }\n }\n\n @api\n .api('apps/v1')\n .resource('deployments', namespace: @ns)\n .merge_patch(@deployment, patch)\n\n # Refresh the current state\n fetch!\n end", "def sync\n build_fleet\n fetch_units\n true\n end", "def old_sync=(_arg0); end", "def touch_sync\n self.lastSyncAt = Time.now if self.respond_to?(:lastSyncAt=)\n end", "def arm_sync\n val = wordread(:sync_arm)\n val &= ~1 # Turn off desired bit\n wordwrite(:sync_arm, val)\n wordwrite(:sync_arm, val | 1)\n wordwrite(:sync_arm, val)\n end", "def update\n respond_to do |format|\n if @sync.update(sync_params)\n format.html { redirect_to @sync, notice: \"Sync source was successfully updated.\" }\n format.json { render :show, status: :ok, location: @sync }\n else\n format.html { render :edit, status: :unprocessable_entity }\n format.json { render json: @sync.errors, status: :unprocessable_entity }\n end\n end\n end", "def sync=(*) end", "def old_sync; end", "def sync_local_to_magento\n if self.sync_needed\n if !self.magento_id\n API::SOAP::Catalog::ProductAttribute.create(self)\n else\n API::SOAP::Catalog::ProductAttribute.update(self)\n end\n\n self.sync_needed = false\n self.save\n end\n end", "def sync!(source_placement)\n self.sync(source_placement)\n self.save\n end", "def update_asset\n\n Rails.logger.debug \"Updating the recorded location for asset = #{transam_asset.object_key}\"\n\n if transam_asset.location_updates.empty?\n transam_asset.location_id = nil\n else\n event = transam_asset.location_updates.last\n transam_asset.location_id = event.parent_id\n end\n # save changes to this asset\n transam_asset.save\n end", "def gen_sw_sync\n val = wordread(:sync_arm)\n val &= ~0x10 # Turn off desired bit\n wordwrite(:sync_arm, val)\n wordwrite(:sync_arm, val | 0x10)\n wordwrite(:sync_arm, val)\n end", "def update_target_cursor\n @cursor.update_target(@active_battler, @using_skill)\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
takes either a Campaign object or an id for a campaign and adds it to the crafts sync[:with] list
def sync_with campaign campaign_id = campaign.id if campaign.is_a?(Campaign) campaign_id ||= campaign sync_list = self.sync sync_list[:with] ||= [] sync_list[:with] << campaign_id self.sync = sync_list self.save end
[ "def update_sync_targets old_campaign_id, new_campaign_id\n sync_with_campaigns = sync_targets\n return if sync_with_campaigns.blank?\n\n target_craft = Craft.where(:campaign_id => sync_with_campaigns, :name => self.name)\n target_craft.each do |c|\n next if c.sync[:with].blank? \n c.sync = {:with => c.sync[:with].map{|id| id.eql?(old_campaign_id) ? new_campaign_id : id } }\n c.save\n end\n end", "def update_removed_from_list \n if @removed_from_sync_list && !@removed_from_sync_list.blank?\n rem_craft = Craft.where(:name => self.name, :campaign_id => @removed_from_sync_list)\n rem_craft.each do |c|\n c.sync = {:with => c.sync[:with].select{|id| ![self.campaign_id, self.sync[:with]].flatten.include?(id) } }\n c.save\n end\n end\n end", "def add_to_campaign(contact_id, campaign_id, additional_fields = {})\n cm = additional_fields\n cm['CampaignId'] = campaign_id\n\n if cm['CampaignId']\n cm['ContactId'] = contact_id\n\n begin\n cm = client.create('CampaignMember', cm)\n Rails.cache.delete(\"salesforce/user_campaigns/#{contact_id}\")\n return cm['Id']\n rescue Databasedotcom::SalesForceError => e\n # If this failure happens, it is almost certainly just because they\n # are already in the campaign \n # logger.debug \"#{e} #{contact_id} #{campaign_id}\"\n end\n end\n\n nil\n end", "def add_ids_to_campaignmember(obj,instance_url,access_token)\n json_payload = nil\n campaign_id = obj[\"event\"][\"id\"]\n contact_email = obj[\"profile\"][\"email\"]\n contact_fn = escape_characters(obj[\"profile\"][\"first_name\"])\n contact_ln = escape_characters(obj[\"profile\"][\"last_name\"])\n contact_email = obj[\"order\"][\"email\"] if contact_email.nil?\n contact_email = escape_characters(contact_email)\n checked_in = nil\n checked_in = \"Responded\" if obj[\"checked_in\"]\n campaign_search_string =\n url_encode(\n \"FIND {#{campaign_id}}\" \\\n \" IN ALL FIELDS\" \\\n \" RETURNING Campaign(Id)\")\n contact_search_string =\n url_encode(\n \"FIND {#{contact_fn}\" \\\n \" AND #{contact_ln}\" \\\n \" AND #{contact_email}}\" \\\n \" IN ALL FIELDS\" \\\n \" RETURNING Contact(Id)\")\n campaign_base_uri = \"#{instance_url}/services/data/v29.0/search/?q=#{campaign_search_string}\"\n begin\n campaign_query_response = rest_call(\"get\",campaign_base_uri,json_payload,access_token)\n @json_campaign = JSON.parse(campaign_query_response)[0]\n end until !@json_campaign.nil?\n contact_base_uri = \"#{instance_url}/services/data/v29.0/search/?q=#{contact_search_string}\"\n contact_query_response = rest_call(\"get\",contact_base_uri,json_payload,access_token)\n json_contact = JSON.parse(contact_query_response)[0]\n unless json_contact.nil?\n obj.store(\"ContactId\",json_contact[\"Id\"])\n obj.store(\"CampaignId\",@json_campaign[\"Id\"])\n obj.store(\"Status\",checked_in) unless checked_in.nil?\n else\n obj = nil\n end\n return obj\n end", "def campaigns_add name:\n call_adglare_api 'campaigns_add', {name: name}\n end", "def sync_to_chimp\n Chimpactions.available_lists.each do |l|\n add_to(l)\n end\n end", "def add_campaign_id(campaign_id)\n fail(Mailgun::ParameterError, 'Too many campaigns added to message.', campaign_id) if @counters[:attributes][:campaign_id] >= Mailgun::Chains::MAX_CAMPAIGN_IDS\n\n set_multi_complex('o:campaign', campaign_id)\n @counters[:attributes][:campaign_id] += 1\n end", "def assign_campaigns\n # iterate through the matching campaigns and determine whether their requirements have been met\n matching_campaigns.each do |campaign|\n campaign.actions << self\n # associate the campaign with the user if 
all of the campaign requirements have been met\n if campaign.requirements_met_by_individual?(user) && !user.campaigns.include?(campaign)\n user.campaigns << campaign\n end\n end\n end", "def add_campaign(payload)\n post(url_(\"campaign\"), payload)\n end", "def create\n return false unless self.valid? \n \n # set defaults for budget for campaign.create only\n self.budget = budget.reverse_merge( period: 'DAILY', delivery_method: 'STANDARD' )\n\n # create basic campaign attributes\n operand = Hash[\n [ :name, :status, :start_date, :end_date,\n :budget, :bidding_strategy, :network_setting, :settings ].map do |k|\n [ k.to_sym, self.send(k) ] if self.send(k)\n end.compact\n ]\n\n # set default values for settings (for create only - should we set it also for update?)\n # PS: KeywordMatchSetting is required since 201206\n operand[:settings] ||= []\n unless operand[:settings].map { |s| s[:xsi_type] }.include?('KeywordMatchSetting')\n operand[:settings] << { :xsi_type => 'KeywordMatchSetting', :opt_in => false }\n end\n\n response = self.mutate( \n operator: 'ADD', \n operand: operand\n )\n\n check_for_errors(self)\n\n self.id = response[:value].first[:id] rescue nil\n \n if criteria && criteria.size > 0\n new_criteria = Adapi::CampaignCriterion.create(\n campaign_id: @id,\n criteria: criteria\n )\n\n check_for_errors(new_criteria)\n end\n\n ad_groups.each do |ad_group_data|\n ad_group = Adapi::AdGroup.create(\n ad_group_data.merge( campaign_id: @id )\n )\n\n check_for_errors(ad_group, :prefix => \"AdGroup \\\"#{ad_group[:id] || ad_group[:name]}\\\"\")\n end\n\n self.errors.empty?\n\n rescue CampaignError => e\n false\n end", "def record_converted_on_salesforce(contact_id)\n self.salesforce_id = contact_id\n self.is_converted_on_salesforce = true\n self.save!\n\n self.auto_add_to_salesforce_campaign\n self.create_mailchimp\n end", "def sync!\n raise 'You cannot sync!' unless syncable?\n @api_key = account.mailchimp.api_key\n @list_id = account.mailchimp.list_id\n account.mailchimp.sync_from_wundercoach_to_mailchimp\n # sync should complete before making segment,\n # otherwise we might get a mailchimp error\n # in future versions, this should be handled by checking the batch status\n # and running the sync via delayed_job\n sleep(5)\n segment_id = create_segment!\n create_campaign! 
segment_id\n end", "def campaign(id)\n make_json_api_request :get, \"v2/#{account_id}/campaigns/#{id}\"\n end", "def add_campaign(options = {})\n response = connection.post do |req|\n req.url \"campaigns/add\", options\n end\n return_error_or_body(response, response.body.response.campaign)\n end", "def update_campaign(access_token, campaign)\n url = Util::Config.get('endpoints.base_url') +\n sprintf(Util::Config.get('endpoints.campaign'), campaign.id)\n url = build_url(url)\n payload = campaign.to_json\n response = RestClient.put(url, payload, get_headers(access_token))\n Components::Campaign.create(JSON.parse(response.body))\n end", "def test_add_campaigns\n operations = []\n operations[0] = {\n :operand => {\n :name => 'Interplanetary Cruise #%s' % (Time.new.to_f * 1000).to_i,\n :status => 'PAUSED',\n :biddingStrategy => @campaign_srv.module::ManualCPC.new,\n :budget => {\n :period => 'DAILY',\n :amount => {\n :microAmount => 50000000\n },\n :deliveryMethod => 'STANDARD'\n }\n },\n :operator => 'ADD'\n }\n\n operations[1] = {\n :operand => {\n :name => 'Interplanetary Cruise #%s' % (Time.new.to_f * 1000 + 1).to_i,\n :status => 'PAUSED',\n :biddingStrategy => @campaign_srv.module::ManualCPC.new,\n :budget => {\n :period => 'DAILY',\n :amount => {\n :microAmount => 50000000\n },\n :deliveryMethod => 'STANDARD'\n }\n },\n :operator => 'ADD'\n }\n\n # Add ad groups.\n response = @campaign_srv.mutate(operations)\n campaigns = response.rval.value\n assert_equal(2, response.rval.value.size,\n 'Unexpected number of entries returned')\n 0.upto(1) do |index|\n assert_not_nil(campaigns[index], 'Invalid campaign returned')\n assert_added_correctly(operations[index], campaigns[index],\n \"Campaign ##{index + 1}\")\n end\n\n @campaign_1 = campaigns[0]\n @campaign_2 = campaigns[1]\n end", "def opportunity_campaign_select(options = {})\n options[:selected] ||= @opportunity.campaign_id || 0\n selected_campaign = Campaign.find_by_id(options[:selected])\n campaigns = ([selected_campaign] + Campaign.my.order(:name).limit(25)).compact.uniq\n collection_select :opportunity, :campaign_id, campaigns, :id, :name, options,\n \"data-placeholder\": t(:select_a_campaign),\n \"data-url\": auto_complete_campaigns_path(format: 'json'),\n style: \"width:330px; display:none;\",\n class: 'ajax_chosen'\n end", "def set_campaign\n @campaign = Campaign.find(params[:id])\n @campaign.user_id != current_user.id ? not_found : @campaign\n end", "def create\n return false unless self.valid? \n \n operand = Hash[\n [ :name, :status, :start_date, :end_date,\n :budget, :bidding_strategy, :network_setting ].map do |k|\n [ k.to_sym, self.send(k) ] if self.send(k)\n end.compact\n ]\n\n response = self.mutate(\n :operator => 'ADD', \n :operand => operand\n )\n \n return false unless (response and response[:value])\n \n self.id = response[:value].first[:id] rescue nil\n \n # create criteria (former targets) if they are available\n if criteria.size > 0\n criterion = Adapi::CampaignCriterion.create(\n :campaign_id => @id,\n :criteria => criteria\n )\n \n if (criterion.errors.size > 0)\n self.errors.add(\"[campaign criterion]\", criterion.errors.to_a)\n self.rollback\n return false \n end\n end\n\n ad_groups.each do |ad_group_data|\n ad_group = Adapi::AdGroup.create(\n ad_group_data.merge(:campaign_id => @id)\n )\n\n if (ad_group.errors.size > 0)\n self.errors.add(\"[ad group] \\\"#{ad_group.name}\\\"\", ad_group.errors.to_a)\n self.rollback\n return false \n end\n end\n\n return true\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
process the craft which have been removed from sync to no longer point back at this craft instance var is set by sync= being called. this method is called by the synchronize method
def update_removed_from_list if @removed_from_sync_list && !@removed_from_sync_list.blank? rem_craft = Craft.where(:name => self.name, :campaign_id => @removed_from_sync_list) rem_craft.each do |c| c.sync = {:with => c.sync[:with].select{|id| ![self.campaign_id, self.sync[:with]].flatten.include?(id) } } c.save end end end
[ "def remove_car\n\t\t@car = nil\n\t\t@is_slot_free = true\n\tend", "def on_removed_entry(state, event, *event_args)\n super\n\n # TODO: simulation - remove\n if simulating\n __debug_sim(\"[remove_emma_items: #{submission.emma_item}]\")\n emma_items = submission.emma_item\n else\n emma_items = true\n end\n\n # Determine whether this is destined for a member repository.\n unless simulating\n emma_items = Upload.emma_native?(record) if record\n end\n\n ok = nil\n\n if simulating\n if emma_items\n # From UploadController#destroy:\n # @succeeded, @failed = bulk_upload_remove(items)\n __debug_sim('CODE') do\n args = \"items=#{submission.items.inspect}\"\n opt = 'index: false'\n \"@succeeded, @failed = bulk_upload_remove(#{args}, #{opt})\"\n end\n self.succeeded += submission.items\n ok = ready?\n else\n ok = true # TODO: Simulate member repository delete request?\n end\n end\n\n unless simulating\n wf_remove_items(*event_args)\n ok = ready?\n end\n\n # TODO: simulation - remove\n __debug_sim do\n if !emma_items; 'Generating removal request for member repository items.'\n elsif !ok; 'The EMMA-native items NOT removed due to failure(s).'\n else; 'The EMMA-native items were removed.'; end\n end\n\n # Automatically transition to the next state based on submission status.\n if ok\n advance! # NOTE: => :staging\n else\n fail! # NOTE: => :failed\n end\n self\n end", "def cleanup!\n can_remove = true\n self.characters.each do |char|\n if char.fetched_at < self.updated_at\n can_remove = false\n break\n end\n end\n \n if can_remove\n Battle.transaction do\n self.characters.each do |character|\n character.battle = nil if character.battle.present?\n # Revive character if he is dead.\n character.health = character.max_health if character.health <= 0\n character.save\n end\n end\n end\n \n can_remove\n end", "def disband\n\t\t$logger.info \"Disbanding #{ self }\"\n\t\tleader = nil\n\t\t@ants.each do |a|\n\t\t\tif leader.nil?\n\t\t\t\tleader = a\n\t\t\t\ta.clear_orders\n\t\t\t\ta.collective = nil\n\t\t\telse\n\t\t\t\ta.collective = nil\n\t\t\t\ta.remove_target_from_order leader\n\t\t\tend\n\t\tend\n\n\t\t@ants = []\n\n\t\t# Collective doesn't exist any more. Skip any other statements\n\t\t$logger.info \"Doing throw because collective disbanded.\"\n\t\tthrow :done\n\tend", "def verify_craft files = nil, args = {:discover_deleted => false}\n files = self.instance.identify_craft_in(self.name) if files.nil?\n\n present_craft = {:sph => [], :vab => []} \n existing_craft = Craft.where(:campaign_id => self.id).to_a\n\n #this rats nest of chained loops is not really that horrible!\n #it takes the array of craft from the above select and groups them by craft_type. Then for each group it makes an hash of {craft_name => craft}. 
\n #So it results in a hash of; craft_type => hash of {craft_name => craft}\n existing_craft_map = existing_craft.group_by{|c| c.craft_type}.map{|k,v| {k => v.map{|cc| {cc.name => cc}}.inject{|i,j|i.merge(j)} } }.inject{|i,j|i.merge(j)}\n\n #create a new Craft object for each craft file found, unless a craft object for that craft already exists.\n files.each do |type, craft_files| #files is grouped by craft_type\n craft_files.each do |craft_name| \n name = craft_name.sub(\".craft\",\"\") #get the name of the craft \n #and determine if a craft by that name already exists in that craft_type.\n match = existing_craft_map[type.to_s][name] if existing_craft_map && !existing_craft_map[type.to_s].nil?\n if match.nil?\n craft = self.craft.create(:name => name, :craft_type => type) #if the match is nil, create a Craft object\n self.persistence_checksum = nil #set checksum to nil so next pass of System.process will process this campaign.\n elsif match.deleted?\n match.update_attributes(:deleted => false, :history_count => nil) #if the craft object was marked as deleted, but the file was restored, then un-mark the DB object.\n self.persistence_checksum = nil #set checksum to nil so next pass of System.process will process this campaign.\n end\n present_craft[type] << name #add name to list which is used later to indicate which crafts to NOT mark as deleted \n end\n end\n self.save if self.changed?\n \n #Discover deleted - any craft for which no file exists, but which at one point was in the repo\n if args[:discover_deleted] \n ddc = [] #track to ensure each deleted craft is only processed once (in cases where a craft has been deleted multiple times)\n self.discover_deleted_craft(existing_craft_map).each do |del_inf|\n del_inf[:deleted].each do |craft_data|\n next if ddc.include? [craft_data[:craft_type], craft_data[:name]] #skip if a craft of this craft_type and name has already been processed\n ddc << [craft_data[:craft_type], craft_data[:name]] #otherwise add entry to store \n #and create a craft object for the deleted craft.\n self.craft.create!(:name => craft_data[:name].sub(\".craft\",\"\"), :craft_type => craft_data[:craft_type].downcase, :deleted => true, :last_commit => del_inf[:sha])\n end\n end\n end\n\n #remove craft from the repo if the file no longer exists and mark the craft as deleted\n existing_craft.select{|c| !c.deleted?}.each do |craft|\n next if present_craft[craft.craft_type.to_sym] && present_craft[craft.craft_type.to_sym].include?(craft.name)\n craft.deleted = true #actions in .commit will save this attribute\n craft.commit\n end\n end", "def prepare_update\n # All elder broadcasts are trash...\n tmp = @broadcasts; \n @broadcasts = []\n @to_destroy = tmp.reject {|bc| bc.dirty?}\n\n # Get rid of unsolved, conflicts with unactivated broadcasts\n @conflicts = @conflicts.reject{|c| c.active_broadcast.nil?}\n\n # Clean all unactivated broadcasts from remaining conflicts\n @conflicts.each {|c| c.new_broadcasts = [] }\n\n # unless somebody used them\n tmp.select { |bc| bc.dirty? 
}.each do |bc|\n self.add_conflict( :conflict => find_or_create_conflict_by_active_broadcast(bc) )\n end\n end", "def unfetch(track_provider)\n if @queue[0] && @queue[1] && @queue[1].owner == track_provider\n Trace.ppq(\"player unfetched by #{track_provider.class.name}\".red)\n @queue.slice!(1, PREFETCH_SIZE) # Remove all entries after the first one\n debug_queue if Cfg.trace_gstqueue\n end\n end", "def springboard_desync_before\n return_authorizations.springboard_synced.each(&:springboard_desync!)\n payments.springboard_synced.each(&:springboard_desync!)\n line_items.springboard_synced.each(&:springboard_desync!)\n child_springboard_resources.each(&:destroy)\n end", "def destroy\n @crafted_item = CraftedItem.cached_crafted_item(params[:id])\n CraftedItem.clear_cached_crafted_item(params[:id])\n CraftedItem.clear_all_cached_crafted_item\n CraftedItem.clear_cached_source_description_for_crafted_item(params[:id])\n CraftedItem.clear_cached_crafted_item_by_component_item_id(@crafted_item.crafted_item_generated_id)\n CraftedItem.clear_cached_crafted_item_count(@crafted_item.crafted_item_generated_id)\n @crafted_item.destroy\n \n respond_to do |format|\n format.html { redirect_to(crafted_items_url) }\n format.xml { head :ok }\n end\n end", "def block_removed(block, data); end", "def cleanup\n # delete participating armies\n self.armies.each do |army|\n if army.empty? && !army.garrison?\n if GAME_SERVER_CONFIG['military_only_flag_destroyed_armies']\n army.removed = true\n unless army.save\n raise InternalServerError.new('Failed to flag an army as removed')\n end\n else\n army.destroy\n end\n end\n end\n\n # destroy appropriate event\n self.event.destroy\n\n # remove battle or set to removed\n if GAME_SERVER_CONFIG['military_only_flag_destroyed_battles']\n self.removed = true\n unless self.save\n raise InternalServerError.new('Failed to flag an battle as removed')\n end\n else\n self.destroy\n end\n end", "def clean\n\t\t\twas_updated = updated?\n\t\t\tunsubscribe\n\t\t\t@registered = false\n\n\t\t\tBridge.notify_bridge_callbacks self, false if was_updated\n\n\t\t\t@verified = false\n\t\t\t@config = nil\n\t\t\t@lights.clear\n\t\t\t@groups.clear\n\t\t\t@update_callbacks.clear\n\t\tend", "def on_removing_entry(state, event, *event_args)\n super\n\n # TODO: simulation - remove\n if simulating\n # From UploadController#delete:\n # ids = Upload.expand_ids(@item_id)\n __debug_sim('CODE') do\n args = \"id=#{submission.id.inspect}\"\n \"@items = Upload.expand_ids(#{args})\"\n end\n submission.items << submission.id\n end\n\n unless simulating\n wf_list_items(*event_args)\n end\n\n # TODO: simulation - remove\n __debug_sim('System shows the list of item(s) to be removed.')\n if submission&.auto_cancel\n __debug_sim('[auto_remove_cancel: true]')\n __debug_sim('USER decides not to delete item(s).')\n cancel! # NOTE: => :canceled\n elsif submission&.auto_submit\n __debug_sim('[auto_remove_submit: true]')\n __debug_sim('USER confirms the intent to delete item(s).')\n submit! 
# NOTE: => :removed\n else\n __debug_sim('USER must `cancel!` or `submit!` to advance...')\n end\n\n self\n end", "def cleanup\n\t\t@spawners.cleanup\n\tend", "def clean\n # dispose all damage sprites\n @damages.each {|sprite| sprite.dispose if sprite.is_a?(Sprite)}\n # dispose all beam sprites\n @beams.each {|ary| ary[0].dispose}\n # create new damage sprite buffer and new projectile buffer\n @damages, @remotes, @beams = [], [], []\n # unfreeze all actor's actions\n $BlizzABS.battlers.each {|actor| actor.freeze_action = false}\n # clear observation and path request data\n $BlizzABS.AI = BlizzABS::AI.new\n end", "def bookkeeping_before_betting()\n @dealer.reset() ## very important to reset dealer\n @players.delete_if{|player| player.amount <= 0} ## remove broke players\n if @players.size == 0\n puts \"**********WE NEED MORE PLAYERS**************\"\n exit() # exit if no more players left\n end\n @players.each do | player|\n player.reset() ## reset remaining players\n end # end reset\n end", "def cleanup_mini\n #set wait count #clean up routines#\n #@wait_count = 60\n @mini_showing = false\n #if not refreshed, battlers are invisible\n @spriteset.in_mini = false #done with mini scene\n @spriteset.refresh_battlers #refresh to ensure directions are correct\n @spriteset.update\n end", "def return_part2(battler)\n $game_system.bgm_play($game_temp.bgm_memorize)\n hide_all = (Summon_Skill[battler.summoned_id] != nil and Summon_Skill[battler.summoned_id][0][4])\n for actor in battler.summoned_actors\n actor.summon_turn = 0\n actor.summoned_turn = 0\n actor.summoned_id = 0\n $game_party.summoned.delete(actor)\n $game_party.remove_actor(actor.id)\n end\n for actor in battler.removed_actors\n $game_party.add_summon_actor_by_index(actor[0].id, actor[1])\n end\n @spriteset.update\n if $atoa_script['Atoa ATB']\n reset_bars\n elsif $atoa_script['Atoa CTB']\n update_all_ctb\n @ctb_window.refresh \n end\n $game_temp.summon_active = !$game_party.summoned.empty?\n battler.summoned_actors.clear\n battler.removed_actors.clear\n for actor in $game_party.actors\n $game_party.removed_actors.delete(actor)\n unless actor.dead? and not check_include(actor, 'NOCOLLAPSE')\n actor.invisible = false\n end\n end\n for actor in $game_party.actors\n actor.pose_id = 0\n end\n reset_actors_position(hide_all)\n @status_window.refresh\n end_summoning(battler)\n @action_battlers.delete(battler)\n @active_battlers.delete(battler)\n update_summoned_party\n battler.wait_time = 8\n end", "def undo!\n case self.state\n when :auction\n if self.auction.complete?\n false # can't undo if auction is complete yo.\n else\n card = self.auction.calls.pop # remove the last undo\n if card\n true\n else\n false\n end\n end\n when :playing\n # remove the last card from everywhere\n card = self.play.history.pop\n if card\n trick = self.play.get_current_trick\n player = self.play.who_played?(card)\n # this was a completed trick, we need to remove it from the winner queue\n if trick.cards.compact.size == 4\n winner = self.play.who_played?(self.play.winning_card(trick))\n self.play.winners.pop if self.play.winners.last == winner\n end\n self.play.get_current_trick.cards.delete(card)\n self.play.played.each { |k,h| h.delete(card) }\n self.board.deal.hands[player] << card\n true\n else\n false \n end\n else\n false\n end\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
GET /cloud_providers/1 GET /cloud_providers/1.json
def show @cloud_provider = current_user.cloud_providers.find(params[:id]) respond_to do |format| format.html # show.html.erb format.json { render json: @cloud_provider } end end
[ "def new\n @cloud_provider = current_user.cloud_providers.new\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @cloud_provider }\n end\n end", "def show\n @provider = current_company.providers.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @provider }\n end\n end", "def providers\n url = url_with_api_version(@base_url, 'providers')\n resp = rest_get(url)\n JSON.parse(resp.body)[\"value\"]\n end", "def index\n @providers = Provider.all\n\n render json: @providers\n end", "def providers(params = {})\n response = default_scope.get('providers/') do |request|\n request.params = params\n end\n JSON.parse(response.body)\n end", "def show\n @cloud = Cloud.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @cloud }\n end\n end", "def show\n @c_provider = CProvider.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @c_provider }\n end\n end", "def fetch_cloud\n cloud = Cloud.find(params[:cloud_id])\n authorize cloud, :show?\n return cloud\n end", "def show\n @cloud = Cloud.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render :json => @cloud }\n end\n end", "def new\n @provider = current_company.providers.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @provider }\n end\n end", "def providers\n filter = params.except :utf8, :commit, :action, :controller\n if filter.empty?\n render :json => Provider.all.order(:name)\n else\n geo_ids, services = get_queries filter\n providers = Provider.where(:id => geo_ids).where(services).order(\"name\") \n render :json => providers\n end\n end", "def index\n @providerservices = Providerservice.all\n end", "def show\n @cloud_account = CloudAccount.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render :json => @cloud_account }\n end\n end", "def index\n @v1_provider_operations = V1::ProviderOperation.all\n end", "def list_providers(limit=0, currency='MXN')\n uri = @client.deploy_uri+'providers/'\n\n if limit > 0\n uri = uri+'?order_total='+limit.to_s\n end\n\n if limit > 0 && !currency.nil? && currency != 'MXN'\n uri = uri+'&currency='+currency\n end\n\n response = EasyRequest::get(uri, get_auth())\n\n Factory::get_instance_of 'ListProviders', response\n end", "def query_storage_providers\n get_storage_providers\n puts 'The list of Storage Providers in this PowerVC cloud are:'\n headers = ['Storage Providers']\n print_table(headers, @stg_providers_print_list)\n footer\n end", "def retrieve_identity_providers()\n start.uri('/api/identity-provider')\n .get()\n .go()\n end", "def list_providers(params = nil, headers = nil)\n get(\"/api/v1/providers\", params, headers)\n end", "def retrieve_identity_providers()\n start.uri('/api/identity-provider')\n .get()\n .go()\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
GET /cloud_providers/new GET /cloud_providers/new.json
def new @cloud_provider = current_user.cloud_providers.new respond_to do |format| format.html # new.html.erb format.json { render json: @cloud_provider } end end
[ "def new\n @provider = current_company.providers.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @provider }\n end\n end", "def new\n @title = t('view.providers.new_title')\n @provider = Provider.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @provider }\n end\n end", "def new\n @provider = Provider.new\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @provider }\n end\n end", "def provider_new\n @page_title = _('New_provider')\n @page_icon = \"add.png\"\n @provider, @tariffs, @servers = Provider.provider_new(session)\n\n if not @tariffs\n flash[:notice] = _('No_tariffs_available')\n redirect_to :action => 'list'\n end\n end", "def new\n @cloud = Cloud.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @cloud }\n end\n end", "def new\n @cloud = Cloud.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render :json => @cloud }\n end\n end", "def create\n @provider = current_company.providers.new(params[:provider])\n\n respond_to do |format|\n if @provider.save\n format.html { redirect_to @provider, notice: 'Provider was successfully created.' }\n format.json { render json: @provider, status: :created, location: @provider }\n else\n format.html { render action: \"new\" }\n format.json { render json: @provider.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @provider = Provider.new(provider_params)\n\n respond_to do |format|\n if @provider.save\n format.html { redirect_to providers_path, notice: 'Provider was successfully created.' }\n format.json { render :show, status: :created, location: @provider }\n else\n format.html { render :new }\n format.json { render json: @provider.errors, status: :unprocessable_entity }\n end\n end\n end", "def new\n @provider = Provider.new\n respond_to do |format|\n format.json { render json: @provider }\n format.xml { render xml: @provider }\n end\n end", "def create\n @provider = Provider.new(provider_params)\n\n if @provider.save\n render json: @provider, status: :created, location: @provider\n else\n render json: @provider.errors, status: :unprocessable_entity\n end\n end", "def new\n @cloud_account = CloudAccount.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render :json => @cloud_account }\n end\n end", "def create\n @info_provider = Info::Provider.new(info_provider_params)\n\n respond_to do |format|\n if @info_provider.save\n format.html { redirect_to @info_provider, notice: 'Provider was successfully created.' 
}\n format.json { render :show, status: :created, location: @info_provider }\n else\n format.html { render :new }\n format.json { render json: @info_provider.errors, status: :unprocessable_entity }\n end\n end\n end", "def new\n @cloud_record = CloudRecord.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render :json => @cloud_record }\n end\n end", "def new\n @cloudpost = Cloudpost.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @cloudpost }\n end\n end", "def new\n @data_provider = DataProvider.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @data_provider }\n end\n end", "def new\n @cloud = Cloud.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @cloud }\n end\n end", "def new\n @provider = Provider.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @provider }\n end\n end", "def new\n @platform = Platform.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @platform }\n end\n end", "def new\n @cloudstrgplugin = Cloudstrgplugin.new\n \n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @cloudstrgplugin }\n end\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
PUT /cloud_providers/1 PUT /cloud_providers/1.json
def update @cloud_provider = current_user.cloud_providers.find(params[:id]) respond_to do |format| if @cloud_provider.update_attributes(params[:cloud_provider]) format.html { redirect_to @cloud_provider, notice: 'Cloud provider was successfully updated.' } format.json { head :no_content } else format.html { render action: "edit" } format.json { render json: @cloud_provider.errors, status: :unprocessable_entity } end end end
[ "def update_provider!(provider, provider_info = {}, access = {})\n Api::Request.new do |request|\n request[:access] = access\n request[:method] = :PUT\n request[:path] = \"/mgmt/{{client_id}}/storages/#{provider}\"\n request[:request_body] = provider_info\n end.execute!\n\n end", "def update\n if @provider.update(provider_params)\n head :no_content\n else\n render json: @provider.errors, status: :unprocessable_entity\n end\n end", "def update\n @provider = Provider.find(params[:id])\n\n if @provider.update(provider_params)\n head :no_content\n else\n render json: @provider.errors, status: :unprocessable_entity\n end\n end", "def update\n respond_to do |format|\n if @v1_provider.update(v1_provider_params)\n format.html { redirect_to @v1_provider, notice: 'Provider was successfully updated.' }\n format.json { render :show, status: :ok, location: @v1_provider }\n else\n format.html { render :edit }\n format.json { render json: @v1_provider.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @provider = current_company.providers.find(params[:id])\n\n respond_to do |format|\n if @provider.update_attributes(params[:provider])\n format.html { redirect_to @provider, notice: 'Provider was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @provider.errors, status: :unprocessable_entity }\n end\n end\n end", "def update_provider(org, box, version, provider, url, access_token, options)\n if !url\n @env.ui.warn(I18n.t(\"cloud_command.upload.no_url\"))\n end\n account = VagrantCloud::Account.new(\n custom_server: api_server_url,\n access_token: access_token\n )\n\n with_provider(account: account, org: org, box: box, version: version, provider: provider) do |p|\n p.checksum = options[:checksum] if options.key?(:checksum)\n p.checksum_type = options[:checksum_type] if options.key?(:checksum_type)\n p.url = url if !url.nil?\n p.save\n\n @env.ui.success(I18n.t(\"cloud_command.provider.update_success\",\n provider: provider, org: org, box_name: box, version: version))\n\n format_box_results(p, @env)\n 0\n end\n rescue VagrantCloud::Error => e\n @env.ui.error(I18n.t(\"cloud_command.errors.provider.update_fail\",\n provider: provider, org: org, box_name: box, version: version))\n @env.ui.error(e.message)\n 1\n end", "def update\n respond_to do |format|\n if @v1_provider_operation.update(v1_provider_operation_params)\n format.html { redirect_to @v1_provider_operation, notice: 'Provider operation was successfully updated.' }\n format.json { render :show, status: :ok, location: @v1_provider_operation }\n else\n format.html { render :edit }\n format.json { render json: @v1_provider_operation.errors, status: :unprocessable_entity }\n end\n end\n end", "def put\n request_method('PUT')\n end", "def update\n \n respond_to do |format|\n if @service_provider.update(service_provider_params)\n format.html { redirect_to @service_provider, notice: 'Service provider was successfully updated.' 
}\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @service_provider.errors, status: :unprocessable_entity }\n end\n end\n end", "def update_identity_provider(identity_provider_id, request)\n start.uri('/api/identity-provider')\n .url_segment(identity_provider_id)\n .body_handler(FusionAuth::JSONBodyHandler.new(request))\n .put()\n .go()\n end", "def update\n respond_to do |format|\n if @service_provider.update(service_provider_params)\n format.html { redirect_to @service_provider, notice: \"Service provider was successfully updated.\" }\n format.json { render :show, status: :ok, location: @service_provider }\n else\n format.html { render :edit, status: :unprocessable_entity }\n format.json { render json: @service_provider.errors, status: :unprocessable_entity }\n end\n end\n end", "def new\n @cloud_provider = current_user.cloud_providers.new\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @cloud_provider }\n end\n end", "def update_identity_provider(identity_provider_id, request)\n start.uri('/api/identity-provider')\n .url_segment(identity_provider_id)\n .body_handler(FusionAuth::JSONBodyHandler.new(request))\n .put()\n .go()\n end", "def update\n @provider = Provider.find(params[:id])\n\n respond_to do |format|\n if @provider.update_attributes(params[:provider])\n format.html { redirect_to(@provider, :notice => 'Provider was successfully updated.') }\n format.xml { head :ok }\n format.json { render :json => { :resp=> \"ok\" }}\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @provider.errors, :status => :unprocessable_entity }\n format.json { render :json => { :resp=> \"error\" }}\n end\n end\n end", "def update\n respond_to do |format|\n if @sprovider.update(sprovider_params)\n format.html { redirect_to @sprovider, notice: 'Sprovider was successfully updated.' }\n format.json { render :show, status: :ok, location: @sprovider }\n else\n format.html { render :edit }\n format.json { render json: @sprovider.errors, status: :unprocessable_entity }\n end\n end\n end", "def put *args\n make_request :put, *args\n end", "def update_tenant_circle(args = {}) \n put(\"/tenantcircles.json/#{args[:circleId]}\", args)\nend", "def update\n @cloud = Cloud.find(params[:id])\n\n respond_to do |format|\n if @cloud.update_attributes(params[:cloud])\n format.html { redirect_to @cloud, :notice => 'Cloud was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render :action => \"edit\" }\n format.json { render :json => @cloud.errors, :status => :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @info_provider.update(info_provider_params)\n format.html { redirect_to @info_provider, notice: 'Provider was successfully updated.' }\n format.json { render :show, status: :ok, location: @info_provider }\n else\n format.html { render :edit }\n format.json { render json: @info_provider.errors, status: :unprocessable_entity }\n end\n end\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
DELETE /cloud_providers/1 DELETE /cloud_providers/1.json
def destroy @cloud_provider = current_user.cloud_providers.find(params[:id]) @cloud_provider.destroy respond_to do |format| format.html { redirect_to cloud_providers_url } format.json { head :no_content } end end
[ "def destroy\n @v1_provider.destroy\n respond_to do |format|\n format.html { redirect_to v1_providers_url, notice: 'Provider was successfully destroyed.' }\n format.json { head :no_content }\n end\n end", "def destroy\n @provider.destroy\n respond_to do |format|\n format.html { redirect_to providers_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @cloud = Cloud.find(params[:id])\n @cloud.destroy\n\n respond_to do |format|\n format.html { redirect_to clouds_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @provider = Provider.find(params[:id])\n @provider.destroy\n\n respond_to do |format|\n format.html { redirect_to providers_url }\n format.json { head :ok }\n end\n end", "def destroy\n @v1_provider_operation.destroy\n respond_to do |format|\n format.html { redirect_to v1_provider_operations_url, notice: 'Provider operation was successfully destroyed.' }\n format.json { head :no_content }\n end\n end", "def destroy\n @service_provider.destroy\n respond_to do |format|\n format.html { redirect_to service_providers_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @c_provider = CProvider.find(params[:id])\n @c_provider.destroy\n\n respond_to do |format|\n format.html { redirect_to c_providers_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @service_provider.destroy\n respond_to do |format|\n format.html { redirect_to service_providers_url, notice: \"Service provider was successfully destroyed.\" }\n format.json { head :no_content }\n end\n end", "def delete_tenant_circle(args = {}) \n delete(\"/tenantcircles.json/#{args[:circleId]}\", args)\nend", "def destroy\n @data_provider = DataProvider.find(params[:id])\n @data_provider.destroy\n\n respond_to do |format|\n format.html { redirect_to data_providers_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @provider_service.destroy\n respond_to do |format|\n format.html { redirect_to provider_services_url, notice: 'Provider service was successfully destroyed.' }\n format.json { head :no_content }\n end\n end", "def destroy\n @healthcare_provider.destroy\n respond_to do |format|\n format.html { redirect_to healthcare_providers_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @auth_provider.destroy\n respond_to do |format|\n format.html { redirect_to auth_providers_url, notice: 'Auth provider was successfully destroyed.' }\n format.json { head :no_content }\n end\n end", "def destroy\n @storage = @client.storages.find(params[:id])\n @storage.destroy\n\n respond_to do |format|\n format.html { redirect_to client_url(@client) }\n format.json { head :no_content }\n end\n end", "def destroy\n @providerservice.destroy\n respond_to do |format|\n format.html { redirect_to providerservices_url, notice: 'Providerservice was successfully destroyed.' }\n format.json { head :no_content }\n end\n end", "def destroy\n @sprovider.destroy\n respond_to do |format|\n format.html { redirect_to sproviders_url, notice: 'Sprovider was successfully destroyed.' }\n format.json { head :no_content }\n end\n end", "def destroy\n @prize_provider.destroy\n respond_to do |format|\n format.html { redirect_to prize_providers_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @cloud_account = CloudAccount.find(params[:id])\n @cloud_account.destroy\n\n respond_to do |format|\n format.html { redirect_to cloud_accounts_url }\n format.json { head :ok }\n end\n end", "def destroy\n onevnet \"delete\", resource[:name]\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
If the request passed a callback, answer in the appropriate way. Otherwise, answer normally.
def callback_response(callback,response) if callback.nil? response else callback + "(" + response + ")" end end
[ "def dispatch_http_response(res, options, &block)\n callback = options.fetch('callback', nil)\n (res.blank? && callback.present?) ? callback.call(res) : block.call(res)\nend", "def response_callback(&block)\n @response_callback = block if block_given?\n \n @response_callback\n end", "def hookup_request_callbacks(req, errback, &blk)\n req.callback &blk\n req.callback { stop }\n\n req.errback &errback if errback\n req.errback { stop }\n end", "def dispatch_http_response(res, options)\n callback = options.fetch('callback', nil)\n res.blank? && callback.present? ? callback.call(res, nil) : yield(res)\n end", "def call_ok_handler; call_handler(:ok); end", "def handle_request( request, &block )\n\t\tif block\n\t\t\treturn block.call( request )\n\t\telse\n\t\t\treturn request.response\n\t\tend\n\tend", "def handle_callback_response(authenticator, callback)\n @response = authenticator.send callback\n debug \"Callback received and return #{@response.inspect}\"\n end", "def register_request_callback; end", "def request_handler(request_data)\n # assume request_data is a single line with a possible newline trailing.\n request = JSON.load(request_data.chomp)\n if 2 == request.keys.size && request.has_key?(LAST_EXIT_CODE_KEY) && request.has_key?(LAST_ERROR_MESSAGE_KEY)\n # pop the next action from the queue.\n command = @callback.call(:respond, request)\n return JSON.dump(NEXT_ACTION_KEY => command) + \"\\n\";\n end\n raise ArgumentError, \"Invalid request\"\n rescue Exception => e\n return JSON.dump(:Error => \"#{e.class}: #{e.message}\", :Detail => e.backtrace.join(\"\\n\")) + \"\\n\"\n end", "def response_proc(code=nil,&blk)\n Proc.new do | message |\n if code.nil? or code.empty? or code == message\n @logger.debug \"Calling proc. code: #{code} message: #{message}\"\n blk.call(message)\n end\n end\n end", "def call_ok_handler\n call_handler(:ok)\n end", "def callback\n unless params[:callback].blank?\n redirect_to params[:callback] \n return false\n else\n return true\n end\n end", "def proxy_callback(callback, *args)\n # Log inputs and attempt\n resource.response.trace << attempt(callback, args)\n # Do the call\n _result = resource.send(callback, *args)\n add_dynamic_callback_proxies(_result) if CALLBACK_REFERRERS.include?(callback.to_sym)\n resource.response.trace << result(_result)\n _result\n rescue => exc\n exc.backtrace.reject! { |s| s.include?(__FILE__) }\n resource.response.trace << exception(exc)\n raise\n end", "def call(request, _next)\n @block ? @block.call(request, _next) : _next.call\n end", "def call(request, _next); end", "def execute_request(default=nil, timeout=nil, &request)\n timeout ||= 30\n raise \"No block provided\" unless request\n response = nil\n begin\n Timeout::timeout(timeout) do\n response = request.call\n end\n\n rescue Timeout::Error => ex\n Rudy::Huxtable.le \"Timeout (#{timeout}): #{ex.message}!\"\n rescue SocketError => ex\n #Rudy::Huxtable.le ex.message\n #Rudy::Huxtable.le ex.backtrace\n raise SocketError, \"Check your Internets!\" unless Rudy::Huxtable.global.offline\n ensure\n response ||= default\n end\n response\n end", "def callback\n @callback ||= ERB::Util.url_encode(\"#{request.url}&callback\")\n end", "def callback; end", "def on_request &b\n @request_proc = b\n self\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
String ID of the automation that created the request
def automation_id @attributes[:automation_id] end
[ "def automation_id\n return @automation_id\n end", "def automation_id\n existence_check\n @automation_element.current.automation_id.to_s\n end", "def automation_id=(value)\n @automation_id = value\n end", "def request_id\n SecureRandom.uuid\n end", "def request_id\n @rid ||= Jabber::Generators.request\n end", "def request_id\n SecureRandom.hex(17)\n end", "def request_id=(_); end", "def request_id\n # FIXME make this number configurable and document the probabilities of clashes\n @request_id ||= SecureRandom.hex(DEFAULT_REQUEST_ID_LENGTH)\n end", "def generate_request_identifier\n request_id = \"#{SecureRandom.uuid}#{self.id}#{rand(1000...9999)}\"\n Rails.cache.write(\"request-identifier-#{request_id}\", self.id)\n return request_id\n end", "def extract_request_id(env)\n SecureRandom.uuid\n end", "def get_request_id\n request_id = ''\n @id_lock.synchronize do\n request_id = @@current_request_id += 1\n end\n request_id\n end", "def request_id\n # This one set by Rack::RequestId middleware\n Thread.current[:request_id]\n end", "def id\n config[:id] || Origen.target.name\n end", "def rest_request_id\n (@params[\"request_id\"].to_i - 1000).to_s\n end", "def runtime_id\n Datadog::Core::Environment::Identity.id\n end", "def _appointment_request_id\n appointment_request_id\n end", "def id_for_body\n id = request[:controller].to_s\n \" id=\\\"#{id}\\\"\"\n end", "def request_id=(value)\n @request_id = value\n end", "def last_request_id\n @last_response && @last_response.body.to_s[%r{<requestId>(.+?)</requestId>}] && $1\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Initializes a new Table instance with the specified columns. +columns+:: A list of Column instances representing the columns in the new table.
def initialize(*columns) @columns = columns end
[ "def initialize_columns(columns = {})\n colunms_config = {}\n columns.each do |column, config|\n colunms_config[column.to_sym] = initialize_column column, config\n end\n colunms_config\n end", "def set_columns(new_columns)\n @columns = new_columns\n def_column_accessor(*new_columns) if new_columns\n @str_columns = nil\n @columns\n end", "def create_table(table_name, columns = {})\n @database.create_table?(table_name) do\n columns.each do |column_name, datatype|\n column column_name, datatype\n end\n end\n end", "def initialize(rows, col_names: [], row_names: [], attributes: {}, &block)\n @rows = rows.map(&:to_a)\n @header = col_names.to_a\n row_names = row_names.to_a\n unless row_names.empty?\n row_names.each_with_index do |name, i|\n if i < @rows.size\n @rows[i].prepend(name) \n else\n @rows << [name]\n end\n end\n end\n @header.prepend(\"\") unless row_names.empty? || @header.empty?\n\n @table_attributes = attributes[:table] || {}\n @header_attributes = attributes[:header] || {}\n @header_cell_attributes = attributes[:header_cell] || {}\n @row_attributes = attributes[:row] || {}\n @cell_attributes = attributes[:cell] || {}\n @block = block\n end", "def initialize(columns, **options, &block)\n @columns = columns\n @store = Gtk::ListStore.new(*[String] * columns.length)\n\n super(Gtk::TreeView.new(@store), options, &block)\n\n # Init columns\n columns.each_with_index do |col, index|\n renderer = Gtk::CellRendererText.new\n new_col = Gtk::TreeViewColumn.new(col, renderer, text: index)\n gtk_instance.append_column(new_col)\n end\n\n @rows = Interphase::Helpers::Observable.new([]) { refresh_rows }\n\n refresh_rows\n end", "def initialize(*headers)\n @table = [[]]\n headers.each { |h| @table[0].push h }\n end", "def add_table(rows, columns, options = {})\n @nodes << Table.new(page_config, rows, columns, options)\n end", "def set_initial_columns( columns = nil )\n if columns.nil?\n if @opts[:header] == false\n columns = (0...csv_column_count).map{ |i| :\"col#{i}\" }\n else\n columns = fetch_csv_headers.map{ |name| self.class.getter_name( name ) }\n end\n else\n unless !@csv || columns.length == csv_column_count\n $stderr.puts \"Warning <#{@spreadsheet_file}>: columns array does not match the number of columns in the spreadsheet.\" \n compare_columns_to_headers\n end\n end\n \n for column in columns\n raise \"#{column} is in the list FORBIDDEN_COLUMN_NAMES\" if FORBIDDEN_COLUMN_NAMES.include?(column)\n end\n \n @columns = columns\n end", "def initialize(lines, columns, hypothesis=nil)\n\t\t@lines = lines\n\t\t@columns = columns\n\n\t\tif hypothesis != nil then\n\t\t\t@grid = Array.new(lines) do |j|\n\t\t\t\tArray.new(columns) do |i|\n\t\t\t\t\tCell.new(hypothesis, j, i)\n\t\t\t\tend\n\t\t\tend\n\t\tend\n\tend", "def initialize(delimited, columns: nil, **args)\n @tabular = IOStreams::Tabular.new(columns: columns, **args)\n @delimited = delimited\n\n # Render header line when `columns` is supplied.\n @delimited << @tabular.render_header if columns && @tabular.requires_header?\n end", "def initialize(collection, options = {}, &block)\n puts \"INITIALIZING TABLE with options = #{options.inspect}\"\n @column_names = []\n @columns = {}\n @collection = collection \n @options = options\n yield(self) if block_given?\n end", "def initialize(columns, size_or_array=nil)\n size_or_array ||= columns.size \n\n case size_or_array\n when Integer\n @arr = Array.new(size_or_array)\n when Array\n @arr = size_or_array\n else\n raise TypeError, \"parameter must be either Integer or Array\"\n end\n\n # The 
'@column_map' is used to map column names to integer values so\n # that users can reference row values by name or number.\n\n @column_map = {}\n @column_names = columns\n columns.each_with_index { |c,i| @column_map[c] = i }\n super(@arr)\n end", "def initialize_table\n @total_rows.times do |row|\n row_array = Array.new(@total_columns, \"\")\n @reconstructed_table.push(row_array)\n end\n end", "def columns=(columns)\n reset_fields\n\n header_labels, fields = break_columns_into_header_and_field_names(columns)\n fields.zip(header_labels) do |field,label|\n add_field( field, :label => label )\n end\n end", "def table(rows, columns, *widths)\n node = TableNode.new(self, rows, columns, *widths)\n yield node if block_given?\n store(node)\n node\n end", "def table(rows, columns, *widths)\n node = TableNode.new(self, rows, columns, *widths)\n yield node if block_given?\n store(node)\n node\n end", "def initialize (row_columns, row_values)\n @hash = {}\n @columns = row_columns\n row_columns.each_with_index do |column_name, index|\n @hash[column_name.to_sym] = row_values[index]\n end\n end", "def define_columns(columns_props_array)\n view.define_columns(columns_props_array)\n\n # Sync again after adding the columns.\n sync_ui_from_model\n end", "def sqlite3_create_tb(table_name, columns, primary_key, if_not_exist)\n if if_not_exist.downcase == \"n\"\n status = @dbm.table_exist?(table_name)\n @assert.table_already_exist(status, table_name, @dbh)\n end \n\n # Retrieve only the column names\n col_names = columns[0].keys\n\n table_spec_str = '('\n # col: Column name\n # columns[0][col][0]: Column type\n # columns[1][col][1]: Column nullable\n col_names.each {|col|\n col_type = columns[0][col][0]\n @assert.check_type(col_type)\n if col == primary_key\n table_spec_str.concat(\"#{col} #{col_type} PRIMARY KEY NOT NULL,\")\n else\n if columns[0][col][1].downcase == \"no\"\n table_spec_str.concat(\"#{col} #{col_type} NOT NULL,\")\n else\n table_spec_str.concat(\"#{col} #{col_type},\")\n end\n end\n }\n table_spec_str.chomp!(',')\n table_spec_str.concat(')')\n \n create_query = \"CREATE TABLE IF NOT EXISTS #{table_name} #{table_spec_str};\"\n @dbh.execute(create_query)\n\n return create_query + \"\\n\"\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Sorts the receiver's data in ascending order using the specified sort keys. +sort_keys+:: A list of column IDs to sort on. If the table does not have a column with one of the IDs, that ID is ignored.
def sort_data_ascending!(*sort_keys) self.sort_data!(true, sort_keys) end
[ "def sort_data!(is_ascending, sort_keys)\n sort_key_indices = sort_keys.collect { |key| @columns.index(@columns.find { |column| column.id == key }) }.reject { |e| !e }\n\n @data.sort! do |row1, row2|\n comparison_result = 0\n\n sort_key_indices.each do |index|\n comparison_result = @columns[index].data_comparator.call(row1[index], row2[index])\n comparison_result *= -1 if !is_ascending\n break unless comparison_result == 0\n end\n\n comparison_result\n end\n end", "def sort(*keys)\n sort_directions = {}\n asc_regexp = /(.*)_asc$/\n desc_regexp = /(.*)_desc$/\n keys.each_with_index do |key,key_index|\n key_as_string = key.to_s\n if m = asc_regexp.match(key_as_string)\n new_key = m[1].to_sym\n keys[key_index] = new_key\n sort_directions[new_key] = :asc\n elsif m = desc_regexp.match(key_as_string)\n new_key = m[1].to_sym\n keys[key_index] = new_key\n sort_directions[new_key] = :desc\n else\n sort_directions[key] = :asc\n end\n end\n\n key_size = keys.size\n new_rows = rows.sort do |x,y|\n compare = 0\n idx = 0\n while compare == 0 && idx < key_size\n key = keys[idx]\n raise(Plottr::InvalidRowKeyError, \"Unrecognised row key #{key}\") if !x.has_key?(key) || !y.has_key?(key)\n compare = if sort_directions[key] == :asc\n (x[key] <=> y[key])\n else\n (y[key] <=> x[key])\n end\n idx += 1\n end\n compare\n end\n new_table = Plottr::Table.new(self.columns)\n new_table.rows = new_rows\n new_table\n end", "def sort\n table_name = @tables.keys.sample\n table = @tables[table_name]\n\n column = table.defs.to_a.sample\n while String == column[1]\n column = table.defs.to_a.sample\n end\n\n column_idx = table.defs.keys.index column[0]\n\n sorted = table.data.sort_by{ |r| r[column_idx] }\n\n assert_exec([\"rows\".k, [\"sort-by\".k,\n [\"table\".k, table_name],\n [\"row-lambda\".k, column[0].k]]],\n sorted)\n end", "def sort\n return unless @model\n return if @sort_keys.empty?\n $log.debug \"TABULAR SORT KEYS #{sort_keys} \"\n @model.sort!{|x,y| \n res = 0\n @sort_keys.each { |ee| \n e = ee.abs-1 # since we had offsetted by 1 earlier\n abse = e.abs\n if ee < 0\n res = y[abse] <=> x[abse]\n else\n res = x[e] <=> y[e]\n end\n break if res != 0\n }\n res\n }\n end", "def sort_data\n sort_cols = @columns.select{ |col| col.sort_order }.sort_by{ |col| col.sort_order }\n if sort_cols.length > 0\n log.fine \"Sorting by #{sort_cols.map(&:header).to_sentence}\"\n @data.sort! 
do |a, b|\n ord = nil\n sort_cols.each do |col|\n if col.sort_ascending\n a_val = a[col.column_index]\n b_val = b[col.column_index]\n else\n a_val = b[col.column_index]\n b_val = a[col.column_index]\n end\n ord = a_val <=> b_val || (a_val && 1) || (b_val && -1) || 0\n break unless ord == 0\n end\n ord\n end\n end\n end", "def sort_keys\n sort_key_column_names = if can_optimise_for_different_table?\n [foreign_key_column_name_to_optimised_table]\n else\n [primary_key_column_name].compact\n end\n\n sort_key_column_names + DEFAULT_SORT_KEYS.select { |column_name| table_includes_column?(column_name) }\n end", "def order_by(*sort_heads)\n # Sort the rows in order and add to new_rows.\n key_hash = partition_sort_keys(sort_heads)\n new_rows = rows.sort do |r1, r2|\n # Set the sort keys based on direction\n key1 = []\n key2 = []\n key_hash.each_pair do |h, dir|\n if dir == :forward\n key1 << r1[h]\n key2 << r2[h]\n else\n key1 << r2[h]\n key2 << r1[h]\n end\n end\n # Make any booleans comparable with <=>\n key1 = key1.map_booleans\n key2 = key2.map_booleans\n\n # If there are any nils, <=> will return nil, and we have to use the\n # special comparison method, compare_with_nils, instead.\n result = (key1 <=> key2)\n result.nil? ? compare_with_nils(key1, key2) : result\n end\n\n # Add the new_rows to the table, but mark a group boundary at the points\n # where the sort key changes value. NB: I use self.class.new here\n # rather than Table.new because if this class is inherited, I want the\n # new_tab to be an instance of the subclass. With Table.new, this\n # method's result will be an instance of FatTable::Table rather than of\n # the subclass.\n new_tab = empty_dup\n last_key = nil\n new_rows.each_with_index do |nrow, k|\n new_tab << nrow\n key = nrow.fetch_values(*key_hash.keys)\n new_tab.mark_boundary(k - 1) if last_key && key != last_key\n last_key = key\n end\n new_tab.normalize_boundaries\n new_tab\n end", "def sort_by_key!\n sorted = @data.sort!{|a, b| a.first <=> b.first}\n return self\n end", "def sort_data_descending!(*sort_keys)\n self.sort_data!(false, sort_keys)\n end", "def sort_records!(recs)\n recs.sort_by! { |rec| sort_keys(rec) }\n rescue\n recs.sort_by! { |rec| sort_keys(rec, false) }\n end", "def sortable(*attr_names)\r\n attr_names.each do |attr_name|\r\n scope \"sort_#{attr_name}\", -> ordering { order(\"#{table_name}.#{attr_name} #{ordering}\") }\r\n end\r\n end", "def uid_sort(sort_keys, search_keys, charset); end", "def sort(key, opts={})\n cmd = \"SORT #{key}\"\n cmd << \" BY #{opts[:by]}\" if opts[:by]\n cmd << \" GET #{opts[:get]}\" if opts[:get]\n cmd << \" INCR #{opts[:incr]}\" if opts[:incr]\n cmd << \" DEL #{opts[:del]}\" if opts[:del]\n cmd << \" DECR #{opts[:decr]}\" if opts[:decr]\n cmd << \" #{opts[:order]}\" if opts[:order]\n cmd << \" LIMIT #{opts[:limit].join(' ')}\" if opts[:limit]\n cmd << \"\\r\\n\"\n write cmd\n multi_bulk_reply\n end", "def ascending(*fields)\n sort_with_list(*fields, 1)\n end", "def sort_rows(collection, sort_by, sort_direction)\n return collection if sort_by.blank?\n\n sort_by.split(\",\").each do |sort_attribute|\n sort_attribute_symbol = sort_attribute.to_sym\n collection.sort! { |x, y| (x.send(sort_attribute_symbol) || \"\") <=> (y.send(sort_attribute_symbol) || \"\") }\n end\n\n collection.reverse! 
if sort_direction == \"desc\"\n collection\n end", "def sort_headers(columns)\n\t\tfront_headers = ['id']\n\t\tback_headers = ['created_at', 'updated_at']\n\t\texcluded_headers = []\n\n\t\tsorted = []\n\t\tfront = []\n\t\tback = []\n\t\texcluded = []\n\n\t\tcolumns.each do |column|\n\t\t\tif front_headers.include?(column.name)\n\t\t\t\tfront << column\n\t\t\telsif back_headers.include?(column.name)\n\t\t\t\tback << column\n\t\t\telsif excluded_headers.include?(column.name)\n\t\t\t\texcluded << column\n\t\t\telse\n\t\t\t\tsorted << column\n\t\t\tend\n\t\tend\n\t\tfront + sorted + back\n\tend", "def sort(ids)\n klass = self.content_type.klass_with_custom_fields(:entries)\n klass.sort_entries!(ids, self.content_type.sortable_column)\n\n # we need to clear out the cache\n content_type.site.touch(:content_version)\n\n track_activity 'content_entry.sorted', parameters: activity_parameters\n end", "def order_by(params = [])\n @options[:sort] = params; self\n end", "def order(*columns)\n warn \"DynamoDB only supports order by range_key\" if columns.any?\n\n query\n @options[:scan_index_forward] = true\n self\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Sorts the receiver's data in descending order using the specified sort keys. +sort_keys+:: A list of column IDs to sort on. If the table does not have a column with one of the IDs, that ID is ignored.
def sort_data_descending!(*sort_keys) self.sort_data!(false, sort_keys) end
[ "def sort_data!(is_ascending, sort_keys)\n sort_key_indices = sort_keys.collect { |key| @columns.index(@columns.find { |column| column.id == key }) }.reject { |e| !e }\n\n @data.sort! do |row1, row2|\n comparison_result = 0\n\n sort_key_indices.each do |index|\n comparison_result = @columns[index].data_comparator.call(row1[index], row2[index])\n comparison_result *= -1 if !is_ascending\n break unless comparison_result == 0\n end\n\n comparison_result\n end\n end", "def descending(*fields)\n sort_with_list(*fields, -1)\n end", "def sort(*keys)\n sort_directions = {}\n asc_regexp = /(.*)_asc$/\n desc_regexp = /(.*)_desc$/\n keys.each_with_index do |key,key_index|\n key_as_string = key.to_s\n if m = asc_regexp.match(key_as_string)\n new_key = m[1].to_sym\n keys[key_index] = new_key\n sort_directions[new_key] = :asc\n elsif m = desc_regexp.match(key_as_string)\n new_key = m[1].to_sym\n keys[key_index] = new_key\n sort_directions[new_key] = :desc\n else\n sort_directions[key] = :asc\n end\n end\n\n key_size = keys.size\n new_rows = rows.sort do |x,y|\n compare = 0\n idx = 0\n while compare == 0 && idx < key_size\n key = keys[idx]\n raise(Plottr::InvalidRowKeyError, \"Unrecognised row key #{key}\") if !x.has_key?(key) || !y.has_key?(key)\n compare = if sort_directions[key] == :asc\n (x[key] <=> y[key])\n else\n (y[key] <=> x[key])\n end\n idx += 1\n end\n compare\n end\n new_table = Plottr::Table.new(self.columns)\n new_table.rows = new_rows\n new_table\n end", "def desc(*columns)\n Lotus::Utils::Kernel.Array(columns).each do |column|\n conditions.push(Proc.new{ sort_by{|r| r.fetch(column)}.reverse })\n end\n\n self\n end", "def order_desc(*columns)\n order(*columns)\n\n @sql += ' DESC'\n\n self\n end", "def descending(*fields)\n clone.tap do |crit|\n crit.options[:sort] = [] unless options[:sort] || fields.first.nil?\n fields.flatten.each { |field| crit.options[:sort] << [ field, :desc ] }\n end\n end", "def desc(*columns)\n Array(columns).each do |column|\n conditions.push([_order_operator, Sequel.desc(column)])\n end\n\n self\n end", "def desc(*columns)\n warn \"DynamoDB only supports order by range_key\" if columns.any?\n\n query\n @options[:scan_index_forward] = false\n self\n end", "def sort\n table_name = @tables.keys.sample\n table = @tables[table_name]\n\n column = table.defs.to_a.sample\n while String == column[1]\n column = table.defs.to_a.sample\n end\n\n column_idx = table.defs.keys.index column[0]\n\n sorted = table.data.sort_by{ |r| r[column_idx] }\n\n assert_exec([\"rows\".k, [\"sort-by\".k,\n [\"table\".k, table_name],\n [\"row-lambda\".k, column[0].k]]],\n sorted)\n end", "def sort_data_ascending!(*sort_keys)\n self.sort_data!(true, sort_keys)\n end", "def descending(*fields)\n clone.tap do |crit|\n setup_sort_options(crit.options) unless fields.first.nil?\n fields.flatten.each { |field| merge_options(crit.options[:sort], [ localize(field), :desc ]) }\n end\n end", "def descending\n swap(:startkey, :endkey) if query[:startkey] || query[:endkey]\n swap(:startkey_docid, :endkey_docid) if query[:startkey_docid] || query[:endkey_docid]\n\n update_query(:descending => true)\n end", "def sort_data\n sort_cols = @columns.select{ |col| col.sort_order }.sort_by{ |col| col.sort_order }\n if sort_cols.length > 0\n log.fine \"Sorting by #{sort_cols.map(&:header).to_sentence}\"\n @data.sort! 
do |a, b|\n ord = nil\n sort_cols.each do |col|\n if col.sort_ascending\n a_val = a[col.column_index]\n b_val = b[col.column_index]\n else\n a_val = b[col.column_index]\n b_val = a[col.column_index]\n end\n ord = a_val <=> b_val || (a_val && 1) || (b_val && -1) || 0\n break unless ord == 0\n end\n ord\n end\n end\n end", "def sort\n return unless @model\n return if @sort_keys.empty?\n $log.debug \"TABULAR SORT KEYS #{sort_keys} \"\n @model.sort!{|x,y| \n res = 0\n @sort_keys.each { |ee| \n e = ee.abs-1 # since we had offsetted by 1 earlier\n abse = e.abs\n if ee < 0\n res = y[abse] <=> x[abse]\n else\n res = x[e] <=> y[e]\n end\n break if res != 0\n }\n res\n }\n end", "def sort_by_column!(table_column_proxy=nil)\n index = nil\n auto_exec do\n index = swt_widget.columns.to_a.index(table_column_proxy.swt_widget) unless table_column_proxy.nil?\n end\n new_sort_property = table_column_proxy.nil? ? @sort_property : table_column_proxy.sort_property || [column_properties[index]]\n return if table_column_proxy.nil? && new_sort_property.nil? && @sort_block.nil? && @sort_by_block.nil?\n if new_sort_property && table_column_proxy.nil? && new_sort_property.size == 1 && (index = column_sort_properties.index(new_sort_property))\n table_column_proxy = table_column_proxies[index]\n end\n if new_sort_property && new_sort_property.size == 1 && !additional_sort_properties.to_a.empty?\n selected_additional_sort_properties = additional_sort_properties.clone\n if selected_additional_sort_properties.include?(new_sort_property.first)\n selected_additional_sort_properties.delete(new_sort_property.first)\n new_sort_property += selected_additional_sort_properties\n else\n new_sort_property += additional_sort_properties\n end\n end\n \n @sort_direction = @sort_direction.nil? || @sort_property.first != new_sort_property.first || @sort_direction == :descending ? :ascending : :descending\n auto_exec do\n swt_widget.sort_direction = @sort_direction == :ascending ? SWTProxy[:up] : SWTProxy[:down]\n end\n \n @sort_property = [new_sort_property].flatten.compact\n table_column_index = column_properties.index(new_sort_property.to_s.to_sym)\n table_column_proxy ||= table_column_proxies[table_column_index] if table_column_index\n auto_exec do\n swt_widget.sort_column = table_column_proxy.swt_widget if table_column_proxy\n end\n \n if table_column_proxy\n @sort_by_block = nil\n @sort_block = nil\n end\n @sort_type = nil\n if table_column_proxy&.sort_by_block\n @sort_by_block = table_column_proxy.sort_by_block\n elsif table_column_proxy&.sort_block\n @sort_block = table_column_proxy.sort_block\n else\n detect_sort_type\n end\n \n sort!\n end", "def sort(key, opts={})\n cmd = \"SORT #{key}\"\n cmd << \" BY #{opts[:by]}\" if opts[:by]\n cmd << \" GET #{opts[:get]}\" if opts[:get]\n cmd << \" INCR #{opts[:incr]}\" if opts[:incr]\n cmd << \" DEL #{opts[:del]}\" if opts[:del]\n cmd << \" DECR #{opts[:decr]}\" if opts[:decr]\n cmd << \" #{opts[:order]}\" if opts[:order]\n cmd << \" LIMIT #{opts[:limit].join(' ')}\" if opts[:limit]\n cmd << \"\\r\\n\"\n write cmd\n multi_bulk_reply\n end", "def sort_by_points_desc\n logger.info 'Sorting by \"Points\" descending'\n wait_for_update_and_click_js sort_by_points_element\n sort_by_points if sort_asc?\n end", "def sort_rows(collection, sort_by, sort_direction)\n return collection if sort_by.blank?\n\n sort_by.split(\",\").each do |sort_attribute|\n sort_attribute_symbol = sort_attribute.to_sym\n collection.sort! 
{ |x, y| (x.send(sort_attribute_symbol) || \"\") <=> (y.send(sort_attribute_symbol) || \"\") }\n end\n\n collection.reverse! if sort_direction == \"desc\"\n collection\n end", "def sort_array\n @data.sorted_by.inject([]) do |memo, (key, value)|\n memo << [@data.columns.index(key), value == 'descending' ? 0 : 1]\n end\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Sorts the receiver's data using the specified sort order and keys. +is_ascending+:: Whether to sort the data in ascending order or not. +sort_keys+:: A list of column IDs to sort on. If the table does not have a column with one of the IDs, that ID is ignored.
def sort_data!(is_ascending, sort_keys) sort_key_indices = sort_keys.collect { |key| @columns.index(@columns.find { |column| column.id == key }) }.reject { |e| !e } @data.sort! do |row1, row2| comparison_result = 0 sort_key_indices.each do |index| comparison_result = @columns[index].data_comparator.call(row1[index], row2[index]) comparison_result *= -1 if !is_ascending break unless comparison_result == 0 end comparison_result end end
[ "def sort_data_ascending!(*sort_keys)\n self.sort_data!(true, sort_keys)\n end", "def sort(*keys)\n sort_directions = {}\n asc_regexp = /(.*)_asc$/\n desc_regexp = /(.*)_desc$/\n keys.each_with_index do |key,key_index|\n key_as_string = key.to_s\n if m = asc_regexp.match(key_as_string)\n new_key = m[1].to_sym\n keys[key_index] = new_key\n sort_directions[new_key] = :asc\n elsif m = desc_regexp.match(key_as_string)\n new_key = m[1].to_sym\n keys[key_index] = new_key\n sort_directions[new_key] = :desc\n else\n sort_directions[key] = :asc\n end\n end\n\n key_size = keys.size\n new_rows = rows.sort do |x,y|\n compare = 0\n idx = 0\n while compare == 0 && idx < key_size\n key = keys[idx]\n raise(Plottr::InvalidRowKeyError, \"Unrecognised row key #{key}\") if !x.has_key?(key) || !y.has_key?(key)\n compare = if sort_directions[key] == :asc\n (x[key] <=> y[key])\n else\n (y[key] <=> x[key])\n end\n idx += 1\n end\n compare\n end\n new_table = Plottr::Table.new(self.columns)\n new_table.rows = new_rows\n new_table\n end", "def sort_data_descending!(*sort_keys)\n self.sort_data!(false, sort_keys)\n end", "def sort\n table_name = @tables.keys.sample\n table = @tables[table_name]\n\n column = table.defs.to_a.sample\n while String == column[1]\n column = table.defs.to_a.sample\n end\n\n column_idx = table.defs.keys.index column[0]\n\n sorted = table.data.sort_by{ |r| r[column_idx] }\n\n assert_exec([\"rows\".k, [\"sort-by\".k,\n [\"table\".k, table_name],\n [\"row-lambda\".k, column[0].k]]],\n sorted)\n end", "def sort_data\n sort_cols = @columns.select{ |col| col.sort_order }.sort_by{ |col| col.sort_order }\n if sort_cols.length > 0\n log.fine \"Sorting by #{sort_cols.map(&:header).to_sentence}\"\n @data.sort! do |a, b|\n ord = nil\n sort_cols.each do |col|\n if col.sort_ascending\n a_val = a[col.column_index]\n b_val = b[col.column_index]\n else\n a_val = b[col.column_index]\n b_val = a[col.column_index]\n end\n ord = a_val <=> b_val || (a_val && 1) || (b_val && -1) || 0\n break unless ord == 0\n end\n ord\n end\n end\n end", "def order_by(*sort_heads)\n # Sort the rows in order and add to new_rows.\n key_hash = partition_sort_keys(sort_heads)\n new_rows = rows.sort do |r1, r2|\n # Set the sort keys based on direction\n key1 = []\n key2 = []\n key_hash.each_pair do |h, dir|\n if dir == :forward\n key1 << r1[h]\n key2 << r2[h]\n else\n key1 << r2[h]\n key2 << r1[h]\n end\n end\n # Make any booleans comparable with <=>\n key1 = key1.map_booleans\n key2 = key2.map_booleans\n\n # If there are any nils, <=> will return nil, and we have to use the\n # special comparison method, compare_with_nils, instead.\n result = (key1 <=> key2)\n result.nil? ? compare_with_nils(key1, key2) : result\n end\n\n # Add the new_rows to the table, but mark a group boundary at the points\n # where the sort key changes value. NB: I use self.class.new here\n # rather than Table.new because if this class is inherited, I want the\n # new_tab to be an instance of the subclass. 
With Table.new, this\n # method's result will be an instance of FatTable::Table rather than of\n # the subclass.\n new_tab = empty_dup\n last_key = nil\n new_rows.each_with_index do |nrow, k|\n new_tab << nrow\n key = nrow.fetch_values(*key_hash.keys)\n new_tab.mark_boundary(k - 1) if last_key && key != last_key\n last_key = key\n end\n new_tab.normalize_boundaries\n new_tab\n end", "def sort(column=@sort_by, asc = @sort_order)\n column_format = format[index(column)]\n x_sort_string = \"x.data[index('#{column}')]\"\n y_sort_string = \"y.data[index('#{column}')]\"\n \n if !(NUMBER_FORMATS + DATE_FORMATS).include? format[index(column)]\n x_sort_string = x_sort_string + \".to_s\"\n y_sort_string = y_sort_string + \".to_s\"\n end\n \n if NUMBER_FORMATS.include? format[index(column)]\n x_sort_string = x_sort_string + \".to_f\"\n y_sort_string = y_sort_string + \".to_f\" \n end\n\n # Sort the rows:\n # By returning -1 in case the comparison fails, we ensure that the sort never raises an error; however,\n # we may not get a proper sort, but it's better to have the app not fail\n @rows.sort! {|x, y| (instance_eval(x_sort_string) <=> instance_eval(y_sort_string)) || -1}\n \n @rows.reverse! unless asc\n end", "def sort_by(att, options = {})\n sort(options.merge(:by => to_key(att)))\n end", "def sort_keys\n sort_key_column_names = if can_optimise_for_different_table?\n [foreign_key_column_name_to_optimised_table]\n else\n [primary_key_column_name].compact\n end\n\n sort_key_column_names + DEFAULT_SORT_KEYS.select { |column_name| table_includes_column?(column_name) }\n end", "def datagrid_sort(sort_param, direction_param, options = {})\n sort_cols = self._sort_columns || self.column_names\n default_sort_col = self._default_sort_column || :id\n sort = sort_cols.include?(sort_param) ? sort_param : default_sort_col\n sort = \"#{quoted_table_name}.#{sort}\" unless sort.to_s.include?('.')\n default_sort_dir = self._default_sort_direction || :asc\n direction = %w[asc desc].include?(direction_param) ? direction_param : default_sort_dir\n [[sort, direction]]\n end", "def sort_rows(collection, sort_by, sort_direction)\n return collection if sort_by.blank?\n\n sort_by.split(\",\").each do |sort_attribute|\n sort_attribute_symbol = sort_attribute.to_sym\n collection.sort! { |x, y| (x.send(sort_attribute_symbol) || \"\") <=> (y.send(sort_attribute_symbol) || \"\") }\n end\n\n collection.reverse! if sort_direction == \"desc\"\n collection\n end", "def sort\n return unless @model\n return if @sort_keys.empty?\n $log.debug \"TABULAR SORT KEYS #{sort_keys} \"\n @model.sort!{|x,y| \n res = 0\n @sort_keys.each { |ee| \n e = ee.abs-1 # since we had offsetted by 1 earlier\n abse = e.abs\n if ee < 0\n res = y[abse] <=> x[abse]\n else\n res = x[e] <=> y[e]\n end\n break if res != 0\n }\n res\n }\n end", "def sort(in_ds, out:, by:, in_memory: false, split_size: RemiConfig.sort.split_size)\n if in_memory\n DataStepHelper.sort_in_memory(in_ds, out: out, by: by)\n else\n DataStepHelper.sort_external(in_ds, out: out, by: by, split_size: split_size)\n end\n end", "def use_sort(query, evt)\n sort_index, sort_order = grid_get_sort(evt)\n if sortable_columns[sort_index]\n column = columns[sortable_columns[sort_index]] \n else\n if default_sort\n column = columns[sortable_columns[default_sort[0]]]\n sort_order = default_sort[1] ? 
'ASC' : 'DESC'\n else\n column = nil\n end\n end\n if column\n if column[:sortable] == true\n if column[:field].include?('.')\n # special handling for associations, try to do the right thing in the simplest/commonest case\n # turns advisor.person.first_name to people.first_name\n # if this doesn't work, you need to use a custom order (set sortable to ['asc string', 'desc string'])\n x = column[:field].split('.').pop(2)\n x[0] = x[0].pluralize\n query = query.order(\"#{x.join('.')} #{sort_order}\")\n else\n query = query.order(\"#{column[:field]} #{sort_order}\")\n end\n else\n # custom override, presuming it is an array\n query = query.order(column[:sortable][sort_order == 'DESC' ? 1 : 0])\n end\n end\n query\n end", "def sortable(*attr_names)\r\n attr_names.each do |attr_name|\r\n scope \"sort_#{attr_name}\", -> ordering { order(\"#{table_name}.#{attr_name} #{ordering}\") }\r\n end\r\n end", "def sort(field: 'user_id', order: :asc)\n @customers.sort_by! { |c| c[field] }\n @customers.reverse! if order == :desc\n @customers\n end", "def sortBy(keyPath, ascending:ascending)\n scopeByAddingSortDescriptor(NSSortDescriptor.alloc.initWithKey(keyPath.to_s, ascending:ascending))\n end", "def ascending(*fields)\n sort_with_list(*fields, 1)\n end", "def sortable_by(*attributes, **options)\n @default_sort_attribute = options.delete(:default)\n @default_sort_dir = options.delete(:direction) || :asc\n @sortable_mapping = options.merge(attributes.to_h { |att| [att, att] })\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Initializes a new Column instance with the specified ID and options. Valid keys for options are +:alignment+, +:data_comparator+, +:data_to_s+, +:label+, +:padding+, and +:width+, whose usage should be obvious. +id+:: The new instance's ID. +options+:: An options map specifying zero or more of the column's instance variable values.
def initialize(id, options = { }) @id = id @alignment = options[:alignment] || :center @data_comparator = options[:data_comparator] || Proc.new { |a, b| a <=> b } @data_to_s = options[:data_to_s] || Proc.new { |o| o.to_s } @label = options[:label] || id @padding = options[:padding] || " " @width = options[:width] || label.length end
[ "def column id = nil, options = {}\n unless id\n id = options[:id]\n raise \"Need a column ID\" if id.nil?\n end\n if options.empty?\n @cols << {}\n else\n @cols << options\n end\n @col_count = @cols.size\n @col_ids << id\n end", "def initialize(options={})\n @cols = []\n @rows = []\n @options = options\n return if options.empty?\n\n new_columns(options[:cols]) unless options[:cols].nil?\n\n return if options[:rows].nil?\n rows = options[:rows]\n rows.each do |row|\n add_row(row[:c])\n end\n end", "def initialize(options)\n @id = options[\"id\"]\n @table_name = options[\"table_name\"]\n end", "def initialize(id, *values)\n @id = id\n\n return if values.empty?\n\n # set the instance variables\n get_columns.each do |key, value|\n instance_variable_set(\"@#{key}\", values[value])\n end\n end", "def initialize(options={})\n @cols = []\n @rows = []\n @listeners = []\n @options = options\n return if options.empty?\n\n new_columns(options[:cols]) unless options[:cols].nil?\n\n return if options[:rows].nil?\n\n rows = options[:rows]\n rows.each do |row|\n add_row(row[:c])\n end\n end", "def initialize(options = {})\n options = {\n array_class: Array,\n element_class: Hash,\n column_type: :text,\n allow_nil: true\n }.merge(options)\n\n @array_class = options[:array_class]\n @element_class = options[:element_class]\n @column_type = options[:column_type]\n @allow_nil = options[:allow_nil]\n end", "def initialize(options = {})\n @column_count = options[:column_count]\n @column_width = options[:column_width]\n @gutter_width = options[:gutter_width]\n @input_padding = options[:input_padding]\n @input_border = options[:input_border]\n end", "def create_column(id)\n column = Column.new(id)\n column.insert_left self\n return @columns[id] = column\n end", "def column(symbol, options)\n @columns ||= {}\n @columns[symbol] = options\n end", "def initialize(options = {}, data = GoogleVisualr::DataTable::DataTable.new, formatters = [])\n set_options(options)\n @data_table = data\n @formatters = GoogleVisualr::Formatters::Formatters.new(formatters)\n end", "def column name, options = {}\n\n if name.is_a?(Numeric) || name.is_a?(String)\n @columns << Constant.wrap(name)\n elsif name.is_a?(ValueExtractor)\n @columns << name\n name.column = COLUMNS[@assigned_column_index]\n elsif name == Empty.instance\n @columns << nil\n elsif @definitions.variable?(name)\n @definitions.variable(name).column = COLUMNS[@assigned_column_index]\n @columns << @definitions.variable(name)\n elsif @definitions.formula?(name)\n @definitions.formula(name).column = COLUMNS[@assigned_column_index]\n @columns << @definitions.formula(name)\n else\n raise \"Unknown function or variable '#{name}'\"\n end\n\n raise ArgumentError, \"Options should be a Hash\" unless options.is_a?(Hash)\n\n @columns_by_id[options[:id]] = @assigned_column_index if options[:id]\n \n title = nil\n title = options[:title] if options[:title]\n\n style = nil\n style = Style.new(options[:style]) if options[:style]\n\n type = nil\n type = options[:type] if options[:type]\n\n column_style = nil\n column_style = options[:column_style] if options[:column_style]\n\n title_style = nil\n title_style = options[:title_style] if options[:title_style]\n\n @column_titles << title\n @has_titles = @has_titles || !title.nil?\n\n @cell_styles << style\n @column_types << type\n @title_styles << title_style\n @column_styles << column_style\n\n @assigned_column_index += 1\n\n end", "def format(id, value, options = {})\n id.split(/,\\s/).each do |i|\n col = col_by_id i, true\n col.format = 
value\n col.format_options = options\n end\n end", "def initialize(options = {})\n @options = OpenStruct.new(options)\n self.class.instance_eval do\n def_delegators :@options, *options.keys\n end\n end", "def initialize(option_values, id = nil)\n @options = OpenStruct.new\n self.class.options.each do |option|\n option.process(@options, option_values)\n end\n\n # commands (not metrics) have ids that uniquely identify a command\n # instance and its response\n @id = id\n end", "def initialize(cql_options = {})\n @cql_options = cql_options\n end", "def initialize(table, options={}, col_types=nil)\n @table, @options = table.to_sym, options.freeze\n\n if !col_types\n col_types = {}\n Mao.sql(\n 'SELECT column_name, data_type FROM information_schema.columns ' \\\n 'WHERE table_name=$1',\n [@table.to_s]) do |pg_result|\n if pg_result.num_tuples.zero?\n raise ArgumentError, \"invalid or blank table #@table\"\n end\n\n pg_result.each do |tuple|\n col_types[tuple[\"column_name\"].to_sym] = tuple[\"data_type\"]\n end\n end\n end\n\n @col_types = col_types.freeze\n end", "def initialize(klass, options={})\n @referent = klass\n @options = options\n @options[:column] ||= \"row_id\"\n @column = options[:column]\n end", "def initialize(options={})\n @id = options[\"id\"]\n @games_id = options[\"games_id\"]\n @users_id = options[\"users_id\"]\n @score = options[\"score\"]\n end", "def checkbox(id = nil, options = { }, &block)\n options[:block] = block if block_given?\n t.columns[id] = options.reverse_merge! checkbox: true, align: :center\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Returns a string representation of the specified data with the appropriate alignment and padding for the receiver. +data+:: The data to pad and align.
def padded_aligned_data(data) string_repr = @data_to_s.call(data) string_repr = case @alignment when :left string_repr.ljust(@width) when :center string_repr.center(@width) when :right string_repr.rjust(@width) else string_repr end @padding ? @padding + string_repr + @padding : string_repr end
[ "def print_data(data, padding)\n p = \"\"\n (0..padding.length - data.to_s.length).map {p += \" \"}\n print data.to_s + p\n end", "def pad_to_block_size(data)\n data = [data.length].pack('N') + data\n return data if data.length % block_size == 0\n data + \"\\0\" * (block_size - data.length % block_size)\n end", "def table_data(data)\n left, middle, right = [@chars[:ldb], @chars[:idb], @chars[:rdb]]\n a = []\n data.each_with_index do |item, x|\n a << (' ' + item.to_s.send(@align[x] || :ljust, @widths[x]) + ' ')\n end\n s = @chars.wrap(left) + a.join(@chars.wrap(middle)) + @chars.wrap(right) + \"\\n\"\n s\n end", "def dump(data)\n lines = data.to_s.scan(/.{1,16}/m)\n max_offset = (offset + data.size) / 256 #16 * 16\n max_offset_width = max_offset.to_s.size + 1\n max_hex_width = 49 #3 * 16 + 1\n\n out = template()\n off = offset()\n\n if index?\n puts((' ' * max_offset_width) + \" 0 1 2 3 4 5 6 7 8 9 A B C D E F\\n\")\n end\n\n lines.each_with_index do |line, n|\n offset = off + n * 16\n bytes = line.unpack(\"C*\")\n hex = bytes.map{ |c| \"%0.2x\" % c }.insert(8, '').join(' ')\n\n plain = bytes.map do |c|\n if ASCII_PRINTABLE.include?(c)\n c = c.chr\n else\n color ? Code::WHITE + Code::STRIKE + '.' + Code::CLEAR : '.' \n end\n end.join('')\n\n fill = [offset.to_s.rjust(max_offset_width), hex.ljust(max_hex_width), plain]\n\n puts(out % fill)\n end \n end", "def to_data(prog, offset, data)\n if data.is_a?(Hash)\n res = \"\\x0\"*@struct_size\n @members.each do |n, m|\n if data.key?(n.to_sym)\n items = Array(data[n.to_sym])\n item_offset = m.offset\n m.count.times do |index|\n s = member_item_to_data(prog, m, offset + item_offset, items[index])\n res[item_offset, size=s.bytesize] = s\n item_offset += size\n end\n end\n end\n elsif data.respond_to? :to_z80bin\n res = data.to_z80bin[0,@struct_size].ljust(@struct_size, \"\\x0\")\n elsif data.is_a?(String)\n res = data.dup.force_encoding(Encoding::ASCII_8BIT)[0,@struct_size].ljust(@struct_size, \"\\x0\")\n else\n data = Array(data)\n res = ''\n index = 0\n @members.reject {|_, m| m.alias}.each do |_, m|\n m.count.times do\n s = member_item_to_data(prog, m, offset, data[index])\n offset += s.bytesize\n index += 1\n res << s\n end\n end\n end\n res\n end", "def build_data_tpdu(data)\n tpkt_length = data.length + 7\n\n \"\\x03\\x00\" + # TPKT Header version 03, reserved 0\n [tpkt_length].pack(\"S>\") + # TPKT length\n \"\\x02\\xf0\\x80\" + # X.224 Data TPDU (2 bytes: 0xf0 = Data TPDU, 0x80 = EOT, end of transmission)\n data\n end", "def put_aligned_data(arg)\n # arg.size must be aligned\n @data += [arg].pack(\"a#{arg.size}\")\n self\n end", "def serialize\n serialize_data\n @length = data[0..-2].length\n @checksum = chksum data[0..-2]\n name + [length, chksum(data[0..-2])].pack('vv') + data\n end", "def padding_str\n if buffer.length > block_size\n raise(RuntimeError, \"internal error: buffer is larger than block size\")\n end\n\n pad_size = block_size - buffer.length\n pad_size = block_size if pad_size == 0 # add a block to disambiguate\n pad_char = nil\n case padding\n when :pkcs\n pad_char = \"%c\" % pad_size\n when :zeros\n pad_char = \"%c\" % 0\n else\n raise(RuntimeError, \"Input is not an even multiple of the block size \" +\n \"(#{block_size}), but no padding has been specified.\")\n end\n pad_char * pad_size\n end", "def ralign(a, p=' ')\n p ||= ' '\n l = self.bytesize\n pad = l.pad(a)\n self.rjust(pad+l, p)\n end", "def padd_packet(data, padding)\n return data if padding.nil?\n return data if data.size > 7\n data + [ padding ] * (8 - 
data.size)\n end", "def hexdump( data )\n\t\t\tdata.bytes.to_a.map {|byte| sprintf('%#02x',byte) }.join( ' ' )\n\t\tend", "def rdp_build_data_tpdu(data)\n tpkt_length = data.length + 7\n\n \"\\x03\\x00\" + # TPKT Header version 03, reserved 0\n [tpkt_length].pack(\"S>\") + # TPKT length\n \"\\x02\\xf0\" + # X.224 Data TPDU (2 bytes)\n \"\\x80\" + # X.224 End Of Transmission (0x80)\n data\n end", "def length_prefixed_string(data)\n msg = data.encode(Encoding::UTF_8)\n # https://ruby-doc.org/core-1.9.3/Array.html#method-i-pack\n [msg.bytes.length].pack('V') + msg.force_encoding(Encoding::BINARY)\n end", "def padding\n \"\\0\" * padding_size\n end", "def to_str\n data = \"\"\n self.class::RTATTRS.each do |code, (name, info)|\n if val = @attrs[name]\n Message.nlmsg_pad(data) # assume NLMSG_ALIGNTO == NLA_ALIGNTO\n if !info\n val = val.to_str # raw binary or nested structure\n elsif pack = info[:pack]\n val = pack[val,self]\n elsif pattern = info[:pattern]\n val = Array(val).pack(pattern)\n else\n val = val.to_str\n end\n data << [val.bytesize+RTATTR_SIZE, code].pack(RTATTR_PACK) << val\n end\n end\n data.empty? ? super : Message.nlmsg_pad(super) + data\n end", "def align(a)\n case a\n when 1\n when 2, 4, 8\n bits = a - 1\n @packet = @packet.ljust(@packet.length + bits & ~bits, 0.chr)\n else\n raise \"Unsupported alignment\"\n end\n end", "def blockify(data)\n return '0x00' if data.size == 0\n blocks = data.scan(/.{1,255}/m)\n blocks[0..-2].each{ |d| d.prepend('0xFF') } if blocks.size > 1\n blocks[-1].prepend([blocks[-1].size].pack('C'))\n blocks.append('0x00')\n blocks.join\n end", "def inspect_to_s(data)\n data.inspect\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
S _ _ _ ... row, col = 1 _ _ _ _ Strategy: 1. Solve this for 1 row only, horizontally only. 2. Transpose the matrix. Then we can use the same code on the transposed matrix to solve it vertically. Solve for 1 row: Test cases: Word = "test" OK (all same length as the word): 1. _ _ _ _ All blanks. Left and right of the word must be either "" or the border. 2. T _ N _ >=1 of the characters are in the word. Not OK: 1. _ _ T E S T _ _ Can't start the word from the middle. 2. T _ _ S Can't have 1 or more letters not matching the letters in the word. 3. Can't have "" in the word. 4. T E S T _ W
def crossword(arr, word) earliest_col = -1 ans = [] arr.each_with_index do |row, i| earliest_col = solve_horiz(row, word) ans = [i, earliest_col] break end tarr = transpose arr tarr.each_with_index do |col, i| earliest_row = solve_horiz(col, word) if earliest_row <= ans[0] && i <= ans[1] ans = [earliest_row, i] end break end ans end
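The crossword method above calls two helpers, solve_horiz and transpose, that are not defined in this row. A minimal sketch of what solve_horiz might look like, assuming cells are "_" for an open blank, "" for a blocked square, and a single letter for a pre-filled cell (the rules spelled out in the query), returning the earliest start column or nil:

def solve_horiz(row, word)
  # Try every start column at which the word could physically fit in this row.
  (0..row.length - word.length).each do |start|
    span = row[start, word.length]

    # Each cell in the span must be blank or already hold the matching letter;
    # a blocked "" cell inside the span fails both checks and rejects the fit.
    fits = span.each_with_index.all? { |cell, k| cell == "_" || cell == word[k] }
    next unless fits

    # The word must be bounded by a blocked cell or the grid border on both sides.
    left_ok  = start.zero? || row[start - 1] == ""
    right_ok = start + word.length == row.length || row[start + word.length] == ""

    return start if left_ok && right_ok
  end
  nil
end

transpose could be plain Array#transpose (arr.transpose); the helper bodies here are illustrative assumptions, not code from the dataset row.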
[ "def horizontal_solve(word, puzzle)\n puzzle.each do |row|\n return true if row.join.include?(word) || row.join.reverse.include?(word)\n end\n false\nend", "def word_square?(arr)\n # first brute force, using i and j \n i = 0 \n while i < arr.length \n j = 0 \n while j < arr.length \n # next if i >= j will overflow as j never increments\n if i >= j \n j+= 1 \n next \n end \n return false unless arr[i][j] == arr[j][i]\n j += 1 \n end \n i+=1 \n end \n true\n # now think how i can optimize it when i == j \n # it's comparing itself against itself, so we don't need to check \n # also we checked each pair twice , [0][2] first and [2][0]\n # the first round since we increment inner loop first, second number\n # is bigger, so we don't need to check if first number is bigger than secodn number \n # means it's the same pair \nend", "def solve(word)\n size = word.length\n grid = Grid.new\n\n # starting position allows room for word to go leftwards or upwards\n # without bumping into left or top edges; assumes initially heading\n # rightwards and only right turns can be made\n position = [size - 5, size - 4]\n direction = Direction.rightwards\n\n # put first character at starting position\n grid[position] = word[0, 1]\n\n # generate solutions in results parameter\n results = []\n find_loops(word, 1, grid, position, direction, results)\n results\nend", "def count_palindromes_2 str\n\n counts = [1]\n palindromes = Array.new(str.length) { Array.new(str.length, false) }\n# palindromes[0][0] = true # for purposes of getting a full & correct matrix only.\n\n 1.upto(str.length-1) do |right|\n palindromes[right][right] = true # diagonal\n counts[right] = counts[right-1] + 1 # worst case\n\n (right-1).downto(0) do |left|\n # why right-left <= 2\n # 1) there is no palindrome between adjacent letters.\n # Looking up palindromes[left+1][right-1]) will fail in this case.\n # To avoid this failure, at least the following is necessary:\n # left+1 == right\n # 2) [optimization] one letter per se is a palindrome, therefore do not need to additionally\n # check if it is a palindrome using the palindromes[][] matrix.\n # Otherwise left+1 == right would work fine.\n if str[left] == str[right] && (right-left <= 1 || palindromes[left+1][right-1])\n palindromes[left][right] = true\n\n # select better splitting\n count_before_left = left > 0 ? 
counts[left-1] : 0\n if count_before_left+1 < counts[right]\n counts[right] = count_before_left+1\n end\n end\n end\n end\n\n# show_matrix(palindromes)\n\n counts.last\nend", "def place_word( direction, row, col, word )\r\n position = 0 # Initial position setting for slicing letters from the word\r\n case direction # Case select for the 8 different directions\r\n when 0 # If the direction is 0 ( word is printed N )\r\n while position < word.length # Starts at the first letter of the word and continues to get each letter until the last letter of the word\r\n @answer_grid[ row ][ col ] = word.slice( position ) # Places the letter onto the answer grid\r\n position += 1 # Increment counter to get the next letter in the word\r\n row -= 1 # Moves the current position to the previous row ( N )\r\n end\r\n\r\n when 1 # If the direction is 1 ( word is printed NE )\r\n while position < word.length # Starts at the first letter of the word and continues to get each letter until the last letter of the word\r\n @answer_grid[ row ][ col ] = word.slice( position ) # Places the letter onto the answer grid\r\n position += 1 # Increment counter to get the next letter in the word\r\n row -= 1 # Moves the current position to the previous row ( N )\r\n col += 1 # Moves the current position to the next column ( E )\r\n end\r\n\r\n when 2 # If the direction is 2 ( word is printed E )\r\n while position < word.length # Starts at the first letter of the word and continues to get each letter until the last letter of the word\r\n @answer_grid[ row ][ col ] = word.slice( position ) # Places the letter onto the answer grid\r\n position += 1 # Increment counter to get the next letter in the word\r\n col += 1 # Moves the current position to the next column ( E )\r\n end\r\n\r\n when 3 # If the direction is 3 ( word is printed SE )\r\n while position < word.length # Starts at the first letter of the word and continues to get each letter until the last letter of the word\r\n @answer_grid[ row ][ col ] = word.slice( position ) # Places the letter onto the answer grid\r\n position += 1 # Increment counter to get the next letter in the word\r\n row += 1 # Moves the current position to the next row ( S )\r\n col += 1 # Moves the current position to the next column ( E )\r\n end\r\n\r\n when 4 # If the direction is 4 ( word is printed S )\r\n while position < word.length # Starts at the first letter of the word and continues to get each letter until the last letter of the word\r\n @answer_grid[ row ][ col ] = word.slice( position ) # Places the letter onto the answer grid\r\n position += 1 # Increment counter to get the next letter in the word\r\n row += 1 # Moves the current position to the next row ( S )\r\n end\r\n\r\n when 5 # If the direction is 5 ( word is printed SW )\r\n while position < word.length # Starts at the first letter of the word and continues to get each letter until the last letter of the word\r\n @answer_grid[ row ][ col ] = word.slice( position ) # Places the letter onto the answer grid\r\n position += 1 # Increment counter to get the next letter in the word\r\n row += 1 # Moves the current position to the next row ( S )\r\n col -= 1 # Moves the current position to the previous column ( W )\r\n end\r\n\r\n when 6 # If the direction is 6 ( word is printed W )\r\n while position < word.length # Starts at the first letter of the word and continues to get each letter until the last letter of the word\r\n @answer_grid[ row ][ col ] = word.slice( position ) # Places the letter onto the answer grid\r\n position 
+= 1 # Increment counter to get the next letter in the word\r\n col -= 1 # Moves the current position to the previous column ( W )\r\n end\r\n\r\n when 7 # If the direction is 7 ( word is printed NW )\r\n while position < word.length # Starts at the first letter of the word and continues to get each letter until the last letter of the word\r\n @answer_grid[ row ][ col ] = word.slice( position ) # Places the letter onto the answer grid\r\n position += 1 # Increment counter to get the next letter in the word\r\n row -= 1 # Moves the current position to the previous row ( N )\r\n col -= 1 # Moves the current position to the previous column ( W )\r\n end\r\n end\r\n end", "def fits?(word, row, column, direction, board)\n if direction == VERTICAL\n word.length + row <= board.length\n else\n word.length + column <= board[0].length\n end\n end", "def check_fit( direction, x, y, word )\r\n position = 0 # Initial position setting for slicing letters from the word\r\n case direction # Case select for the 8 different directions\r\n when 0 # If the direction is 0 ( word is printed N )\r\n if ( x - word.length ) > 0 # Checks to see if the word will fit on the puzzle grid\r\n while position < word.length # If word will fit on puzzle grid, counter starts at the first letter and continues until end of the word\r\n if @answer_grid[ x ][ y ] == @placeholder_char || @answer_grid[ x ][ y ] == word.slice( position ) # Checks to see if the sliced letter will be place on a usable position on the puzzle grid. Usable position\r\n # is a placeholder character or the same letter\r\n position += 1 # Increment counter to the next letter in the word\r\n x -= 1 # Moves the current position to the previous row ( N )\r\n else # If the position on the grid doesn't contain a placeholder character or the same letter\r\n return false # it returns false. The word will not fit on the puzzle grid using this direction ( printing N )\r\n end\r\n end # If the word successfully fits on the puzzle grid with this direction ( N ) it returns true so\r\n return true # the program knows to place the word at this location and print it in this direction ( N )\r\n end\r\n\r\n when 1 # If the direction is 1 ( word is printed NE )\r\n if ( x - word.length ) > 0 && ( y + word.length ) < @grid_size # Checks to see if the word will fit on the puzzle grid\r\n while position < word.length # If word will fit on puzzle grid, counter starts at the first letter and continues until end of the word\r\n if @answer_grid[ x ][ y ] == @placeholder_char || @answer_grid[ x ][ y ] == word.slice( position ) # Checks to see if the sliced letter will be place on a usable position on the puzzle grid. Usable position\r\n # is a placeholder character or the same letter\r\n position += 1 # Increment counter to the next letter in the word\r\n x -= 1 # Moves the current position to the previous row ( N )\r\n y += 1 # Moves the current position to the next column ( E )\r\n else # If the position on the grid doesn't contain a placeholder character or the same letter\r\n return false # it returns false. 
The word will not fit on the puzzle grid using this direction ( printing NE )\r\n end\r\n end # If the word successfully fits on the puzzle grid with this direction ( NE ) it returns true so\r\n return true # the program knows to place the word at this location and print it in this direction ( NE )\r\n end\r\n\r\n when 2 # If the direction is 2 ( word is printed E )\r\n if ( y + word.length ) < @grid_size # Checks to see if the word will fit on the puzzle grid\r\n while position < word.length # If word will fit on puzzle grid, counter starts at the first letter and continues until end of the word\r\n if @answer_grid[ x ][ y ] == @placeholder_char || @answer_grid[ x ][ y ] == word.slice( position ) # Checks to see if the sliced letter will be place on a usable position on the puzzle grid. Usable position\r\n # is a placeholder character or the same letter\r\n position += 1 # Increment counter to the next letter in the word\r\n y += 1 # Moves the current position to the next column ( E )\r\n else # If the position on the grid doesn't contain a placeholder character or the same letter\r\n return false # it returns false. The word will not fit on the puzzle grid using this direction ( printing E )\r\n end\r\n end # If the word successfully fits on the puzzle grid with this direction ( E ) it returns true so\r\n return true # the program knows to place the word at this location and print it in this direction ( E )\r\n end\r\n\r\n when 3 # If the direction is 3 ( word is printed SE )\r\n if ( y + word.length ) < @grid_size && ( x + word.length ) < @grid_size # Checks to see if the word will fit on the puzzle grid\r\n while position < word.length # If word will fit on puzzle grid, counter starts at the first letter and continues until end of the word\r\n if @answer_grid[ x ][ y ] == @placeholder_char || @answer_grid[ x ][ y ] == word.slice( position ) # Checks to see if the sliced letter will be place on a usable position on the puzzle grid. Usable position\r\n # is a placeholder character or the same letter\r\n position += 1 # Increment counter to the next letter in the word\r\n x += 1 # Moves the current position to the next row ( S )\r\n y += 1 # Moves the current position to the next column ( E )\r\n else # If the position on the grid doesn't contain a placeholder character or the same letter\r\n return false # it returns false. The word will not fit on the puzzle grid using this direction ( printing SE )\r\n end\r\n end # If the word successfully fits on the puzzle grid with this direction ( SE ) it returns true so\r\n return true # the program knows to place the word at this location and print it in this direction ( SE )\r\n end\r\n\r\n when 4 # If the direction is 4 ( word is printed S )\r\n if ( x + word.length ) < @grid_size # Checks to see if the word will fit on the puzzle grid\r\n while position < word.length # If word will fit on puzzle grid, counter starts at the first letter and continues until end of the word\r\n if @answer_grid[ x ][ y ] == @placeholder_char || @answer_grid[ x ][ y ] == word.slice( position ) # Checks to see if the sliced letter will be place on a usable position on the puzzle grid. Usable position\r\n # is a placeholder character or the same letter\r\n position += 1 # Increment counter to the next letter in the word\r\n x += 1 # Moves the current position to the next row ( S )\r\n else # If the position on the grid doesn't contain a placeholder character or the same letter\r\n return false # it returns false. 
The word will not fit on the puzzle grid using this direction ( printing S )\r\n end\r\n end # If the word successfully fits on the puzzle grid with this direction ( S ) it returns true so\r\n return true # the program knows to place the word at this location and print it in this direction ( S )\r\n end\r\n\r\n when 5 # If the direction is 5 ( word is printed SW )\r\n if ( x + word.length ) < @grid_size && ( y - word.length ) > 0 # Checks to see if the word will fit on the puzzle grid\r\n while position < word.length # If word will fit on puzzle grid, counter starts at the first letter and continues until end of the word\r\n if @answer_grid[ x ][ y ] == @placeholder_char || @answer_grid[ x ][ y ] == word.slice( position ) # Checks to see if the sliced letter will be place on a usable position on the puzzle grid. Usable position\r\n # is a placeholder character or the same letter\r\n position += 1 # Increment counter to the next letter in the word\r\n x += 1 # Moves the current position to the next row ( S )\r\n y -= 1 # Moves the current position to the previous column ( W )\r\n else # If the position on the grid doesn't contain a placeholder character or the same letter\r\n return false # it returns false. The word will not fit on the puzzle grid using this direction ( printing SW )\r\n end\r\n end # If the word successfully fits on the puzzle grid with this direction ( SW ) it returns true so\r\n return true # the program knows to place the word at this location and print it in this direction ( SW )\r\n end\r\n\r\n when 6 # If the direction is 6 ( word is printed W )\r\n if ( y - word.length ) > 0 # Checks to see if the word will fit on the puzzle grid\r\n while position < word.length # If word will fit on puzzle grid, counter starts at the first letter and continues until end of the word\r\n if @answer_grid[ x ][ y ] == @placeholder_char || @answer_grid[ x ][ y ] == word.slice( position ) # Checks to see if the sliced letter will be place on a usable position on the puzzle grid. Usable position\r\n # is a placeholder character or the same letter\r\n position += 1 # Increment counter to the next letter in the word\r\n y -= 1 # Moves the current position to the previous column ( W )\r\n else # If the position on the grid doesn't contain a placeholder character or the same letter\r\n return false # it returns false. The word will not fit on the puzzle grid using this direction ( printing W )\r\n end\r\n end # If the word successfully fits on the puzzle grid with this direction ( W ) it returns true so\r\n return true # the program knows to place the word at this location and print it in this direction ( W )\r\n end\r\n\r\n when 7 # If the direction is 7 ( word is printed NW )\r\n if ( x - word.length ) > 0 && ( y - word.length ) > 0 # Checks to see if the word will fit on the puzzle grid\r\n while position < word.length # If word will fit on puzzle grid, counter starts at the first letter and continues until end of the word\r\n if @answer_grid[ x ][ y ] == @placeholder_char || @answer_grid[ x ][ y ] == word.slice( position ) # Checks to see if the sliced letter will be place on a usable position on the puzzle grid. 
Usable position\r\n # is a placeholder character or the same letter\r\n position += 1 # Increment counter to the next letter in the word\r\n x -= 1 # Moves the current position to the previous row ( N )\r\n y -= 1 # Moves the current position to the previous column ( W )\r\n else # If the position on the grid doesn't contain a placeholder character or the same letter\r\n return false # it returns false. The word will not fit on the puzzle grid using this direction ( printing NW )\r\n end\r\n end # If the word successfully fits on the puzzle grid with this direction ( NW ) it returns true so\r\n return true # the program knows to place the word at this location and print it in this direction ( NW )\r\n end\r\n end\r\n end", "def test_place(matrix_word, start_point)\n 8.times do |dir|\n current_point = start_point\n good = true\n matrix_word.word.each_byte do |chr|\n good = (@grid[current_point].ord == chr) && (current_point = next_point(current_point, dir))\n break unless good\n end\n # If good (word found in start_point point and with @direction direction), it marks the word and returns.\n return mark_word(matrix_word, start_point, dir) if good\n # If it did not succeed with that direction, it tries with the next one\n dir = (dir + 1) % 8\n end\n nil \n end", "def convert(s, num_rows)\n return s if s == nil || num_rows == nil || s.length <= 1 || num_rows <= 1\n\n # approach: create a matrix with num_rows and columns\n # keep track of whether going down or up\n # append the character to the appropriate row\n # with Ruby, we can leverage .push method\n # with strongly typed language and array of primitive type, character,\n # we will need to keep track of which column we are on for each row.\n downwards = true\n matrix = Array.new(num_rows){Array.new(s.length)}\n\n # approach that'd work with non Ruby lanaguages\n column_index = Array.new(num_rows)\n num_rows.times do |row|\n column_index[row] = 0\n end\n\n current_row = 0\n s.length.times do |i|\n matrix[current_row][column_index[current_row]] = s[i]\n column_index[current_row] += 1\n if downwards\n if current_row == num_rows - 1\n downwards = false\n current_row -= 1\n else # keep moving to the next row down\n current_row += 1\n end\n else # upwards\n if current_row == 0\n downwards = true\n current_row += 1\n else # keep moving to the next row up\n current_row -= 1\n end\n end\n end\n\n output_string = \"\" # create an output string with same length as input string\n i = 0\n num_rows.times do |j|\n column_index[j].times do |k|\n output_string[i] = matrix[j][k]\n i += 1\n end\n end\n\n return output_string\nend", "def valid_word_for_board?(letters, word)\n letters.each_with_index do |row, i|\n row.each_with_index do |letter, j|\n if word[0] == letter\n if make_word_from_board?(letters, word, i, j)\n return true\n end\n end\n end\n end\n false\n end", "def find_valid_words(grid, dict)\n # iterate through characters in grid\n grid.each_with_index do |row, row_index|\n row.each_with_index do |char, index|\n # recurse over all possibilities starting at char\n running_string = char\n adjacent_characters = adjacent_characters_for(char, row_index, index, grid)\n\n adjacent_characters.each do |adjacent_char|\n running_string += adjacent_char\n # add adjacent character to the running string\n # check for a match in dict\n # remove char and current adjacent char from a list of available characters\n #\n\n #\n end\n end\n end\n # for each character, test each possible direction\n # remove character from consideration after it's been used\n # keep a 
running string for each iteration, check if it matches\n # a word in dictionary every time it's appended to\nend", "def lps_matrix(string)\n return '' if string == ''\n\n matrix = Array.new(string.size, nil)\n matrix.each_index { |index| matrix[index] = Array.new(string.size, nil)}\n\n (1..string.size).each do |length|\n\n if length == 1\n\n (0..string.size - 1).each do |idx|\n matrix[idx][idx] = string[idx]\n end\n\n else\n\n (0..string.size - length).each do |row_idx|\n col_idx = row_idx + length - 1\n matrix[row_idx][col_idx] =\n if string[row_idx] == string[col_idx]\n string[row_idx] + (matrix[row_idx + 1][col_idx - 1]).to_s + string[col_idx]\n else\n [\n matrix[row_idx][col_idx - 1],\n matrix[row_idx + 1][col_idx]\n ].max_by { |string| string.length }\n end\n end\n\n end\n end\n\n matrix[0][string.size - 1]\n\nend", "def find_word_in_matrix (matrix, word)\n\t$find_next_found = false\n\t\n\tmatrix.each_index do |row_index|\t\t\n\t\tmatrix[row_index].each_index do |col_index|\n\t\t\tif matrix[row_index][col_index] == word[0]\t\t\t\t\t\t\t\n\t\t\t\tfind_next(matrix, word, 0, row_index, col_index)\n\t\t\tend\n\t\tend\n\tend\n\n\t$find_next_found\nend", "def solve(*words)\n words = words.collect { |word| word.upcase_trim }\n\n #\n # Make the various transformations, checking for matches along the\n # way.\n #\n normalize ; replicate_match(words) # match L->R\n flip_horizontal ; replicate_match(words) # match R->L\n diagonalize ; replicate_match(words) # match TL->BR\n flip_horizontal ; replicate_match(words) # match BR->TL\n undiagonalize(true) ; replicate_match(words) # match T->B\n flip_horizontal ; replicate_match(words) # match B->T\n flip_vertical ; flip_horizontal\n diagonalize ; replicate_match(words) # match BL->TR\n flip_horizontal ; replicate_match(words) # match TR->BL\n undiagonalize(false)\n\n #\n # And return the solution.\n #\n @sltn_lines\n end", "def check_chars(x, y, len, hei, board, control, word, index)\n return true if index == word.length # if the index == length we've found the word\n\n # check if the searched character is in one the neighboors\n # if it is, move to that cell and check for the next one until all are found\n # or the character doesn't appear in any of the neighboors\n\n if check_char(x - 1, y, len, hei, board, control, word[index])\n return true if check_chars(x - 1, y, len, hei, board, control, word, index + 1)\n\n control[y][x - 1] = true\n end\n\n if check_char(x + 1, y, len, hei, board, control, word[index])\n return true if check_chars(x + 1, y, len, hei, board, control, word, index + 1)\n\n control[y][x + 1] = true\n end\n\n if check_char(x, y - 1, len, hei, board, control, word[index])\n return true if check_chars(x, y - 1, len, hei, board, control, word, index + 1)\n\n control[y - 1][x] = true\n end\n\n if check_char(x, y + 1, len, hei, board, control, word[index])\n return true if check_chars(x, y + 1, len, hei, board, control, word, index + 1)\n\n control[y + 1][x] = true\n end\n\n false\nend", "def has_space_down_for_word?(row, col, word)\n word.length.times do |index|\n board_space = @board[row + index][col]\n return if !board_space.nil? 
&& board_space != word[index]\n end\n return word\n end", "def solver (seed_char, blanks_words_sizes, matrix)\n\t# Set numerical target\n\ttarget = magic_num(seed_char)\t\n\t# Find magic number sum buckets\n\tskynet(target, blanks_words_sizes, blanks_words_sizes.length - 1, 0, [])\n\t# Alphabetical sort input matrix\n\tsorted_seed_char = seed_char.chars.sort.join\t\n\n\t# Find unique sets from skynet solutions\n\t$answer[:trace].each do |arrOarr|\n\t\tarrOarr.sort!\n\tend \n\n\t$answer[:trace].uniq!\t\n\t\n\t# Finds match for complete set of words from skynet solutions\n\t$answer[:trace].each do |answer_arr_el|\t\t\t\t\n\t\tunordered_match(sorted_seed_char, matrix, answer_arr_el, answer_arr_el.length - 1, \"\", [])\n\t\t# Can be ignored\n\t\t$ops += $seed[answer_arr_el[0][0]][:num_groups][answer_arr_el[0][1]].length *\n\t\t\t$seed[answer_arr_el[1][0]][:num_groups][answer_arr_el[1][1]].length *\n\t\t\t$seed[answer_arr_el[1][0]][:num_groups][answer_arr_el[1][1]].length *\n\t\t\t$seed[answer_arr_el[1][0]][:num_groups][answer_arr_el[1][1]].length\t\t\n\tend\n\t\n\treturn $answer[:words]\nend", "def triangular_word?(word)\n # debugger\n alpha = (\"a\"..\"z\").to_a\n\n word_encoded = word.chars.inject(0) do |sum, char|\n char_encoded = alpha.index(char) + 1 # start alpha encoding at 1, not 0\n sum += char_encoded\n end\n\n is_triangular?(word_encoded)\nend", "def simple_transposition(text)\n letters = text.split('')\n row_one = []\n row_two = []\n letters.each_index do |i|\n if i.odd?\n row_two.push(letters[i])\n elsif i.even?\n row_one.push(letters[i])\n end\n end\n return (row_one + row_two).join('')\nend" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Start the database transaction, or fail if one is already open.
def start_transaction! fail DbMod::Exceptions::AlreadyInTransaction if @in_transaction @in_transaction = true query 'BEGIN' end
[ "def begin_db_transaction\n puts_log \"begin_db_transaction\"\n log(\"begin transaction\", \"TRANSACTION\") {\n # Turns off the auto-commit\n IBM_DB.autocommit(@connection, IBM_DB::SQL_AUTOCOMMIT_OFF) }\n end", "def begin_db_transaction\n @pool.hold(true) do |conn|\n conn.query(\"begin\")\n end\n end", "def begin_db_transactions\n [:left, :right].each do |database|\n session.send(database).transaction_manager.begin_transaction\n if maintain_activity_status?\n session.send(database).execute(\"insert into #{activity_marker_table} values(1)\")\n end\n end\n end", "def start\n Thread.current[:transaction_store] = self\n\n @transaction_nesting += 1\n\n # nop on myISAM based tables\n exec_statement \"START TRANSACTION\"\n end", "def begin_db_transaction\n @connection.autocommit = false\n end", "def start\n Thread.current[:transaction_store] = self\n\n # neumann: works with earlier PSQL databases too.\n exec('BEGIN TRANSACTION') if @transaction_nesting < 1\n\n if @transaction_nesting >= 1 && @conn.server_version > 80000\n exec(\"SAVEPOINT SP#{@transaction_nesting}\")\n end\n\n @transaction_nesting += 1\n end", "def begin_transaction\n return System.begin_transaction\n end", "def open( tran: false, mode: :immediate ) # tran = true transaction\n #DBlog::stoD( parse_caller( caller.first ) ) if $debug == true\n @db = SQLite3::Database.new( @DBfile )\n @db.busy_timeout(1000)\n ecount = 0\n roll = false\n begin\n roll = false\n if tran == true\n @db.transaction( mode ) do\n roll = true\n yield self\n end\n else\n yield self\n end\n rescue SQLite3::BusyException => e\n DBlog::sto(\"SQLite3::BusyException tran = #{tran.to_s} #{ecount}\")\n begin\n @db.rollback() if roll == true\n rescue\n DBlog::sto(\"rollback fail #{$!}\")\n end\n if ecount > 59\n Commlib::errPrint( \"SQLite3::BusyException exit\", $!, e )\n return\n else\n #Commlib::errPrint( \"SQLite3::BusyException retry\", $!, e )\n ecount += 1\n sleep( 1 )\n DBlog::sto(\"retry\")\n retry\n end\n rescue => e\n Commlib::errPrint( \"SQLite3::another error\", $!, e )\n begin\n @db.rollback() if roll == true\n rescue\n DBlog::sto(\"rollback fail #{$!}\")\n end\n return\n ensure\n close()\n end\n end", "def transaction(mode = :deferred)\n execute \"begin #{mode.to_s} transaction\"\n @transaction_active = true\n\n if block_given?\n abort = false\n begin\n yield self\n rescue ::Object\n abort = true\n raise\n ensure\n abort and rollback or commit\n end\n end\n\n true\n end", "def transaction\n start_transaction\n\n yield\n ensure\n end_transaction if transaction_started?\n end", "def begin_transaction\n tx_req = Google::Datastore::V1::BeginTransactionRequest.new(\n project_id: project\n )\n\n execute { datastore.begin_transaction tx_req }\n end", "def transaction(&block)\n @db.query('START TRANSACTION;')\n begin\n yield\n @db.query('COMMIT;')\n rescue Exception => e\n @db.query('ROLLBACK;')\n raise e\n end\n end", "def add_transaction\n th = Thread.current\n unless @transactions.include?(th)\n th[:sequel_transaction_depth] = 0\n super\n end\n end", "def start_transaction\n @stats_engine.start_transaction\n end", "def begin_transaction(conn, opts=OPTS)\n if supports_savepoints?\n if savepoint_level(conn) > 1\n begin_savepoint(conn, opts)\n else\n begin_new_transaction(conn, opts)\n end\n else\n begin_new_transaction(conn, opts)\n end\n end", "def begin_transaction_sql\n \"BEGIN TRANSACTION\"\n end", "def transaction(&block)\n connection.exec \"BEGIN;\"\n yield\n connection.exec \"COMMIT;\"\n end", "def begin_isolated_db_transaction(isolation)\n raise 
ActiveRecord::TransactionIsolationError, \"adapter does not support setting transaction isolation\"\n end", "def start(&block)\n res = database.execute_request(\n :post,\n '/transaction/begin',\n headers: { content_type: 'text/plain' }\n ).body\n self.id = res\n @state = :started\n instance_eval(&block) if block_given?\n self\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
End the database transaction
def end_transaction! @in_transaction = false end
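The start_transaction!/end_transaction! pair above only shows BEGIN being issued and the @in_transaction flag being set and cleared; committing or rolling back the work in between is not shown. A sketch of how a caller might pair them, assuming the same query helper is used for COMMIT and ROLLBACK (an assumption, not something these snippets state):

def transaction!
  # Raises DbMod::Exceptions::AlreadyInTransaction if a transaction is already open.
  start_transaction!
  begin
    result = yield
    query 'COMMIT'   # assumed: the snippets above never issue COMMIT themselves
    result
  rescue StandardError
    query 'ROLLBACK' # assumed: roll back on any error before re-raising
    raise
  ensure
    end_transaction! # always clear the in-transaction flag
  end
end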
[ "def end\n\t\tcommit\n\t\t@connection.close\n\tend", "def end_transaction\n @stats_engine.end_transaction\n end", "def rollback_db_transaction\n execute(\"ROLLBACK\")\n end", "def commit_db_transaction\n @connection.commit\n end", "def end_transaction\n @connection = @auth_token = @version = nil\n end", "def finalize(success = true)\n if success\n commit_db_transactions\n else\n rollback_db_transactions\n end\n end", "def finish\r\n @db.close\r\n end", "def commit_db_transaction\n @connection.commit\n @connection.autocommit = true\n end", "def commit_transaction_sql\n SQL_COMMIT\n end", "def end_transaction(result = nil)\n agent&.end_transaction(result)\n end", "def end_transaction\n case @transaction_stack.length\n when 0\n PEROBS.log.fatal 'No ongoing transaction to end'\n when 1\n # All transactions completed successfully. Write all modified objects\n # into the backend storage.\n @transaction_stack.pop.each { |id| @transaction_objects[id]._sync }\n @transaction_objects = ::Hash.new\n @transaction_thread = nil\n else\n # A nested transaction completed successfully. We add the list of\n # modified objects to the list of the enclosing transaction.\n transactions = @transaction_stack.pop\n # Merge the two lists\n @transaction_stack.push(@transaction_stack.pop + transactions)\n # Ensure that each object ID is only included once in the list.\n @transaction_stack.last.uniq!\n end\n end", "def commit_transaction_sql\n 'COMMIT'\n end", "def committed_transaction; end", "def commit_db_transactions\n [:left, :right].each do |database|\n if maintain_activity_status?\n session.send(database).execute(\"delete from #{activity_marker_table}\")\n end\n session.send(database).transaction_manager.commit_transaction\n end\n end", "def commit\n execute \"commit transaction\"\n @transaction_active = false\n true\n end", "def rollback_transaction_sql\n SQL_ROLLBACK\n end", "def commit_transaction_sql\n \"COMMIT TRANSACTION\"\n end", "def rollback\n execute \"rollback transaction\"\n @transaction_active = false\n true\n end", "def commit\n db_interface.commit\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
returns the customer for the given review
def customer @@all.select {|review| self.restaurant.customers} end
[ "def customer\n return @customer\n end", "def customer_info(customer)\n return customer\n end", "def review_for(course)\n self.reviews.where(course_id: course.id).first\n end", "def related_customers\n ratings.customers\n end", "def get_design_review(name)\n self.design_reviews.detect { |dr| dr.review_type.name == name }\n end", "def review(review, options = {})\n get(\"reviews/#{review}\", options).pop\n end", "def review\n fetch('restaurant.review')\n end", "def customer\n @customer ||= Customer.new(value_for('customer'))\n end", "def customer()\n sql = \"SELECT * FROM customers INNER JOIN rentals ON rentals.customer_id\n = customers.id WHERE rentals.id = $1\"\n values = [@id]\n result = SqlRunner.run(sql, values)[0]\n return Customer.new(result)\n end", "def review_name(review)\n if Profile.exists?(user_id: review.user_id)\n review.user.profile.name\n else\n review.user.email\n end\n end", "def customer() #in the tickets (join) table so we can grab customer_id easily, the _id's are already bundled in this table\n sql = \"SELECT * FROM customers WHERE id = $1\"\n values = [@customer_id]\n customer = SqlRunner.run(sql, values).first\n return Customer.new(customer)\n end", "def show\n @client_review = ClientReview.find(params[:id])\n end", "def reviewed?(restaurant, options = {})\n self.reviews.of_restaurant(restaurant, options).first\n end", "def get_review(params)\n review_id = params[:id]\n utoken = params[:utoken]\n get(\"/reviews/#{review_id}\", { utoken: utoken })\n end", "def retrieveCustomer stripeCustomerId\n\t\tStripe::Customer.retrieve(stripeCustomerId)\n\tend", "def customer_id\n customer.id if customer\n end", "def review_user\n self.user\n end", "def customer_name\n customer.name if customer\n end", "def customer_id\n return @customer_id\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
GET /vocab_words GET /vocab_words.json
def index logger.debug "-----index-----" @vocab_words = VocabWord.all end
[ "def index\n @vocabularies = Vocabulary.page(params[:page]).per(10)\n end", "def show\n @vocab = Vocab.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @vocab }\n end\n end", "def index\n @controlled_vocabularies = ControlledVocabulary.all\n end", "def get_word_list\n\n text_id = params[:text_id]\n \n # => If text id is not given in the parameter of the url, then\n # => return null\n if text_id != nil && text_id.numeric?\n \n text = Text.find(text_id.to_i)\n \n @words = text.words\n \n else\n \n # @text = nil\n # @words = Word.find(:all)\n @words = nil\n \n end\n \n respond_to do |format|\n # format.html # index.html.erb\n # format.json { render json: @texts }\n # format.json { render json: @words }\n format.html { render json: @words }\n end\n \n end", "def load_vocabularies\n vocabularies.each do |name, config|\n load_vocab(name)\n end\n end", "def vocabulary\n wayfinder.decorated_vocabulary\n end", "def index\n @words = Word.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @words }\n end\n end", "def index\n @vocabs = Vocab.all\n end", "def show\n @spanish_vocab = SpanishVocab.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @spanish_vocab }\n end\n end", "def json_vocabulary_info\n # Get vocabulary information for documentation\n vocab_graph = RDF::Graph.load(File.expand_path(\"../../public/vocabs/rdfa-test.html\", __FILE__))\n vocab_info = {}\n SPARQL.execute(VOCAB_QUERY, vocab_graph).each do |solution|\n prop_name = solution[:prop].to_s.split('/').last\n vocab_info[prop_name] = {\n \"@id\" => prop_name,\n \"label\" => solution[:label].to_s,\n \"description\" => solution[:description].to_s\n }\n end\n vocab_info\n end", "def list\n @words=Word.find(:all)\n end", "def index\n @frequency_vocabularies = FrequencyVocabulary.all\n end", "def create\n @vocab = Vocab.new(params[:vocab])\n\n respond_to do |format|\n if @vocab.save\n format.html { redirect_to @vocab, notice: 'Vocab was successfully created.' 
}\n format.json { render json: @vocab, status: :created, location: @vocab }\n else\n format.html { render action: \"new\" }\n format.json { render json: @vocab.errors, status: :unprocessable_entity }\n end\n end\n end", "def index\n @words = Word.order(:word).all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @words }\n end\n end", "def index\n @words = Word.paginate(:page => params[:page], :per_page => 20)\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @word_meanings }\n end\n end", "def index\n @vocab_books = VocabBook.all\n end", "def sparql_vocab()\n q = Dev::Query.new()\n q.select(\"?term ?id\")\n q.where(\"?id skos:prefLabel ?term\")\n \n sparql = Dev::SparqlQuery.new(:uri=>\"http://bio2rdf.semanticscience.org:8026/sparql?\")\n sparql.prefix(:skos=>\"http://www.w3.org/2004/02/skos/core#\")\n return sparql.query(q,:graph=>\"cebs\",:format=>\"xml\")\n\n end", "def index\n # Get all registered ControlledVocabularies\n @controlled_vocabularies = ControlledVocabulary.all.order(:string_key)\n\n # Also get additional controlled vocabularies from UriService that haven't been registered\n controlled_vocabulary_string_keys = @controlled_vocabularies.map(&:string_key)\n @additional_uri_service_controlled_vocabularies = UriService.client.list_vocabularies(1000) # Ridiculously high limit to show all\n @additional_uri_service_controlled_vocabularies.delete_if { |uri_service_vocabulary| controlled_vocabulary_string_keys.include?(uri_service_vocabulary['string_key']) }\n end", "def index\n @learnwordlists = Learnwordlist.all\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
PATCH/PUT /vocab_words/1 PATCH/PUT /vocab_words/1.json
def update logger.debug "-----update-----" respond_to do |format| if @vocab_word.update(vocab_word_params) format.html { redirect_to @vocab_word, notice: 'Vocab word was successfully updated.' } format.json { render :show, status: :ok, location: @vocab_word } else format.html { render :edit } format.json { render json: @vocab_word.errors, status: :unprocessable_entity } end end end
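The update action above refers to @vocab_word and vocab_word_params, which this row does not show. In a conventional Rails controller these come from a before_action callback and a strong-parameters helper; a sketch along those lines, with the permitted attribute names being assumptions:

private

# Typically registered with: before_action :set_vocab_word, only: [:show, :edit, :update, :destroy]
def set_vocab_word
  @vocab_word = VocabWord.find(params[:id])
end

def vocab_word_params
  # Attribute names are illustrative; the real model's columns are not shown in this row.
  params.require(:vocab_word).permit(:word, :definition)
end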
[ "def update\n @spanish_vocab = SpanishVocab.find(params[:id])\n\n respond_to do |format|\n if @spanish_vocab.update_attributes(params[:spanish_vocab])\n format.html { redirect_to @spanish_vocab, notice: 'Spanish vocab was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @spanish_vocab.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @controlled_vocabulary.update(controlled_vocabulary_params)\n format.html { redirect_to @controlled_vocabulary, notice: 'Controlled vocabulary was successfully updated.' }\n format.json { render :show, status: :ok, location: @controlled_vocabulary }\n else\n format.html { render :edit }\n format.json { render json: @controlled_vocabulary.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @controlled_vocabulary.update(controlled_vocabulary_params)\n format.html { redirect_to edit_controlled_vocabulary_path(@controlled_vocabulary), notice: 'Controlled vocabulary was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @controlled_vocabulary.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @word = Word.find(params[:id])\n\n respond_to do |format|\n if @word.update_attributes(params[:word])\n format.html { redirect_to @word, :notice => 'Word was successfully updated.' }\n format.json { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.json { render :json => @word.errors, :status => :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @vocab_book.update(vocab_book_params)\n format.html { redirect_to @vocab_book, notice: 'Vocab book was successfully updated.' }\n format.json { render :show, status: :ok, location: @vocab_book }\n else\n format.html { render :edit }\n format.json { render json: @vocab_book.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @key_word = KeyWord.find(params[:id])\n\n respond_to do |format|\n if @key_word.update_attributes(params[:key_word])\n format.html { redirect_to @key_word, notice: 'Key word was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @key_word.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @vocabtype = Vocabtype.find(params[:id])\n\n respond_to do |format|\n if @vocabtype.update_attributes(params[:vocabtype])\n format.html { redirect_to(@vocabtype, :notice => 'Vocabtype was successfully updated.') }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @vocabtype.errors, :status => :unprocessable_entity }\n end\n end\n end", "def update\n @word_set = WordSet.find(params[:id])\n\n respond_to do |format|\n if @word_set.update_attributes(params[:word_set])\n format.html { redirect_to @word_set, notice: 'Word set was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @word_set.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @operator_vocabulary.update(operator_vocabulary_params)\n format.html { redirect_to @operator_vocabulary, notice: 'Vocabulary was successfully updated.' 
}\n format.json { render :show, status: :ok, location: @operator_vocabulary }\n else\n format.html { render :edit }\n format.json { render json: @operator_vocabulary.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @controlled_vocabulary_term.update(controlled_vocabulary_term_params)\n format.html { redirect_to @controlled_vocabulary_term.metamorphosize, notice: 'Controlled vocabulary term was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @controlled_vocabulary_term.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @key_word_set = KeyWordSet.find(params[:id])\n\n respond_to do |format|\n if @key_word_set.update_attributes(params[:key_word_set])\n format.html { redirect_to @key_word_set, notice: 'Key word set was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @key_word_set.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n\t\t\t\t@word = Word.update(params[:word][:id], params[:word])\n\t\t\t\trender json: @word\n\t\t\tend", "def update\n @my_word = MyWord.find(params[:id])\n\n respond_to do |format|\n if @my_word.update_attributes(params[:my_word])\n format.html { redirect_to @my_word, notice: 'My word was successfully updated.' }\n format.json { head :ok }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @my_word.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @frequency_vocabulary.update(frequency_vocabulary_params)\n format.html { redirect_to @frequency_vocabulary, notice: 'Frequency vocabulary was successfully updated.' }\n format.json { render :show, status: :ok, location: @frequency_vocabulary }\n else\n format.html { render :edit }\n format.json { render json: @frequency_vocabulary.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @word = Word.find(params[:id])\n\n respond_to do |format|\n if @word.update_attributes(params[:word])\n flash[:notice] = 'Word was successfully updated.'\n format.html { redirect_to(@word) }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @word.errors, :status => :unprocessable_entity }\n end\n end\n end", "def update\n @metaword = Metaword.find(params[:id])\n\n respond_to do |format|\n if @metaword.update_attributes(params[:metaword])\n format.html { redirect_to @metaword, notice: 'Metaword was successfully updated.' }\n format.json { head :ok }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @metaword.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @vocabulary_term.update(vocabulary_term_params)\n format.html { redirect_to @vocabulary_term, notice: 'Vocabulary term was successfully updated.' }\n format.json { render :show, status: :ok, location: @vocabulary_term }\n else\n format.html { render :edit }\n format.json { render json: @vocabulary_term.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @lsa_training_unique_word = LsaTrainingUniqueWord.find(params[:id])\n\n respond_to do |format|\n if @lsa_training_unique_word.update_attributes(params[:lsa_training_unique_word])\n format.html { redirect_to @lsa_training_unique_word, notice: 'Lsa training unique word was successfully updated.' 
}\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @lsa_training_unique_word.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n # raise params.to_h.to_yaml\n\n respond_to do |format|\n if @word_set.update(word_set_params)\n format.html { redirect_to @word_set, info: 'Word set was successfully updated.' }\n format.json { render action: 'show', status: :ok, location: @word_set }\n else\n format.html { render action: 'edit' }\n format.json { render json: @word_set.errors, status: :unprocessable_entity }\n end\n end\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Create two line items, then delete one and post.
def test_should_create_and_delete_line_items_before_post assert_difference 'Invoice.count' do assert_difference 'LineItem.count', 1 do @basic_user.creates_new_invoice @basic_user.enter_new_customer( :name => "Test 8 Customer Name 4") #get first row in the table. trows = @basic_user.line_items_rows assert_equal 1, trows.length tr1 = trows[::WatirBrowser.item_index(1)] assert tr1.exists? @basic_user.populate(tr1.text_field(:name, "invoice[line_items_attributes][][description]"), "Description one") @basic_user.link(:id, "add_line_item").click #get second row in the table. trows = @basic_user.line_items_rows assert_equal 2, trows.length tr2 = trows[::WatirBrowser.item_index(2)] assert tr2.exists? @basic_user.populate(tr2.text_field(:name, 'invoice[line_items_attributes][][description]'),'Description two') #remove the first line item @basic_user.line_items_rows[::WatirBrowser.item_index(1)].link(:name, 'remove').click @basic_user.submits end end invoice = Invoice.find(:first, :order => 'id desc') assert_equal 1, invoice.line_items.count assert_equal 'Description two', invoice.line_items[0].description end
[ "def move_line_items_to_order basket ,order\n basket.line_items.each do |item|\n item.basket_id = nil #prevent cascade delete\n order.line_items << item\n end\n end", "def add(new_line_items)\n new_line_items = Array.wrap(new_line_items).reject{ |li| li.product.supplier_id.nil? || li.product.supplier_id != self.supplier_id }\n new_line_items.each do |new_line_item|\n if line_item = self.drop_ship_line_items.find_by_line_item_id(new_line_item.id)\n else\n self.drop_ship_line_items.create({line_item_id: new_line_item.id}, without_protection: true)\n end\n end\n # TODO: remove any old line items?\n self.save ? self : nil\n end", "def update_line_items(table, ticket)\n ticket.ticket_line_items.destroy_all\n create_line_items(table, ticket)\n end", "def destroy_booking_line(item_id)\n\n product_lines = self.booking_lines.select do |booking_line|\n booking_line.item_id == item_id\n end\n\n if booking_line = product_lines.first\n transaction do\n self.item_cost -= booking_line.item_cost\n self.product_deposit_cost -= booking_line.product_deposit_cost\n self.category_supplement_1_cost -= booking_line.category_supplement_1_cost\n self.category_supplement_2_cost -= booking_line.category_supplement_2_cost\n self.category_supplement_3_cost -= booking_line.category_supplement_3_cost \n self.calculate_cost(false, false)\n self.save\n booking_line.destroy\n # Create newsfeed\n ::Yito::Model::Newsfeed::Newsfeed.create(category: 'booking',\n action: 'destroy_booking_line',\n identifier: self.id.to_s,\n description: BookingDataSystem.r18n.t.booking_news_feed.destroyed_booking_line(item_id),\n attributes_updated: {item_id: item_id}.merge({booking: newsfeed_summary}).to_json)\n end\n self.reload\n end\n\n end", "def down\n # split items with a quantity>1 into multiple items\n LineItem.where(\"quantity>1\").each do |line_item|\n # add individual items\n line_item.quantity.times do\n LineItem.create(\n cart_id: line_item.cart_id,\n product_id: line_item.product_id,\n quantity: 1\n )\n end\n # remove the original item\n line_item.destroy\n end\n end", "def create_drop_ship_orders\n self.drop_ship_orders.destroy_all\n self.line_items.group_by{ |li| li.product.supplier_id }.each do |supplier_id, supplier_items|\n if supplier_id.present?\n supplier = Spree::Supplier.find(supplier_id)\n dso = supplier.orders.create({:order_id => self.id})\n supplier_items.each do |line_item|\n dso.drop_ship_line_items.create({line_item_id: line_item.id})\n end\n end\n end\n end", "def post_create\n super_post_create\n @initial_lines.each{ |il| add_line il }\n @initial_lines.clear\n end", "def destroy\n @line_items = LineItem.find(params[:id])\n @line_item.destroy\n if @line_item.destroy\n render json: {status: :successfully}\n else\n render json: { status: error}\n end\n end", "def destroy\n\n @po_line.so_line.destroy if @po_line.destroy && @po_line.so_line\n\n respond_to do |format|\n format.html { redirect_to new_po_header_po_line_path(@po_header), :notice => 'Line item was successfully deleted.' 
}\n format.json { head :ok }\n end\n end", "def test_should_remove_second_line_item_from_invoice\r\n assert_no_difference 'Invoice.count' do\r\n assert_difference 'LineItem.count', -1 do\r\n @basic_user.edits_invoice(invoices(:invoice_with_line_items).id)\r\n \r\n #get first row in the table.\r\n trows = @basic_user.line_items_rows\r\n assert_equal 2, trows.length\r\n tr2 = trows[::WatirBrowser.item_index(2)]\r\n assert tr2.exists?\r\n @basic_user.populate(tr2.text_field(:name, \"invoice[line_items_attributes][][description]\"),\"Removed Description two\")\r\n\r\n #remove the second line item\r\n tr2.link(:name, 'remove').click\r\n\r\n @basic_user.submits\r\n end\r\n end\r\n \r\n invoice = Invoice.find(invoices(:invoice_with_line_items).id)\r\n assert_equal 1, invoice.line_items.count\r\n assert_equal line_items(:line_item_one).description, invoice.line_items[0].description \r\n \r\n end", "def handle_merge(current_line_item, other_order_line_item)\n if current_line_item\n current_line_item.quantity += other_order_line_item.quantity\n handle_error(current_line_item) unless current_line_item.save\n else\n order.line_items << other_order_line_item\n handle_error(other_order_line_item) unless other_order_line_item.save\n end\n end", "def create\n if(params[:line_id]) # issue, unissue, reverse po according refer line_id\n if params[:reverse_order] # reverse po\n @cart = current_cart\n so_line = LineItem.find(params[:line_id])\n \n logger.debug \"=====##reverse##cart.add_po_line_item:item, quantity== \n #{so_line.full_part_number}, #{params[:quantity]}\"\n @line_item = @cart.add_po_line_item(so_line.price_id, \n so_line.full_part_number, 0-params[:quantity].to_i, so_line.fixed_price, so_line.id) \n else # cart.add_so_line_item & issue, unissue po\n @cart = current_issue_cart\n po_line = LineItem.find(params[:line_id])\n logger.debug \"=====cart.add_so_line_item, quantity== #{params[:quantity]}\"\n @line_item = @cart.add_so_line_item(po_line, params[:quantity].to_i)\n end \n else # cart.add_po_line_item\n @cart = current_cart\n item = Item.find(params[:item_id])\n item_suffix = '' if (params[:suffix] == '' or params[:suffix].nil?)\n full_part_number = item.partNo + item_suffix\n fixed_price = Price.find(params[:price_id]).price\n\n logger.debug \"=====cart.add_po_line_item:item, quantity== #{full_part_number}, #{params[:quantity]}\"\n @line_item = @cart.add_po_line_item(params[:price_id], \n full_part_number, params[:quantity].to_i, fixed_price, nil)\n end\n\n respond_to do |format|\n if @line_item.save\n format.html { redirect_to @line_item, notice: 'Line item was successfully created.' }\n format.js {}\n format.json { render json: @line_item, status: :created, location: @line_item }\n else\n format.html { render action: \"new\" }\n format.js {}\n format.json { render json: @line_item.errors, status: :unprocessable_entity }\n end\n end\n end", "def destroy\n @line1_item.destroy\n respond_to do |format|\n format.html { redirect_to line1_items_url, notice: 'Line1 item was successfully destroyed.' 
}\n format.json { head :no_content }\n end\n end", "def duplicate\n new_invoice = Invoice.create(self.attributes.except!('id', 'number', 'issue_date', 'sent_by_email', 'paid_amount'))\n self.items.each do |item|\n i = Item.create(item.attributes.except!('id'))\n new_invoice.items << i\n i.taxes << item.taxes\n end\n new_invoice.set_amounts\n new_invoice.save\n end", "def post_create\n super_post_create\n @initial_accounting_lines.each { |il| add_accounting_line il }\n @initial_accounting_lines.clear\n end", "def merge_line_items\n processed_items = []\n self.line_items.each do |item|\n if processed_items.include?(item.product_format_id)\n item.destroy\n else\n duplicates = self.line_items.where(\"product_format_id = ?\", item.product_format_id).all\n if duplicates.size > 1\n qty = duplicates.sum(&:quantity)\n amt = duplicates.sum(&:total_amount)\n item.update_attributes({:quantity => qty, :total_amount => amt})\n end\n end\n processed_items << item.product_format_id\n end\n end", "def test_dsl_should_add_edit_and_remove_line_items_in_invoice\r\n assert_no_difference 'Invoice.count' do\r\n assert_no_difference 'LineItem.count' do\r\n @basic_user.edits_invoice(invoices(:invoice_with_line_items).id)\r\n assert_equal 3, @basic_user.adds_line_item(:unit => 'New line item', :description => 'Description of new line item')\r\n assert @basic_user.removes_line_item(2)\r\n @basic_user.edits_line_item(1, :description => 'Changed Description One').\r\n and_submits\r\n end\r\n end\r\n \r\n invoice = Invoice.find(invoices(:invoice_with_line_items).id)\r\n assert_equal 2, invoice.line_items.count\r\n\r\n assert_equal 'Changed Description One', invoice.line_items[0].description\r\n assert_equal line_items(:line_item_one).description, invoice.line_items[0].description\r\n end", "def duplicate(qty_ordered, qty_received = nil, qty_issued = nil)\n new_item = OrderedLineItem.create!(self.attributes.merge({\n :quantity_ordered => qty_ordered, \n :quantity_received => qty_received, \n :issued_quantity => qty_issued,\n :date_received => nil\n }))\n new_item.line_item_no = self.line_item_no\n new_item.save!\n end", "def create\n @quote = Quote.find(params[:quote_id])\n params[:quote_line][:item_name_sub] = params[:alt_name_id]\n @quote_line = @quote.quote_lines.build(params[:quote_line])\n @attachable = @quote\n\n respond_to do |format|\n if @quote_line.save\n @quote.quote_vendors.each do |quote_vendor|\n QuoteLineCost.create(quote_vendor_id: quote_vendor.id, quote_line_id: @quote_line.id)\n end\n\n format.html { redirect_to new_quote_quote_line_path(@quote), :notice => 'Quote line was successfully created.' }\n format.json { render :json => @quote_line, :status => :created, :location => [@quote_line.quote, @quote_line] }\n # if @quote.quote_vendors.count > 0\n # @quote.quote_vendors.each do |quote_vendor|\n # @dup_quote_line = @quote_line.clone\n # @dup_quote_line.vendor_id = quote_vendor.organization_id\n # @dup_quote_line.save\n # end\n # format.html { redirect_to new_quote_quote_line_path(@quote), :notice => 'Quote line was successfully created.' }\n # format.json { render :json => @quote_line, :status => :created, :location => [@quote_line.quote, @quote_line] }\n # else\n # format.html { redirect_to new_quote_quote_line_path(@quote), :notice => 'Please select vendors to add quote items.' }\n # end\n else\n format.html { render :action => \"new\" }\n format.json { render :json => @quote_line.errors, :status => :unprocessable_entity }\n end\n end\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Test editing an invoice with line items. Remove first line item from existing invoice
def test_should_remove_first_line_item_from_invoice assert_no_difference 'Invoice.count' do assert_difference 'LineItem.count', -1 do @basic_user.edits_invoice(invoices(:invoice_with_line_items).id) #get first row in the table. trows = @basic_user.line_items_rows assert_equal 2, trows.length tr1 = trows[::WatirBrowser.item_index(1)] assert tr1.exists? @basic_user.populate(tr1.text_field(:name, "invoice[line_items_attributes][][description]"),"Removed Description one") #remove the first line item tr1.link(:name, 'remove').click @basic_user.submits end end invoice = Invoice.find(invoices(:invoice_with_line_items).id) assert_equal 1, invoice.line_items.count assert_equal line_items(:line_item_two).description, invoice.line_items[0].description end
[ "def test_should_remove_second_line_item_from_invoice\r\n assert_no_difference 'Invoice.count' do\r\n assert_difference 'LineItem.count', -1 do\r\n @basic_user.edits_invoice(invoices(:invoice_with_line_items).id)\r\n \r\n #get first row in the table.\r\n trows = @basic_user.line_items_rows\r\n assert_equal 2, trows.length\r\n tr2 = trows[::WatirBrowser.item_index(2)]\r\n assert tr2.exists?\r\n @basic_user.populate(tr2.text_field(:name, \"invoice[line_items_attributes][][description]\"),\"Removed Description two\")\r\n\r\n #remove the second line item\r\n tr2.link(:name, 'remove').click\r\n\r\n @basic_user.submits\r\n end\r\n end\r\n \r\n invoice = Invoice.find(invoices(:invoice_with_line_items).id)\r\n assert_equal 1, invoice.line_items.count\r\n assert_equal line_items(:line_item_one).description, invoice.line_items[0].description \r\n \r\n end", "def test_dsl_should_add_edit_and_remove_line_items_in_invoice\r\n assert_no_difference 'Invoice.count' do\r\n assert_no_difference 'LineItem.count' do\r\n @basic_user.edits_invoice(invoices(:invoice_with_line_items).id)\r\n assert_equal 3, @basic_user.adds_line_item(:unit => 'New line item', :description => 'Description of new line item')\r\n assert @basic_user.removes_line_item(2)\r\n @basic_user.edits_line_item(1, :description => 'Changed Description One').\r\n and_submits\r\n end\r\n end\r\n \r\n invoice = Invoice.find(invoices(:invoice_with_line_items).id)\r\n assert_equal 2, invoice.line_items.count\r\n\r\n assert_equal 'Changed Description One', invoice.line_items[0].description\r\n assert_equal line_items(:line_item_one).description, invoice.line_items[0].description\r\n end", "def test_should_create_and_delete_line_items_before_post\r\n assert_difference 'Invoice.count' do\r\n assert_difference 'LineItem.count', 1 do\r\n @basic_user.creates_new_invoice\r\n @basic_user.enter_new_customer(\r\n :name => \"Test 8 Customer Name 4\") \r\n \r\n #get first row in the table.\r\n trows = @basic_user.line_items_rows\r\n assert_equal 1, trows.length\r\n tr1 = trows[::WatirBrowser.item_index(1)]\r\n assert tr1.exists?\r\n @basic_user.populate(tr1.text_field(:name, \"invoice[line_items_attributes][][description]\"), \"Description one\")\r\n\r\n @basic_user.link(:id, \"add_line_item\").click\r\n\r\n #get second row in the table.\r\n trows = @basic_user.line_items_rows\r\n assert_equal 2, trows.length\r\n tr2 = trows[::WatirBrowser.item_index(2)]\r\n assert tr2.exists?\r\n\r\n @basic_user.populate(tr2.text_field(:name, 'invoice[line_items_attributes][][description]'),'Description two')\r\n\r\n #remove the first line item\r\n @basic_user.line_items_rows[::WatirBrowser.item_index(1)].link(:name, 'remove').click\r\n\r\n @basic_user.submits\r\n end\r\n end\r\n\r\n invoice = Invoice.find(:first, :order => 'id desc')\r\n assert_equal 1, invoice.line_items.count\r\n assert_equal 'Description two', invoice.line_items[0].description\r\n end", "def test_should_update_new_invoice_total_when_line_item_is_deleted\r\n update_profile_taxes(\r\n :tax_enabled => true,\r\n :tax_1_name => \"tax 1 name\",\r\n :tax_1_rate => \"5.0\",\r\n :tax_2_name => \"\",\r\n :discount_before_tax => false)\r\n \r\n @user.creates_new_invoice\r\n # the first line is created automatically, no need to click add line \r\n @user.edits_line_item(1,:unit => 'line one', :quantity => '1', :price => '1.23')\r\n @user.adds_line_item(:unit => 'line two', :quantity => '1', :price => '4.56')\r\n \r\n verify_invoice_view_fields(\r\n :line_items_total => 5.79,\r\n :tax_1_amount => 0.29,\r\n 
:discount_amount => 0,\r\n :total => 6.08)\r\n \r\n @user.removes_line_item(1)\r\n\r\n verify_invoice_view_fields(\r\n :line_items_total => 4.56,\r\n :tax_1_amount => 0.23,\r\n :discount_amount => 0,\r\n :total => 4.79)\r\n end", "def test_should_exclude_should_destroy_line_items_from_total\n i = Invoice.find(invoices(:invoice_with_line_items).id)\n assert_equal BigDecimal.new('33.33'), i.line_items_total\n \n i.line_items[0].should_destroy=1\n \n assert_equal BigDecimal.new('22.22'), i.line_items_total\n end", "def removes_line_item(index)\r\n index = ::WatirBrowser.item_index(index)\r\n trows = line_items_rows\r\n tr = trows[index]\r\n\r\n @test_case.assert tr.exists? && tr.visible?, 'tried to remove a non-existent line_item.'\r\n expects_ajax() do\r\n tr.link(:name, 'remove').click\r\n b.wait()\r\n end\r\n self\r\n end", "def split(client_tag, client_name='')\n to_copy = self.attributes.slice('status','date','customer_id','note')\n split_invoice = Invoice.new(to_copy)\n split_invoice.number = Invoice.generate_num\n tag_regex = /\\[#{client_tag}\\:(\\d+\\.?\\d*)\\]/i\n self.line_items.each do |line_item|\n if m = line_item.description.match(tag_regex)\n new_quantity = m.captures.first.to_f\n # TODO: deal with edge cases, tag but no hours, bad tags, etc\n # next unless new_quantity > 0 \n\n split_line_item = LineItem.new(line_item.attributes.merge({'id'=>nil, 'invoice_id'=>nil}))\n unless client_name.blank?\n split_line_item.description = line_item.description.gsub(tag_regex, \"[#{client_name}]\")\n end\n split_line_item.quantity = new_quantity\n \n line_item.description = line_item.description.gsub(tag_regex,'').strip!\n line_item.quantity = line_item.quantity - new_quantity\n if (line_item.quantity <= 0)\n line_item.delete\n else\n line_item.save\n end\n\n split_invoice.line_items << split_line_item\n end\n end\n split_invoice.save\n split_invoice\n end", "def test_totalling_methods_when_line_items_empty\n invoice = create_test_invoice\n invoice.line_items = []\n\n assert_nothing_raised(Exception) {\n assert_equal(BigDecimal('0'), invoice.sub_total)\n assert_equal(BigDecimal('0'), invoice.total_tax)\n assert_equal(BigDecimal('0'), invoice.total)\n }\n end", "def update_line_items(table, ticket)\n ticket.ticket_line_items.destroy_all\n create_line_items(table, ticket)\n end", "def test_delete_previous_promotion_line_item\n setup_new_order_with_items()\n \n a_fixed_rebate = promotions(:fixed_rebate)\n @o.promotion_code = a_fixed_rebate.code\n # Saving it, sets the promo code and product cost.\n assert @o.save\n # Assert the promotion is there.\n assert_equal @o.order_line_items.find_by_name(a_fixed_rebate.description).name, a_fixed_rebate.description, \"The fixed rebate wasn't added properly.\"\n\n # Test a percent rebate.\n a_percent_rebate = promotions(:percent_rebate)\n @o.promotion_code = a_percent_rebate.code\n # Saving it, sets the promo code and product cost.\n assert @o.save\n # Assert the promotion is there.\n assert_equal @o.order_line_items.find_by_name(a_percent_rebate.description).name, a_percent_rebate.description, \"The percent rebate wasn't added properly.\"\n\n # Assert the previous promotion is NOT there.\n assert_equal @o.order_line_items.find_by_name(a_fixed_rebate.description), nil, \"The fixed rebate is still there.\"\n end", "def test_line_amount_calculation\n invoice = create_test_invoice\n line_item = invoice.line_items.first\n\n # Make sure that everything adds up to begin with.\n expected_amount = line_item.quantity * line_item.unit_amount\n 
assert_equal(expected_amount, line_item.line_amount)\n\n # Change the line_amount and check that it doesn't modify anything.\n line_item.line_amount = expected_amount * 10\n assert_equal(expected_amount, line_item.line_amount)\n\n # Change the quantity and check that the line_amount has been updated.\n quantity = line_item.quantity + 2\n line_item.quantity = quantity\n assert_not_equal(expected_amount, line_item.line_amount)\n assert_equal(quantity * line_item.unit_amount, line_item.line_amount)\n end", "def remove_line_item(line_item)\n unless self.ordered?\n line_item.destroy\n end\n end", "def unrefulfill_line_item(line_item_id)\n li = Spree::LineItem.find(line_item_id)\n li.refulfill = nil\n li.save\n end", "def destroy_invoice_item(id, taxes)\n if invoice_item = CustomerInvoiceItem.get(id)\n # Destroy the invoice item \n invoice_item.destroy\n # Update totals\n substract_item_data(invoice_item, taxes)\n # Save and reload the invoice\n self.save\n self.reload\n return self\n end\n\n end", "def purge!\n unless self.ordered?\n self.line_items.each do |line_item|\n remove_line_item(line_item) if line_item.invalid?\n end\n end\n end", "def clear_line_items\n @line_items = []\n end", "def delete_lines_main_order\n # Eliminamos las lineas de la orden principal\n @main_order.line_items.each do |line_item|\n line_item.delete\n end\n # Eliminamos la orden principal\n @main_order.delete\n end", "def lock_new_invoice_line_items party, document\n # Generate fixed-price post sub total line items from percentage based sub total line items\n subtotal = revenue_subtotal_by_party(party, false)\n line_items_after_subtotal_percentage_by_billable_party(party).each do |li|\n fixed_price = li.revenue_total_with_percentage subtotal\n line_items.create!(li.attributes.merge({unit_price_revenue: fixed_price, name: \"#{li.name} (#{li.unit_price_revenue}%)\", percentage_of_subtotal: false, parent_id: li.id}))\n end\n\n line_items_fixed_by_billable_party_including_add_ons(party, false).each do |li|\n li.document = document\n li.save\n end\n end", "def test_totalling_methods_when_line_items_empty\n credit_note = create_test_credit_note\n credit_note.line_items = []\n\n assert_nothing_raised(Exception) {\n assert_equal(BigDecimal('0'), credit_note.sub_total)\n assert_equal(BigDecimal('0'), credit_note.total_tax)\n assert_equal(BigDecimal('0'), credit_note.total)\n }\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Remove second line item from existing invoice
def test_should_remove_second_line_item_from_invoice
  assert_no_difference 'Invoice.count' do
    assert_difference 'LineItem.count', -1 do
      @basic_user.edits_invoice(invoices(:invoice_with_line_items).id)

      #get first row in the table.
      trows = @basic_user.line_items_rows
      assert_equal 2, trows.length
      tr2 = trows[::WatirBrowser.item_index(2)]
      assert tr2.exists?
      @basic_user.populate(tr2.text_field(:name, "invoice[line_items_attributes][][description]"),"Removed Description two")

      #remove the second line item
      tr2.link(:name, 'remove').click

      @basic_user.submits
    end
  end

  invoice = Invoice.find(invoices(:invoice_with_line_items).id)
  assert_equal 1, invoice.line_items.count
  assert_equal line_items(:line_item_one).description, invoice.line_items[0].description
end
[ "def test_should_remove_first_line_item_from_invoice\r\n assert_no_difference 'Invoice.count' do\r\n assert_difference 'LineItem.count', -1 do\r\n @basic_user.edits_invoice(invoices(:invoice_with_line_items).id)\r\n \r\n #get first row in the table.\r\n trows = @basic_user.line_items_rows\r\n assert_equal 2, trows.length\r\n tr1 = trows[::WatirBrowser.item_index(1)]\r\n assert tr1.exists?\r\n @basic_user.populate(tr1.text_field(:name, \"invoice[line_items_attributes][][description]\"),\"Removed Description one\")\r\n\r\n #remove the first line item\r\n tr1.link(:name, 'remove').click\r\n\r\n @basic_user.submits\r\n end\r\n end\r\n \r\n invoice = Invoice.find(invoices(:invoice_with_line_items).id)\r\n assert_equal 1, invoice.line_items.count\r\n assert_equal line_items(:line_item_two).description, invoice.line_items[0].description\r\n end", "def remove_line_item(line_item)\n unless self.ordered?\n line_item.destroy\n end\n end", "def remove_line(product)\n i = @cartlines.index { |o| o.productID == product.productID }\n @cartlines.delete_at(i) if i != nil\n end", "def removes_line_item(index)\r\n index = ::WatirBrowser.item_index(index)\r\n trows = line_items_rows\r\n tr = trows[index]\r\n\r\n @test_case.assert tr.exists? && tr.visible?, 'tried to remove a non-existent line_item.'\r\n expects_ajax() do\r\n tr.link(:name, 'remove').click\r\n b.wait()\r\n end\r\n self\r\n end", "def purge!\n unless self.ordered?\n self.line_items.each do |line_item|\n remove_line_item(line_item) if line_item.invalid?\n end\n end\n end", "def test_should_exclude_should_destroy_line_items_from_total\n i = Invoice.find(invoices(:invoice_with_line_items).id)\n assert_equal BigDecimal.new('33.33'), i.line_items_total\n \n i.line_items[0].should_destroy=1\n \n assert_equal BigDecimal.new('22.22'), i.line_items_total\n end", "def split(client_tag, client_name='')\n to_copy = self.attributes.slice('status','date','customer_id','note')\n split_invoice = Invoice.new(to_copy)\n split_invoice.number = Invoice.generate_num\n tag_regex = /\\[#{client_tag}\\:(\\d+\\.?\\d*)\\]/i\n self.line_items.each do |line_item|\n if m = line_item.description.match(tag_regex)\n new_quantity = m.captures.first.to_f\n # TODO: deal with edge cases, tag but no hours, bad tags, etc\n # next unless new_quantity > 0 \n\n split_line_item = LineItem.new(line_item.attributes.merge({'id'=>nil, 'invoice_id'=>nil}))\n unless client_name.blank?\n split_line_item.description = line_item.description.gsub(tag_regex, \"[#{client_name}]\")\n end\n split_line_item.quantity = new_quantity\n \n line_item.description = line_item.description.gsub(tag_regex,'').strip!\n line_item.quantity = line_item.quantity - new_quantity\n if (line_item.quantity <= 0)\n line_item.delete\n else\n line_item.save\n end\n\n split_invoice.line_items << split_line_item\n end\n end\n split_invoice.save\n split_invoice\n end", "def destroy_invoice_item(id, taxes)\n if invoice_item = CustomerInvoiceItem.get(id)\n # Destroy the invoice item \n invoice_item.destroy\n # Update totals\n substract_item_data(invoice_item, taxes)\n # Save and reload the invoice\n self.save\n self.reload\n return self\n end\n\n end", "def remove_item(item)\n order_item = self.order_items.where(item: item).order('price asc').first\n if order_item.is_a? 
OrderItem\n order_item.remove 1\n recalculate!\n end\n end", "def unrefulfill_line_item(line_item_id)\n li = Spree::LineItem.find(line_item_id)\n li.refulfill = nil\n li.save\n end", "def remove_items_from_cart\n line_items.each { |i| i.cart = nil }\n end", "def delete\n LineItem.delete_for_order(order_id)\n CreditCardTransaction.remove_references_to_order(order_id)\n super\n end", "def remove_from(order)\n order.line_items.each do |line_item|\n line_item.adjustments.each do |adjustment|\n if adjustment.source == self\n line_item.adjustments.destroy(adjustment)\n end\n end\n end\n end", "def free item\n item.order_book.remove item if item.order_book\n end", "def down\n # split items with a quantity>1 into multiple items\n LineItem.where(\"quantity>1\").each do |line_item|\n # add individual items\n line_item.quantity.times do\n LineItem.create(\n cart_id: line_item.cart_id,\n product_id: line_item.product_id,\n quantity: 1\n )\n end\n # remove the original item\n line_item.destroy\n end\n end", "def destroy\n @invoice_line_item = InvoiceLineItem.find(params[:id])\n @invoice_line_item.destroy\n\n respond_to do |format|\n format.html { redirect_to invoice_line_items_url }\n format.json { head :no_content }\n end\n end", "def move_line_items_to_order basket ,order\n basket.line_items.each do |item|\n item.basket_id = nil #prevent cascade delete\n order.line_items << item\n end\n end", "def delete\n self.class.delete_order_line_item_by_id(self.id)\n end", "def remove_entry(entry)\n if entry.new_record?\n entries = self.invoiceable_entries(true)\n entries.delete(entry)\n else\n if !entry.invoiced?\n entry.destroy\n else\n entry.state! :voided\n entry.save\n end\n end\n return {success: true}\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
DSL tests Test dsl for add, remove, and edit items from existing invoice
def test_dsl_should_add_edit_and_remove_line_items_in_invoice
  assert_no_difference 'Invoice.count' do
    assert_no_difference 'LineItem.count' do
      @basic_user.edits_invoice(invoices(:invoice_with_line_items).id)
      assert_equal 3, @basic_user.adds_line_item(:unit => 'New line item', :description => 'Description of new line item')
      assert @basic_user.removes_line_item(2)
      @basic_user.edits_line_item(1, :description => 'Changed Description One').
        and_submits
    end
  end

  invoice = Invoice.find(invoices(:invoice_with_line_items).id)
  assert_equal 2, invoice.line_items.count

  assert_equal 'Changed Description One', invoice.line_items[0].description
  assert_equal line_items(:line_item_one).description, invoice.line_items[0].description
end
[ "def test_should_create_and_delete_line_items_before_post\r\n assert_difference 'Invoice.count' do\r\n assert_difference 'LineItem.count', 1 do\r\n @basic_user.creates_new_invoice\r\n @basic_user.enter_new_customer(\r\n :name => \"Test 8 Customer Name 4\") \r\n \r\n #get first row in the table.\r\n trows = @basic_user.line_items_rows\r\n assert_equal 1, trows.length\r\n tr1 = trows[::WatirBrowser.item_index(1)]\r\n assert tr1.exists?\r\n @basic_user.populate(tr1.text_field(:name, \"invoice[line_items_attributes][][description]\"), \"Description one\")\r\n\r\n @basic_user.link(:id, \"add_line_item\").click\r\n\r\n #get second row in the table.\r\n trows = @basic_user.line_items_rows\r\n assert_equal 2, trows.length\r\n tr2 = trows[::WatirBrowser.item_index(2)]\r\n assert tr2.exists?\r\n\r\n @basic_user.populate(tr2.text_field(:name, 'invoice[line_items_attributes][][description]'),'Description two')\r\n\r\n #remove the first line item\r\n @basic_user.line_items_rows[::WatirBrowser.item_index(1)].link(:name, 'remove').click\r\n\r\n @basic_user.submits\r\n end\r\n end\r\n\r\n invoice = Invoice.find(:first, :order => 'id desc')\r\n assert_equal 1, invoice.line_items.count\r\n assert_equal 'Description two', invoice.line_items[0].description\r\n end", "def test_should_update_new_invoice_total_when_line_item_is_deleted\r\n update_profile_taxes(\r\n :tax_enabled => true,\r\n :tax_1_name => \"tax 1 name\",\r\n :tax_1_rate => \"5.0\",\r\n :tax_2_name => \"\",\r\n :discount_before_tax => false)\r\n \r\n @user.creates_new_invoice\r\n # the first line is created automatically, no need to click add line \r\n @user.edits_line_item(1,:unit => 'line one', :quantity => '1', :price => '1.23')\r\n @user.adds_line_item(:unit => 'line two', :quantity => '1', :price => '4.56')\r\n \r\n verify_invoice_view_fields(\r\n :line_items_total => 5.79,\r\n :tax_1_amount => 0.29,\r\n :discount_amount => 0,\r\n :total => 6.08)\r\n \r\n @user.removes_line_item(1)\r\n\r\n verify_invoice_view_fields(\r\n :line_items_total => 4.56,\r\n :tax_1_amount => 0.23,\r\n :discount_amount => 0,\r\n :total => 4.79)\r\n end", "def test_should_remove_second_line_item_from_invoice\r\n assert_no_difference 'Invoice.count' do\r\n assert_difference 'LineItem.count', -1 do\r\n @basic_user.edits_invoice(invoices(:invoice_with_line_items).id)\r\n \r\n #get first row in the table.\r\n trows = @basic_user.line_items_rows\r\n assert_equal 2, trows.length\r\n tr2 = trows[::WatirBrowser.item_index(2)]\r\n assert tr2.exists?\r\n @basic_user.populate(tr2.text_field(:name, \"invoice[line_items_attributes][][description]\"),\"Removed Description two\")\r\n\r\n #remove the second line item\r\n tr2.link(:name, 'remove').click\r\n\r\n @basic_user.submits\r\n end\r\n end\r\n \r\n invoice = Invoice.find(invoices(:invoice_with_line_items).id)\r\n assert_equal 1, invoice.line_items.count\r\n assert_equal line_items(:line_item_one).description, invoice.line_items[0].description \r\n \r\n end", "def test_items_return_associated_invoice_items\n invoice_items = engine.invoice_item_repository.find_by('item_id', 10)\n item = invoice_items.item\n assert ['pooper scooper'], invoice_items.item.name\n end", "def test_discount_per_invoice\r\n\r\n\r\n# puts \"Customer.find_by_name('heavy_user_customer_1'): #{Customer.find_by_name('heavy_user_customer_1').inspect}\"\r\n\r\n # Add an Invoice\r\n assert_difference 'Invoice.count' do\r\n #Create new Invoice and fill it up\r\n @user.creates_new_invoice\r\n fill_invoice_for @user \r\n \r\n #Customer Info \r\n current_customer 
= customers(\"heavy_user_customer_1\".to_sym)\r\n @user.enter_new_customer(:name => current_customer.name) \r\n \r\n # quantity and price keep the same value so calculating subtotal is easy \r\n quantity = 10 \r\n price = 5.00\r\n discount = 50.00\r\n discount_percent = 10.00\r\n \t \r\n #Add line items\r\n add_line_items(quantity, price)\r\n\t \r\n # Discount and Type\r\n @user.populate(@user.text_field(:id, \"invoice_discount_value\"),discount.to_s)\r\n @user.select_list(:id, \"invoice_discount_type\").option(:text, \"amount\").select\r\n \r\n # Submit and verify \r\n assert_difference 'LineItem.count', @number_of_line_items do\r\n @user.submits \r\n end\r\n \r\n # Edit Invoice\r\n id_last_invoice = Invoice.find(:first, :order => \"id DESC\").id\r\n @user.edits_invoice(id_last_invoice)\r\n \r\n # Verify SubTotal, Discount and Total \r\n subTotal = quantity * price * @number_of_line_items\r\n total = subTotal - discount\r\n \r\n assert_equal(format_amount(subTotal), @user.span(:id, \"invoice_line_items_total\").text)\r\n assert_equal(format_amount(discount), @user.span(:id, \"invoice_discount_amount\").text)\r\n assert_equal(format_amount(total), @user.span(:id, \"invoice_total\").text)\r\n \r\n # Changing discount type\r\n @user.expects_ajax(2) do\r\n @user.populate(@user.text_field(:name, 'invoice[discount_value]'), discount_percent.to_s)\r\n @user.select_list(:name, 'invoice[discount_type]').select \"percent\"\r\n end\r\n\r\n @user.submits\r\n\r\n # Edit Invoice\r\n id_last_invoice = Invoice.find(:first, :order => \"id DESC\").id\r\n @user.edits_invoice(id_last_invoice)\r\n \r\n # Verify SubTotal, Discount and Total \r\n discount = discount_percent * subTotal / 100.00\r\n total = subTotal - discount\r\n \r\n assert_equal(format_amount(subTotal), @user.span(:id, \"invoice_line_items_total\").text)\r\n assert_equal(format_amount(discount), @user.span(:id, \"invoice_discount_amount\").text)\r\n assert_equal(format_amount(total), @user.span(:id, \"invoice_total\").text)\r\n \r\n end \r\n end", "def test_should_remove_first_line_item_from_invoice\r\n assert_no_difference 'Invoice.count' do\r\n assert_difference 'LineItem.count', -1 do\r\n @basic_user.edits_invoice(invoices(:invoice_with_line_items).id)\r\n \r\n #get first row in the table.\r\n trows = @basic_user.line_items_rows\r\n assert_equal 2, trows.length\r\n tr1 = trows[::WatirBrowser.item_index(1)]\r\n assert tr1.exists?\r\n @basic_user.populate(tr1.text_field(:name, \"invoice[line_items_attributes][][description]\"),\"Removed Description one\")\r\n\r\n #remove the first line item\r\n tr1.link(:name, 'remove').click\r\n\r\n @basic_user.submits\r\n end\r\n end\r\n \r\n invoice = Invoice.find(invoices(:invoice_with_line_items).id)\r\n assert_equal 1, invoice.line_items.count\r\n assert_equal line_items(:line_item_two).description, invoice.line_items[0].description\r\n end", "def test_it_can_create_a_new_invoice_when_sent_create_with_params\n previous_invoice_count = @instance.all.count\n customer1 = @instance.engine.customer_repository.find_by_id(1)\n merchant1 = @instance.engine.merchant_repository.find_by_id(1)\n item1 = @instance.engine.item_repository.find_by_id(1)\n item2 = @instance.engine.item_repository.find_by_id(2)\n @instance.create(customer: customer1,\n merchant: merchant1,\n status: \"shipped\",\n items: [item1, item1, item2])\n assert_equal previous_invoice_count + 1, @instance.all.count\n end", "def test_edit_invoice_with_edited_enabled_taxes_with_profile_taxes_unchanged\n # show invoice.taxes. 
Include a link to use profile taxes.\n context = \"when editing invoice with edited taxes with profile taxes unchanged\"\n user = tax_user(:basic_user)\n user.enable_profile_taxes\n assert user.profile.tax_enabled\n\n user.login\n # puts \"do click new invoice\"\n user.clicks_create_new_invoice\n\n user.assert user.assigns(:invoice)\n user.assert_taxes(user.assigns(:invoice), \"after creating new invoice \" + context)\n\n # puts \"do save invoice\"\n user.saves_new_invoice(\n :customer_id => user.scratch.fixture_user.customers.first.id,\n :date => Time.now.to_date.to_s(:MMDDYYYY),\n :due_date => (Time.now + 1.week).to_date.to_s(:MMDDYYYY),\n :description => Babel.random_short.gsub( '&', 'and' ),\n :tax_1_name => 'custom',\n :tax_2_rate => 9\n )\n\n new_invoice = user.assigns(:invoice)\n user.assert_taxes(new_invoice, \"after saving new invoice \" + context, \n {:tax_2_rate => 9, :tax_2_edited => true, :tax_1_name => 'custom', :tax_1_edited => true})\n\n new_invoice.reload\n # puts \"new_invoice: #{new_invoice.inspect}\"\n user.assert_taxes(new_invoice, \"after reloading invoice object \" + context,\n {:tax_2_rate => 9, :tax_2_edited => true, :tax_1_name => 'custom', :tax_1_edited => true})\n\n # puts \"do edit invoice with #{new_invoice.id} user.assigns(:invoice): #{user.assigns(:invoice).inspect}\"\n user.edits_invoice(new_invoice.id)\n\n # puts user.assigns(:invoice).taxes.inspect\n user.deny user.assigns(:invoice).taxes.empty?, \"edited invoice taxes was, empty #{context}\"\n user.assert_taxes(user.assigns(:invoice), \"after editing new invoice \" + context,\n {:tax_2_rate => 9, :tax_2_edited => true, :tax_1_name => 'custom', :tax_1_edited => true}) \n\n end", "def test_edit_invoice_with_unedited_enabled_taxes_with_profile_taxes_unchanged\n # show invoice.taxes\n context = \"when editing invoice with unedited taxes with profile taxes unchanged\"\n user = tax_user(:basic_user)\n user.enable_profile_taxes\n assert user.profile.tax_enabled\n\n\n user.login\n # puts \"do click new invoice\"\n user.clicks_create_new_invoice\n\n user.assert user.assigns(:invoice)\n user.assert_taxes(user.assigns(:invoice), \"after creating new invoice \" + context)\n\n # puts \"do save invoice\"\n user.saves_new_invoice(\n :customer_id => user.scratch.fixture_user.customers.first.id,\n :date => Time.now.to_date.to_s(:MMDDYYYY),\n :due_date => (Time.now + 1.week).to_date.to_s(:MMDDYYYY),\n :description => Babel.random_short.gsub( '&', 'and' )\n )\n\n new_invoice = user.assigns(:invoice)\n user.assert_taxes(new_invoice, \"after saving new invoice \" + context)\n\n new_invoice.reload\n# puts \"new_invoice: #{new_invoice.inspect}\"\n user.assert_taxes(new_invoice, \"after reloading invoice object \" + context)\n \n # puts \"do edit invoice with #{new_invoice.id} user.assigns(:invoice): #{user.assigns(:invoice).inspect}\"\n user.edits_invoice(new_invoice.id)\n\n # puts user.assigns(:invoice).taxes.inspect\n user.deny user.assigns(:invoice).taxes.empty?, \"edited invoice taxes was, empty #{context}\"\n user.assert_taxes(user.assigns(:invoice), \"after editing new invoice \" + context)\n\n end", "def test_create_invoice_and_edit_taxes_should_copy_taxes_and_mark_edited\n context = \"with taxes enabled in profile and valid\"\n user = tax_user(:basic_user)\n user.enable_profile_taxes\n assert user.profile.tax_enabled\n\n user.login\n# puts \"do click new invoice\"\n user.clicks_create_new_invoice\n \n user.assert_select 'div#tax_container', true, \"new invoice screen was missing tax_container #{context}\"\n \n 
user.assert_select 'input#invoice_tax_1_enabled', true, \"new invoice failed to show tax_1_enabled #{context}\"\n user.assert_select 'input#invoice_tax_1_rate', true, \"new invoice failed to show tax_1_rate #{context}\"\n\n user.assert_select 'input#invoice_tax_2_enabled', true, \"new invoice failed to show tax_2_enabled #{context}\"\n user.assert_select 'input#invoice_tax_2_rate', true, \"new invoice failed to show tax_2_rate #{context}\"\n \n user.assert user.assigns(:invoice)\n user.deny user.assigns(:invoice).taxes.empty?\n\n user.assert_taxes(user.assigns(:invoice), context)\n# puts \"do save invoice\"\n user.saves_new_invoice(:tax_1_rate => '10.1')\n# puts user.assigns(:invoice).taxes.inspect\n\n user.assert_taxes(user.assigns(:invoice), context, {:tax_1_rate => 10.1, :tax_1_edited => true})\n end", "def test_add_new_item_to_shop\n #Add new item to the shop\n @shop.add_item(\"ML1\", \"MILK\", 10.0)\n item = @shop.find(\"ML1\")\n\n # Check if quantity has increased\n assert_equal 4, @shop.items_quantity, 'Shop has 4 items'\n\n # Check if item exists\n assert_equal [ item.code, item.name, item.price], @shop.find(item.code).to_a\n end", "def test_new_item_creation\n assert_equal @code, @item.code, 'item code is ok'\n assert_equal @name, @item.name, 'item name is ok'\n assert_equal @price, @item.price, 'item price is ok'\n end", "def test_create_invoice_resource\n resource = :invoice\n test_field = :label\n test_value = \"a nice label\"\n\n # We need a client so that we can assign a valid client id to the invoice\n # It is deleted in test_delete_invoice_resource\n client = Billomat.res(:client).new\n client.save\n\n x = Billomat.res(resource).new\n x.client_id = client.id\n assert x.save, \"Could not save newly created #{resource}\"\n assert x.save, \"#{resource} creation resulted in invalid #{resource} record\"\n assert(x.id > 0, \"New #{resource} was not successfully created (no id)\")\n\n\n y = Billomat.res(resource).find(x.id)\n y.attributes[test_field] = test_value\n assert y.save, \"Editing #{resource} resource did not succeed\"\n\n z = Billomat.res(resource).find(x.id)\n assert_equal test_value, z.attributes[test_field]\n end", "def test_add_new_item\n user = Trading::User.named('Peter')\n user.add_new_item('Cookies', 3)\n assert_equal(user.list_of_items[0].name,'Cookies')\n assert_equal(user.list_of_items[0].price,3)\n assert_equal(user.list_of_items[0].owner.name, 'Peter')\n assert_equal(user.list_of_items[0].status, false)\n end", "def test_add_item_edit_menu_index_0\n menu = UI.menu(\"Edit\")\n item = menu.add_item(\"Test\", 0) {}\n assert_not_nil(item)\n end", "def invoice_with_revenue(ii_count: 4)\n FactoryBot.create(:invoice, status: 'shipped') do |invoice|\n FactoryBot.create(:item, merchant: invoice.merchant) do |item|\n FactoryBot.create(:transaction, result: 'success', invoice: invoice)\n FactoryBot.create_list(:invoice_item_fixed, ii_count, item: item, invoice: invoice)\n end\n end\nend", "def test_item_initialisation\n item = @owner.create_item(\"testobject\", 50, 7, \"Description-text\")\n assert(item.name == \"testobject\", \"Name should be returned\")\n assert(item.price == 50, \"Should return price\")\n assert(!item.is_active?, \"Should not be active\")\n assert(item.quantity == 7, \"Quantity should be returned\")\n assert(item.description.eql?(\"Description-text\"), \"Description should be returned\")\n end", "def test_add_recipe\n \n end", "def test_create_item_with_quantity\n me = Trader.named(\"Me\", :credits => 100)\n you = Trader.named(\"You\", :credits => 
100)\n (my_item = me.propose_item_with_quantity(\"One\", 7, 1, :fixed, nil, nil)).activate\n (your_item = you.propose_item_with_quantity(\"Four\", 11, 4, :fixed, nil, nil)).activate\n\n assert_equal(me.items, [my_item])\n assert(my_item.quantity == 1)\n assert_equal(you.items, [your_item])\n assert(your_item.quantity == 4)\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
respond to checkjira command same as interval except we answer with 'no issues found' if there are no issues
def checkjira_command(msg)
  begin
    msg.speak "no new issues since I last checked #{@lastlast} ago" \
      if !check_jira(msg)
  rescue
    msg.speak "sorry, we had trouble connecting to JIRA."
  end
end
[ "def checkjira_command(msg)\n begin\n msg.speak \"no new issues since I last checked #{@lastlast} ago\" if !check_jira(msg)\n rescue \n msg.speak \"sorry, we had trouble connecting to JIRA.\"\n end\n end", "def issues_worked\n search_criteria_present!\n\n query = \"q=type:issue+org:#{org}+assignee:#{username}+updated:>=#{datetime}\"\n url = \"#{SEARCH_ENDPOINT}?#{results_size}&#{query}\"\n response = HTTParty.get url, options\n\n handle! response\n end", "def get_jira_deploy_task\n jira_search_url = 'https://projects.engineering.redhat.com/rest/api/2/search'\n jira_query = \"Project = ERRATA AND Type = Task AND labels = pntdevops-sysops-deploy ORDER BY updated Desc\"\n response_json = %x{ curl -s -H \"Content-Type: application/json\" \\\n \"#{jira_search_url}?jql=#{CGI.escape(jira_query)}&maxResults=1\" }\n response = JSON.load(response_json)\n raise \"Query returned no issues, aborting. Query: #{jira_query} Response: #{response}\" if response_json.blank? || response['issues'].blank?\n response['issues'].first\nend", "def add_jira_tickets_to_omnifocus ()\n # Get the open Jira issues assigned to you\n results = Issue.query_results\n if results.nil?\n puts \"No results from Jira\"\n exit\n end\n puts \"\\\"#{QUERY}\\\" returned #{results.size} results from #{JIRA_BASE_URL}\"\n\n # Iterate through resulting issues.\n results.each do |jira_id, issue|\n jira_issues[jira_id] = issue # cache results\n add_task(issue)\n end\nend", "def report_check_run_failure\n data = params\n check_run = data[:check_run]\n check_run_status = check_run[:status]\n sha = check_run[:head_sha]\n workflow_name = check_run[:name]\n conclusion = check_run[:conclusion]\n check_run_url = check_run[:html_url]\n check_suite = check_run[:check_suite]\n app_name = check_run[:app][:name]\n details_url = check_run[:details_url]\n pull_requests = check_suite[:pull_requests]\n pull_request = pull_requests[0]\n branch = check_suite[:head_branch]\n repository = data[:repository]\n repo_name = repository[:name]\n repo_url = repository[:html_url]\n sender_login = data[:sender][:login]\n pr_number = pull_request[:number] if pull_request.present?\n branch_path = ''\n branch_path = \"/tree/#{branch}\" if branch != 'master'\n\n # We are only interested in completed non-success\n return if check_run_status != 'completed' || conclusion == 'success'\n\n message = \"[ [#{repo_name}](#{repo_url}) ]\"\n message += if app_name == 'GitHub Actions'\n \" GitHub Action workflow [#{workflow_name}](#{check_run_url}):\"\n else\n \" Check run [#{workflow_name}](#{check_run_url}):\"\n end\n message += if pull_request.present?\n \" [#{conclusion}](#{repo_url}/pull/#{pr_number}/checks?sha=#{sha})\"\n else\n \" [#{conclusion}](#{details_url})\"\n end\n message += \" on [#{sha.first(7)}](#{repo_url}/commit/#{sha.first(10)})\"\n message += \" by #{sender_login}\" if sender_login.present?\n message += \" in the [#{branch}](#{repo_url}#{branch_path}) branch\" if branch.present?\n message += \" for [PR ##{pr_number}](#{repo_url}/pull/#{pr_number})\" if pull_request.present?\n\n # We don't want to send more than one message for this workflow & sha with the same conclusion within 20 minutes.\n # This counter expires from Redis in 20 minutes.\n ci_counter = Redis::CI.new(\"check_run_#{workflow_name}_#{conclusion}_#{sha}\")\n ci_counter.sucess_count_incr\n ActionCable.server.broadcast 'smokedetector_messages', message: message if ci_counter.sucess_count == 1\n end", "def known_issue(id)\n skip_message = nil\n DemoLogger.log.info \"Checking status of KnownIssue 
'#{id}'\"\n uri = URI.parse(\"https://tickets.puppetlabs.com/rest/api/2/issue/#{id}?fields=summary,status\")\n\n max_tries = 2\n max_tries.downto(0) do |tries_left|\n begin\n Net::HTTP.start(uri.host, uri.port,\n :use_ssl => uri.scheme == 'https') do |http|\n request = Net::HTTP::Get.new uri\n request['Content-Type'] = \"application/json\"\n\n response = http.request request\n # If issue can't be found, log a warning and continue test case execution\n if response.code != \"200\"\n raise \"The specified known_issue #{id} could not be found in Jira, or the server could not be reached. Response code #{response.code}\"\n end\n\n parsed_response = JSON.parse(response.body)\n\n # Verify that the response is for the correct issue and not a \"closest match\".\n if parsed_response[\"key\"].to_s.downcase != id.downcase\n raise \"The specified known_issue '#{id}' could not be found in Jira.\"\n end\n\n # Determine if ticket is marked as fixed or not\n status = parsed_response[\"fields\"][\"status\"][\"name\"]\n\n fixed = false\n\n FIXED_ISSUE_STATUSES.each do |fixed_status|\n fixed = true if fixed_status == status.upcase\n end\n\n # If status of issue is fixed, log a warning with a reminder to remove known_issue and execute test.end\n if fixed\n DemoLogger.log.warn \"Issue '#{id}' - #{parsed_response[\"fields\"][\"summary\"]} is fixed! \" +\n \"Remove the known_issue from the automation, and update automated test if it is still failing.\"\n return nil\n else\n # If status of issue is not fixed, log a warning and skip test\n DemoLogger.log.warn \"Issue '#{id}' - #{parsed_response[\"fields\"][\"summary\"]} is NOT fixed. Skipping test.\"\n skip_message = \"SKIPPING TEST - Blocking known issue '#{id}' has not been fixed.\"\n end\n\n #TODO: Should also look at the resolution to see if it is fixed or done.\n\n return skip_message\n end\n rescue StandardError => ex\n DemoLogger.log.warn \"Unable to determine status of defect '#{id}'. Exception: #{ex}. Retrying #{tries_left} times\"\n sleep 5\n end\n end\n return skip_message\n end", "def fetch_jira_poll_url()\n\n jiraconfig = bot.config['jira_poll_url']\n\n if jiraconfig.is_a?(Array)\n searchurls_str = jiraconfig \n else \n searchurls_str = [jiraconfig]\n end\n\n tix = []\n\n searchurls_str.each do |searchurl_str|\n begin\n @log.info \"checking jira for new issues... 
#{searchurl_str}\"\n\t# jira_poll_url: \"http://username:password@www.host.com/jira/sr/jira.issueviews:searchrequest-xml/temp/SearchRequest.xml?jqlQuery=project+%3D+OPS+ORDER+BY+updated+DESC%2C+priority+DESC%2C+created+ASC&tempMax=25&field=key&field=link&field=title&field=reporter&field=assignee&field=type&field=priority&field=updated\"\n searchurl = Addressable::URI.parse(searchurl_str)\n @log.debug pp lookupurl.to_hash\n xmldata = open(searchurl.omit(:user, :password), \\\n :http_basic_authentication=>[searchurl.user, searchurl.password]).read\n doc = REXML::Document.new(xmldata)\n raise Exception.new(\"response had no content\") if doc.nil?\n doc.elements.inject('rss/channel/item', tix) do |tix, element|\n tix.push(parse_ticket_info(element))\n end\n rescue Exception => e\n @log.error \"error connecting to jira: #{e.message}\"\n # @log.error \"#{e.backtrace}\"\n end\n end\n return tix\n end", "def update_issue_status\n issues = Issue.all :conditions => { :project_id => @project.id,\n :tracker_id => @tracker.id,\n :subject => params[:subject] }\n if issues.size > 1\n @project = nil\n @tracker = nil\n @status = nil\n render_error({:message => :error_cws_ambiguous_query, :status => 400}) \n elsif issues.size == 0\n render_404\n else\n issue = issues[0]\n if !issue.new_statuses_allowed_to(User.current).include? @status\n render_403\n else\n if issue.status.id != @status.id\n issue.init_journal(User.current)\n issue.status = @status\n if issue.save\n if Setting.notified_events.include?('issue_updated')\n Mailer.deliver_issue_edit(issue.current_journal)\n end\n respond_to do |format|\n format.json { render :json => issue }\n format.xml { render :xml => issue }\n end\n else\n render_403\n end\n else\n # Return unchanged issue\n respond_to do |format|\n format.json { render :json => issue }\n format.xml { render :xml => issue }\n end\n end\n end\n end\n end", "def get_issues\n jira_issues = Hash.new\n # This is the REST URL that will be hit. 
Change the jql query if you want to adjust the query used here\n uri = URI(JIRA_BASE_URL + '/rest/api/2/search?jql=assignee+%3D+currentUser()+AND+status+not+in+(Closed,+Resolved)')\n\n Net::HTTP.start(uri.hostname, uri.port, :use_ssl => uri.scheme == 'https') do |http|\n request = Net::HTTP::Get.new(uri)\n request.basic_auth USERNAME, PASSWORD\n response = http.request request\n # If the response was good, then grab the data\n if response.code =~ /20[0-9]{1}/\n data = JSON.parse(response.body)\n data[\"issues\"].each do |item|\n jira_id = item[\"key\"]\n jira_issues[jira_id] = item[\"fields\"][\"summary\"]\n end\n else\n raise StandardError, \"Unsuccessful response code \" + response.code + \" for issue \" + issue\n end\n end\n return jira_issues\nend", "def report_check_suite_success\n data = params\n check_suite = data[:check_suite]\n conclusion = check_suite[:conclusion]\n branch = check_suite[:head_branch]\n pull_requests = check_suite[:pull_requests]\n pull_request = pull_requests[0]\n sha = check_suite[:head_sha]\n check_suite_status = check_suite[:status]\n repository = data[:repository]\n repo_name = repository[:name]\n repo_url = repository[:html_url]\n app_name = check_suite[:app][:name]\n sender_login = data[:sender][:login]\n pr_number = pull_request[:number] if pull_request.present?\n branch_path = ''\n branch_path = \"/tree/#{branch}\" if branch != 'master'\n\n # We are only interested in completed successes\n return if check_suite_status != 'completed' || conclusion != 'success' || sender_login == 'SmokeDetector'\n\n message = \"[ [#{repo_name}](#{repo_url}) ]\"\n message += \" #{app_name}:\"\n message += if pull_request.present?\n \" [#{conclusion}](#{repo_url}/pull/#{pr_number}/checks?sha=#{sha})\"\n else\n \" [#{conclusion}](#{repo_url}/commit/#{sha}/checks)\"\n end\n message += \" on [#{sha.first(7)}](#{repo_url}/commit/#{sha.first(10)})\"\n message += \" by #{sender_login}\" if sender_login.present?\n message += \" in the [#{branch}](#{repo_url}#{branch_path}) branch\" if branch.present?\n message += \" for [PR ##{pr_number}](#{repo_url}/pull/#{pr_number})\" if pull_request.present?\n\n # We don't want to send more than one message for this SHA with the same conclusion within 20 minutes.\n # This counter expires from Redis in 20 minutes.\n ci_counter = Redis::CI.new(\"check_suite_#{conclusion}_#{sha}\")\n ci_counter.sucess_count_incr\n ActionCable.server.broadcast 'smokedetector_messages', message: message if ci_counter.sucess_count == 1\n end", "def check\n issues = find_asana_issues\n\n messages = []\n\n issues.each do |issue|\n task = find_by_id(issue)\n\n unless task.nil?\n name = task.name[0..300].gsub(\"\\n\", \"<br />\").strip\n notes = task.notes[0..300].gsub(\"\\n\", \"<br />\").strip\n\n messages << \"**[#{name}](#{task.permalink_url})**\\n#{notes} |\"\n end\n end\n\n unless messages.empty?\n header = [\n \"Asana tasks in this PR |\",\n \"--- |\"\n ]\n\n markdown header\n .concat(messages)\n .join(\"\\n\")\n end\n end", "def process_inactive(issue)\n diff_in_months = (Time.now - issue.updated_at) / 60.0 / 60.0 / 24.0 / 30.0\n\n warning_sent = !!issue.labels.find { |a| a.name == AWAITING_REPLY }\n if warning_sent && diff_in_months > ISSUE_CLOSED\n # We sent off a warning, but we have to check if the user replied\n if client.issue_comments(SLUG, issue.number).last.user.login == myself\n # No reply from the user, let's close the issue\n puts \"https://github.com/#{SLUG}/issues/#{issue.number} (#{issue.title}) is #{diff_in_months.round(1)} months old, closing 
now\"\n body = []\n body << \"This issue will be auto-closed because there hasn't been any activity for a few months. Feel free to [open a new one](https://github.com/bunto/bunto/issues/new) if you still experience this problem 👍\"\n client.add_comment(SLUG, issue.number, body.join(\"\\n\\n\"))\n client.close_issue(SLUG, issue.number)\n client.add_labels_to_an_issue(SLUG, issue.number, [AUTO_CLOSED])\n else\n # User replied, let's remove the label\n puts \"https://github.com/#{SLUG}/issues/#{issue.number} (#{issue.title}) was replied to by a different user\"\n client.remove_label(SLUG, issue.number, AWAITING_REPLY)\n end\n smart_sleep\n elsif diff_in_months > ISSUE_WARNING\n return if issue.labels.find { |a| a.name == AWAITING_REPLY }\n\n puts \"https://github.com/#{SLUG}/issues/#{issue.number} (#{issue.title}) is #{diff_in_months.round(1)} months old, pinging now\"\n body = []\n body << \"There hasn't been any activity on this issue recently. Due to the high number of incoming GitHub notifications, we have to clean some of the old issues, as many of them have already been resolved with the latest updates.\"\n body << \"Please make sure to update to the latest `bunto` version and check if that solves the issue. Let us know if that works for you by adding a comment :+1:\"\n\n client.add_comment(SLUG, issue.number, body.join(\"\\n\\n\"))\n client.add_labels_to_an_issue(SLUG, issue.number, [AWAITING_REPLY])\n smart_sleep\n end\n end", "def process_inactive(issue)\n diff_in_months = (Time.now - issue.updated_at) / 60.0 / 60.0 / 24.0 / 30.0\n\n warning_sent = !!issue.labels.find { |a| a.name == AWAITING_REPLY }\n if warning_sent && diff_in_months > ISSUE_CLOSED\n # We sent off a warning, but we have to check if the user replied\n if last_responding_user(issue) == myself\n # No reply from the user, let's close the issue\n logger.info(\"https://github.com/#{SLUG}/issues/#{issue.number} (#{issue.title}) is #{diff_in_months.round(1)} months old, closing now\")\n body = []\n body << \"This issue will be auto-closed because there hasn't been any activity for a few months. Feel free to [open a new one](https://github.com/#{SLUG}/issues/new) if you still experience this problem :+1:\"\n client.add_comment(SLUG, issue.number, body.join(\"\\n\\n\"))\n client.close_issue(SLUG, issue.number)\n client.add_labels_to_an_issue(SLUG, issue.number, [AUTO_CLOSED])\n else\n # User replied, let's remove the label\n logger.info(\"https://github.com/#{SLUG}/issues/#{issue.number} (#{issue.title}) was replied to by a different user\")\n client.remove_label(SLUG, issue.number, AWAITING_REPLY)\n end\n smart_sleep\n elsif diff_in_months > ISSUE_WARNING\n return if issue.labels.find { |a| a.name == AWAITING_REPLY }\n\n logger.info(\"https://github.com/#{SLUG}/issues/#{issue.number} (#{issue.title}) is #{diff_in_months.round(1)} months old, pinging now\")\n body = []\n body << \"There hasn't been any activity on this issue recently. Due to the high number of incoming GitHub notifications, we have to clean some of the old issues, as many of them have already been resolved with the latest updates.\"\n body << \"Please make sure to update to the latest `fastlane` version and check if that solves the issue. Let us know if that works for you by adding a comment :+1:\"\n body << \"Friendly reminder: contributions are always welcome! 
Check out [CONTRIBUTING.md](https://github.com/fastlane/fastlane/blob/master/CONTRIBUTING.md) for more information on how to help with `fastlane` and feel free to tackle this issue yourself :muscle:\"\n body << \"\\n\\nThis issue will be auto-closed if there is no reply within #{months(ISSUE_CLOSED)}.\"\n\n client.add_comment(SLUG, issue.number, body.join(\"\\n\\n\"))\n client.add_labels_to_an_issue(SLUG, issue.number, [AWAITING_REPLY])\n smart_sleep\n end\n end", "def get_v1defect_Jira_list\n l = Array.new\n\n @db.execute(\"select jira_link from v1link where jira_link is not null and status is null\").each do |row|\n row.each do |issue|\n auth = {:username => $JIRA['username'], :password => $JIRA['password']}\n doc = HTTParty.get($JIRA['base_uri'] + '/rest/api/2/issue/' + issue.split('/').last + '?fields=status',\n :basic_auth => auth)\n i = @db.execute('select defect from v1link where jira_link = \"' + issue + '\"')\n\n if doc['errorMessages']\n p \"Error (#{i[0][0]}): \" + doc.to_s\n else\n l.push(i[0][0]) if doc['fields']['status']['name'] == @TRIGGER_STATUS\n end\n end\n end\n\n return l\n end", "def run\n case issue.action\n when \"opened\"\n job = PuppetLabs::Trello::TrelloIssueJob.new\n job.issue = issue\n delayed_job = job.queue\n logger.info \"Successfully queued up opened issue #{issue.repo_name}/#{issue.number} as job #{delayed_job.id}\"\n body = {\n 'job_id' => delayed_job.id,\n 'queue' => delayed_job.queue,\n 'priority' => delayed_job.priority,\n 'created_at' => delayed_job.created_at,\n }\n return [ACCEPTED, {}, body]\n else\n logger.info \"Ignoring issue #{issue.repo_name}/#{issue.number} because the action is #{issue.action}.\"\n body = { 'message' => 'Action has been ignored.' }\n return [OK, {}, body]\n end\n end", "def create_issue_in_jira\n\n if @one_m_users_flag.to_i == 1\n\n format_company_info_fields\n\n issue_params = {\n project_name:GlobalConstant::Jira.project_name,\n issue_type: GlobalConstant::Jira.task_issue_type,\n priority:GlobalConstant::Jira.medium_priority_issue,\n assignee: GlobalConstant::Jira.assignee_name,\n summary: get_issue_summary,\n description: get_issue_description\n }\n\n r = Ticketing::Jira::Issue.new(issue_params).perform\n\n @failed_logs = {\n debug_params: issue_params.to_hash\n } unless r.success?\n\n success\n\n end\n\n success\n\n end", "def get_issues(config)\n\n # Identify method entry\n debug_print \"#{ self.class } : #{ __method__ }\\n\"\n\n\n # Set up formatter for printing errors\n # config.output_format should be set based on less status by now\n formatter = Printer.new(config).build_formatter\n\n\n # Only attempt to get issues if API is specified\n if config.bitbucket_api.empty?\n debug_print \"No API found, this shouldn't be called...\\n\"\n return false\n end\n\n # If we haven't obtained the pw from user yet, do it\n if config.bitbucket_pw.empty?\n # No OAuth for Bitbucket yet, gotta get user password in order to make calls :(\n formatter.print_status \"!\", YELLOW\n print BOLD + \"Bitbucket password required for remote checking/posting.\\n\" + RESET\n print \" Password: \"\n\n # Block output to tty to prevent PW showing, Linux/Unix only :(\n system \"stty -echo\"\n _password = $stdin.gets.chomp\n system \"stty echo\"\n if _password.empty?\n print \"Input is blank. 
Please enter your password!\\n\"\n return false\n else\n print \"\\n\"\n end\n\n config.bitbucket_pw = _password\n end\n\n\n # Get all open tickets (anything but resolved)\n # Create options hash to pass to Remote::http_call\n # Issues URL for Bitbucket + SSL\n opts = {\n :url => \"https://bitbucket.org/api/1.0/repositories/#{ config.bitbucket_repo }/issues\",\n :ssl => true,\n :method => \"GET\",\n :basic_auth => [config.bitbucket_api, config.bitbucket_pw],\n :verbose => false\n }\n\n _json, _resp = Watson::Remote.http_call(opts)\n\n\n # Check response to validate repo access\n if _resp.code != \"200\"\n formatter.print_status \"x\", RED\n print BOLD + \"Unable to access remote #{ config.bitbucket_repo }, Bitbucket API may be invalid\\n\" + RESET\n print \" Make sure you have created an issue tracker for your repository on the Bitbucket website\\n\"\n print \" Consider running --remote (-r) option to regenerate/validate settings\\n\"\n print \" Status: #{ _resp.code } - #{ _resp.message }\\n\\n\"\n\n debug_print \"Bitbucket invalid, setting config var\\n\"\n config.bitbucket_valid = false\n return false\n end\n\n\n # Create hash entry from each returned issue\n # MD5 of issue serves as hash key\n # Hash value is another hash of info we will use\n _json[\"issues\"].each do |issue|\n\n # Skip this issue if it doesn't have watson md5 tag\n next if (_md5 = issue[\"content\"].match(/__md5__ : (\\w+)/)).nil?\n\n # If it does, use md5 as hash key and populate values with our info\n config.bitbucket_issues[_md5[1]] = {\n :title => issue[\"title\"],\n :id => issue[\"local_id\"],\n :state => issue[\"status\"]\n }\n end\n\n config.bitbucket_valid = true\n end", "def fetch_jira_url()\n \n jiraconfig = bot.config['jira_url']\n \n if jiraconfig.is_a?(Array)\n searchurls = jiraconfig \n else \n searchurls = [jiraconfig]\n end\n \n tix = []\n \n searchurls.each do |searchurl|\n begin\n @log.info \"checking jira for new issues... #{searchurl}\"\n xmldata = open(searchurl).read\n doc = REXML::Document.new(xmldata)\n raise Exception.new(\"response had no content\") if doc.nil?\n doc.elements.inject('rss/channel/item', tix) do |tix, element|\n tix.push(parse_ticket_info(element))\n end\n rescue Exception => e\n @log.error \"error connecting to jira: #{e.message}\"\n # @log.error \"#{e.backtrace}\"\n end\n end\n return tix\n end", "def get_Jira_list\n l = Array.new\n\n @db.execute(\"select jira_link from v1link where jira_link is not null and status is null\").each do |row|\n row.each do |issue|\n auth = {:username => $JIRA['username'], :password => $JIRA['password']}\n doc = HTTParty.get($JIRA['base_uri'] + '/rest/api/2/issue/' + issue.split('/').last + '?fields=status',\n :basic_auth => auth)\n\n l << issue if doc['fields']['status']['name'] == @TRIGGER_STATUS\n end\n end\n\n return l\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
fetch jira url and return a list of ticket Hashes
def fetch_jira_poll_url()
  jiraconfig = bot.config['jira_poll_url']

  if jiraconfig.is_a?(Array)
    searchurls_str = jiraconfig
  else
    searchurls_str = [jiraconfig]
  end

  tix = []

  searchurls_str.each do |searchurl_str|
    begin
      @log.info "checking jira for new issues... #{searchurl_str}"
      # jira_poll_url: "http://username:password@www.host.com/jira/sr/jira.issueviews:searchrequest-xml/temp/SearchRequest.xml?jqlQuery=project+%3D+OPS+ORDER+BY+updated+DESC%2C+priority+DESC%2C+created+ASC&tempMax=25&field=key&field=link&field=title&field=reporter&field=assignee&field=type&field=priority&field=updated"
      searchurl = Addressable::URI.parse(searchurl_str)
      @log.debug pp searchurl.to_hash
      xmldata = open(searchurl.omit(:user, :password), \
        :http_basic_authentication=>[searchurl.user, searchurl.password]).read
      doc = REXML::Document.new(xmldata)
      raise Exception.new("response had no content") if doc.nil?
      doc.elements.inject('rss/channel/item', tix) do |tix, element|
        tix.push(parse_ticket_info(element))
      end
    rescue Exception => e
      @log.error "error connecting to jira: #{e.message}"
      # @log.error "#{e.backtrace}"
    end
  end
  return tix
end
[ "def fetch_jira_url()\n \n jiraconfig = bot.config['jira_url']\n \n if jiraconfig.is_a?(Array)\n searchurls = jiraconfig \n else \n searchurls = [jiraconfig]\n end\n \n tix = []\n \n searchurls.each do |searchurl|\n begin\n @log.info \"checking jira for new issues... #{searchurl}\"\n xmldata = open(searchurl).read\n doc = REXML::Document.new(xmldata)\n raise Exception.new(\"response had no content\") if doc.nil?\n doc.elements.inject('rss/channel/item', tix) do |tix, element|\n tix.push(parse_ticket_info(element))\n end\n rescue Exception => e\n @log.error \"error connecting to jira: #{e.message}\"\n # @log.error \"#{e.backtrace}\"\n end\n end\n return tix\n end", "def get_issues\n jira_issues = Hash.new\n # This is the REST URL that will be hit. Change the jql query if you want to adjust the query used here\n uri = URI(JIRA_BASE_URL + '/rest/api/2/search?jql=' + JQL)\n\n Net::HTTP.start(uri.hostname, uri.port, :use_ssl => uri.scheme == 'https') do |http|\n request = Net::HTTP::Get.new(uri)\n request.basic_auth USERNAME, PASSWORD\n response = http.request request\n # If the response was good, then grab the data\n if response.code =~ /20[0-9]{1}/\n data = JSON.parse(response.body)\n data[\"issues\"].each do |item|\n jira_id = item[\"key\"]\n jira_issues[jira_id] = item[\"fields\"][\"summary\"]\n end\n else\n raise StandardError, \"Unsuccessful HTTP response code: \" + response.code\n end\n end\n return jira_issues\nend", "def get_issues\n jira_issues = Hash.new\n # This is the REST URL that will be hit. Change the jql query if you want to adjust the query used here\n uri = URI(JIRA_BASE_URL + '/rest/api/2/search?jql=assignee+%3D+currentUser()+AND+status+not+in+(Closed,+Resolved)')\n\n Net::HTTP.start(uri.hostname, uri.port, :use_ssl => uri.scheme == 'https') do |http|\n request = Net::HTTP::Get.new(uri)\n request.basic_auth USERNAME, PASSWORD\n response = http.request request\n # If the response was good, then grab the data\n if response.code =~ /20[0-9]{1}/\n data = JSON.parse(response.body)\n data[\"issues\"].each do |item|\n jira_id = item[\"key\"]\n jira_issues[jira_id] = item[\"fields\"][\"summary\"]\n end\n else\n raise StandardError, \"Unsuccessful response code \" + response.code + \" for issue \" + issue\n end\n end\n return jira_issues\nend", "def get_Jira_list\n l = Array.new\n\n @db.execute(\"select jira_link from v1link where jira_link is not null and status is null\").each do |row|\n row.each do |issue|\n auth = {:username => $JIRA['username'], :password => $JIRA['password']}\n doc = HTTParty.get($JIRA['base_uri'] + '/rest/api/2/issue/' + issue.split('/').last + '?fields=status',\n :basic_auth => auth)\n\n l << issue if doc['fields']['status']['name'] == @TRIGGER_STATUS\n end\n end\n\n return l\n end", "def get_jira_tickets(status, project, version, max_num_results=300)\n\n response = @client.call(:get_issues_from_jql_search,\n message: {:token => @token,\n :jqlSearch => 'status in (' + status + ') and project=' + project + ' and fixVersion in (' + version + ')',\n :maxNumResults => max_num_results})\n #if response is empty\n if response.to_hash[:multi_ref].nil?\n nil\n else\n jira_tickets = []\n response.to_hash[:multi_ref].each do |tickets|\n if !tickets[:key].nil? 
and !tickets[:summary].nil?\n jira_tickets << [tickets[:key], tickets[:summary], 'http://'+@jira_host+'/browse/'+tickets[:key].to_s]\n end\n end\n jira_tickets\n end\n rescue Savon::SOAPFault => error\n return false, error.to_hash[:fault][:faultstring].match(/.*?:(.*)/)[1]\n end", "def get_v1defect_Jira_list\n l = Array.new\n\n @db.execute(\"select jira_link from v1link where jira_link is not null and status is null\").each do |row|\n row.each do |issue|\n auth = {:username => $JIRA['username'], :password => $JIRA['password']}\n doc = HTTParty.get($JIRA['base_uri'] + '/rest/api/2/issue/' + issue.split('/').last + '?fields=status',\n :basic_auth => auth)\n i = @db.execute('select defect from v1link where jira_link = \"' + issue + '\"')\n\n if doc['errorMessages']\n p \"Error (#{i[0][0]}): \" + doc.to_s\n else\n l.push(i[0][0]) if doc['fields']['status']['name'] == @TRIGGER_STATUS\n end\n end\n end\n\n return l\n end", "def get_my_issues\n get_json( GITHUB_ISSUES_URL ).each do |item|\n puts \"#{ item[ 'repository' ][ 'full_name' ] }: ##{ item[ 'number' ] } #{ item[ 'title' ] } | href=#{ item[ 'html_url' ] }\"\n end\nend", "def get_issues( url, label )\n\t\tissues = @git.client.issues( url, :per_page => 100 )\n\t\tcleaned_issues = parse_issue_array( issues, label )\n\t\tputs cleaned_issues.inspect\n\t\treturn cleaned_issues\n\tend", "def tickets_for_project(project_id)\n tickets = send(:get , \"/api/v1/projects/#{project_id}/tickets.json\")\n\n process_list_response( tickets , Unfuddled::Ticket )\n end", "def add_jira_tickets_to_omnifocus ()\n # Get the open Jira issues assigned to you\n results = Issue.query_results\n if results.nil?\n puts \"No results from Jira\"\n exit\n end\n puts \"\\\"#{QUERY}\\\" returned #{results.size} results from #{JIRA_BASE_URL}\"\n\n # Iterate through resulting issues.\n results.each do |jira_id, issue|\n jira_issues[jira_id] = issue # cache results\n add_task(issue)\n end\nend", "def get_tickets\n Ticket::RESULTS.tickets\n end", "def fetch\n urls\n .map do |url|\n parse_feed(Feedjira::Feed.fetch_and_parse(url))\n end\n .inject :+\n end", "def issues\n workspace_id = zenhub_workspace_id\n repo_id = zenhub_repo_id\n url = \"https://api.zenhub.io/p2/workspaces/#{workspace_id}/repositories/#{repo_id}/board\" \n issues = Array.new\n uri = URI(url)\n response = Net::HTTP.start(uri.host, uri.port, :use_ssl => true) do |http|\n request = Net::HTTP::Get.new(uri)\n request['Content-Type'] = 'application/json'\n request['X-Authentication-Token'] = ENV['TOKEN']\n\n http.request(request)\n end\n\n board = JSON.parse(response.body)\n board[\"pipelines\"].each do |columns|\n if columns[\"name\"] == COLUMN\n columns[\"issues\"].each do |issue|\n issues.push(issue['issue_number'])\n end\n end\n end\n issues\nend", "def index\n a = URI.parse(\"https://api.github.com/repos/nnluukhtn/employment_bot/issues?state=all\").read\n @gitissue = JSON.parse(a)\n end", "def get_issues_info\n closed_issues = []\n stop = false\n page = 1\n\n until stop\n issue_fetch = HTTParty.get(@GITHUB_API_BASE_URL + \"/issues?state=closed&page=#{page}&access_token=#{@access_token}\", headers: {\n \"User-Agent\" => @user_agent\n })\n\n if issue_fetch.is_a?(Hash) && issue_fetch['message'] === 'Not Found'\n break\n end\n\n if issue_fetch.count === 0\n stop = true\n end\n\n\n issue_fetch.each do |issue|\n closed_issues << {\n 'number' => issue['number'],\n 'created_at' => issue['created_at'],\n 'closed_at' => issue['closed_at'],\n 'duration' => (Date.parse(issue['closed_at']) - 
Date.parse(issue['created_at'])).to_i\n }\n end\n\n page += 1\n end\n\n closed_issues.reverse!\n end", "def create_ticket\n jiraPair = @defect.getJiraList\n mapping = jiraAPIMapping\n payload = {\n :fields =>\n {:project =>\n {:key => \"#{jiraPair['Project']}\"},\n :summary => jiraPair['Summary'] + \" (#{@defect.get_story})\",\n :description => Sanitize.clean(jiraPair['Description']),\n mapping['Release Milestone'] => {:value => jiraPair['Release Milestone']},\n :customfield_10143 => [{:value => jiraPair['Environment'],}],\n :issuetype => {:name => jiraPair['issuetype']},\n mapping['Functional Group'] => {:value => jiraPair['Functional Group']},\n mapping['Project Manager'] => {:value => jiraPair['Project Manager']},\n :versions => [{:name => \"#{jiraPair['Release']}\",}],\n },\n }\n\n response = self.class.post('/rest/api/latest/issue/',\n :body => payload.to_json,\n :headers => {'Content-Type' => 'application/json' },\n :verify => false)\n\n url = \"\"\n if response['key']\n url = $JIRA['base_uri'] + \"/browse/\" + response['key']\n @defect.setJiraLink(url)\n else\n errormsg = \"Error (#{@defect.get_story}): #{response}\"\n p errormsg\n @defect.setDefectError(errormsg)\n end\n\n return url\n end", "def load_issues(sprint_num)\n (return [] unless login) unless @logged_in\n issues = []\n\n## get sprint page\n sprint_url = GOTO_URL.gsub('<s/i>', \"s\" + sprint_num) \n p = get_page(sprint_url, URL_MATCHERS[:sprint])\n return [] unless p\n STDERR.puts \"Navigated to '#{p.title}'\" if DEBUG\n\n rows = p.search(\"//tr\")\n rows.each do |r|\n if r['id'] =~ ISSUE_ID_MATCHER\n issue_number_container = p.search(\"//a[@id='issue_number_for_#{$1}']\")\n desc_container = p.search(\"//div[@id='task_description_link_#{$1}']/span\")\n owner_container = p.search(\"//div[@id='task_owner_id_link_#{$1}']\")\n status_container = p.search(\"//div[@id='task_status_#{$1}']\")\n issues << {:number => issue_number_container ? issue_number_container.text : nil, \n :description => (desc_container && desc_container.children[1]) ? desc_container.children[1].text.strip : nil,\n :owner => owner_container ? owner_container.text.strip : nil,\n :status => status_container ? status_container.text.strip : nil}\n end\n end\n\n #issues.each do |i| puts \"#{i[:number]} - #{i[:description]}\" end\n issues\nend", "def fetch_issues()\n\n # fetch last created date\n last_creation = @kv.get(@last_creation_k) # || \"2015-05-11T16:37:21Z\"\n\n # request Redmine issues\n issues = @redmine_api.issues(created_on: \">=#{last_creation}\", limit:200, sort:\"created_on\")['issues']\n puts issues if self.verbose\n \n # filter issues to include only certain projects, avoid certain users, avoid self tickets\n issues = issues.select do |issue|\n Utils.project_whitelisted? issue and Utils.user_not_blacklisted? issue and not Utils.ticket_to_self? issue\n end\n\n # no issues\n if issues.empty?\n puts \"#{Utils.human_timestamp} No new issues since #{last_creation}.\"\n return\n end\n\n # post issues\n issues.each do |issue|\n post_issue(issue)\n end\n\n # store the created data of the last ticket + 1 second\n last_creation = issues.last[\"created_on\"]\n last_creation_plus = (Time.parse(last_creation)+1).iso8601\n @kv.put(@last_creation_k, last_creation_plus)\n \n puts \"#{Utils.human_timestamp}: Posted #{issues.length} issues. 
Last created #{@kv.get(@last_creation_k)}\"\n\n end", "def getJiraList\n jiraContent = Hash.new\n @MAP.each do |k, v|\n next if k.start_with?('-')\n content = @doc.xpath('//Attribute[@name=\"' + v + '\"]').text\n\n jiraContent[k] << \",\" if jiraContent.has_key?(k)\n jiraContent[k] = content\n end\n\n @sMAP.each do |k, v|\n# jiraContent[k] << \",\" if jiraContent.has_key?(k)\n if jiraContent.has_key?(k) && jiraContent[k].length > 0\n next\n else\n jiraContent[k] = v\n end\n end\n\n return jiraContent\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
extract array of comments from an xml element (ticket)
def parse_ticket_for_comments(xml)
  comments = []

  doc = REXML::Document.new(xml)

  doc.elements.inject('item/comments', comments) do |comments, element|
    comments.push(parse_comment_info(element))
  end

  return comments
end
[ "def comments tag\n path = \"/comment/#{tag}/show.xml\"\n doc = http_get path\n \n comments = []\n xpath(doc, \"//comment\").each do |entry|\n comments << Comment.new(:xml => entry)\n end\n comments\n end", "def comments\n get_ticket_property_list(\"comments\" , Unfuddled::Comment)\n end", "def parse_comments(comments_box)\n comments = comments_box.search(COMMENT_XPATH)\n comments.map do |comment|\n date = comment.search(COMMENT_DATE_XPATH).text\n text = comment.search(COMMENT_TEXT_XPATH).text\n {date: \"Date: #{date}\", text: text}\n end\n end", "def time_schedule_comments\n return [] if attributes[\"TimeScheduleComments\"].attributes[\"SectionComments\"].attributes[\"Lines\"].nil?\n [attributes[\"TimeScheduleComments\"].attributes[\"SectionComments\"].attributes[\"Lines\"].attributes[\"Line\"]].flatten.collect(&:Text)\n end", "def extract_comments(item)\n comments = []\n comment_nodes = item.xpath('wp:comments')\n\n comment_nodes.each do |c|\n comments << process_comment(c) if comment_is_valid?(c)\n end\n\n comments\n end", "def parse_comment_info(xml_element)\n text = xml_element.elements['comment'].text rescue \"\"\n author = xml_element.elements['comment'].key['author'] rescue \"\"\n created = xml_element.elements['comment'].key['created'] rescue \"\"\n\n return {\n :text => text,\n :author => author,\n :created => created\n }\n end", "def comments\n request_str = \"/gallery/appliances/#{id.to_i}/comments\"\n response = GenericRequest.new(self.class.studio_connection).get request_str\n tree = XmlSimple.xml_in response, \"ForceArray\" => [\"comment\"]\n tree[\"appliance\"][\"comments\"][\"comment\"].collect do |c|\n Comment.parse(self,c)\n end\n end", "def parse_tickets(tickets)\n\t\t\ttickets.search(\"//b\").each do |node|\n\t\t\t\tnode.replace Nokogiri::XML::Text.new(node.inner_html, node.document)\n\t\t\tend\n\t\t\n\t\t\ttickets.search(\"//font\").each do |node|\n\t\t\t\tnode.replace Nokogiri::XML::Text.new(node.inner_html, node.document)\n\t\t\tend\n\t\t\n\t\t\ttickets.search('//*[@align=\"right\"]').each do |node|\n\t\t\t\tnode.remove\n\t\t\tend\n\t\t\t\n\t\t\ttickets.at('//*[@colspan=\"5\"]').remove\n\t\t\n\t\t\t@tickets = tickets\n\t\tend", "def comments(sheet=nil)\r\n sheet ||= @default_sheet\r\n read_comments(sheet) unless @comments_read[sheet]\r\n if @comment[sheet]\r\n @comment[sheet].each.collect do |elem|\r\n [elem[0][0],elem[0][1],elem[1]]\r\n end\r\n else\r\n []\r\n end\r\n end", "def comments(sheet=nil)\n sheet ||= @default_sheet\n read_comments(sheet) unless @comments_read[sheet]\n if @comment[sheet]\n @comment[sheet].each.collect do |elem|\n [elem[0][0],elem[0][1],elem[1]]\n end\n else\n []\n end\n end", "def comments\r\n return node_list(TYPE_COMMENT, nil)\r\n end", "def get_comments id, vimeo, xml=nil\n comments = Array.new\n url = vimeo.generate_url({\"method\" => \"vimeo.videos.comments.getList\",\n \"video_id\" => id, \"api_key\" => vimeo.api_key}, \"read\")\n unless xml\n #does not get covered by specs because we get an internal xml file\n xml_data = Net::HTTP.get_response(URI.parse(url)).body\n else\n xml_data = File.open(File.join(File.dirname(xml), File.basename(xml, '.xml')+'.comments.xml'))\n end\n xml_doc = REXML::Document.new(xml_data)\n \n xml_doc.elements.each('rsp/comments/comment') do |comment|\n id = comment.attributes[\"id\"].to_i\n author = comment.attributes[\"author\"]\n authorname = comment.attributes[\"authorname\"]\n date = comment.attributes[\"datecreate\"]\n url = comment.attributes[\"permalink\"]\n text = comment.text\n @portraits = 
Array.new\n xml_doc.elements.each('rsp/comments/comment/portraits/portrait') do |thumb|\n portrait_url = thumb.text\n w = thumb.attributes[\"width\"].to_i\n h = thumb.attributes[\"height\"].to_i\n thumbnail = Thumbnail.new(portrait_url, w, h)\n @portraits << thumbnail\n end\n com = Comment.new(id, author, authorname, date, url, text, @portraits)\n @comments << com\n end\n end", "def scrape_comments(href)\n p \"scraping comments #{href}\"\n \n comments = Array.new\n \n \n #begin transaction to get comments\n @agent.transact {\n page = @agent.get(href)\n \n page.search(\".ctextfooterwrap\").each{ |elem|\n #each ctextfooterwrap is a comment\n #a textfooter wrap is composed of ctext and cfooter\n \n #create our blog comment template\n str = <<-eos\n<wp:comment>\n<wp:comment_id></wp:comment_id>\n<wp:comment_author><![CDATA[]]></wp:comment_author>\n<wp:comment_author_email></wp:comment_author_email>\n<wp:comment_author_url></wp:comment_author_url>\n<wp:comment_author_IP></wp:comment_author_IP>\n<wp:comment_date></wp:comment_date>\n<wp:comment_date_gmt></wp:comment_date_gmt>\n<wp:comment_content><![CDATA[]]></wp:comment_content>\n<wp:comment_approved>1</wp:comment_approved>\n<wp:comment_type></wp:comment_type>\n<wp:comment_parent>0</wp:comment_parent>\n<wp:comment_user_id>0</wp:comment_user_id>\n</wp:comment>\n eos\n \n doc = Hpricot.XML(str)\n \n #this gives us the string with type= \"Posted 3/24/2009 8:45 PM by anon ymos - delete - reply\"\n str_arr = elem.search(\".cfooter\").inner_text.split(\" \")\n #wp:comment_date/wp:comment_date_gmt have format of: 2009-03-10 00:12:22\n str_arr[1] = str_arr[1].split(\"/\") #first we must fix format of year\n \n str_arr[1][0]= \"0\" + str_arr[1][0].to_s if str_arr[1][0].to_s.size == 1 #we want month padded to 2 digits\n str_arr[1][1]= \"0\" + str_arr[1][1].to_s if str_arr[1][0].to_s.size == 1 #we want day padded to 2 digits\n \n str_arr[1] = str_arr[1][2] + \"-\" + str_arr[1][0] + \"-\" + str_arr[1][1]\n str_arr[2] = convert_to_usable_time(str_arr[2] + \" \" + str_arr[3] ).split(\" \")[0]\n str_arr[1] = str_arr[1] + \" \" + str_arr[2]\n \n p \"date is #{str_arr[1]}\"\n doc.search(\"wp:comment_date\").inner_html = str_arr[1]\n doc.search(\"wp:comment_date_gmt\").inner_html = str_arr[1]\n\n #set comment id to next value\n doc.search(\"wp:comment_id\").inner_html = \"#{@comment_id}\"\n \n #author is found in str_arr at element index=5 and continues till we find element \"-\"\n temp = \"\"\n while str_arr[5] != \"-\"\n temp = temp + str_arr[5] + \" \"\n str_arr.delete_at(5)\n end\n \n #in case of anonymous commenter, they can leave a site url in the name\n #thanks be to glorious xanga dom-design engineer but we now have to take that out\n temp = temp.gsub(/\\(.*\\)/, \"\")\n \n while temp[-1] == 32\n temp.chop!\n end \n \n doc.search(\"wp:comment_author\").inner_html = \"<![CDATA[#{temp}]]>\"\n p \"author= #{temp}\"\n \n #author email is not present?\n #comment_author_IP is not present?\n \n # fill in comment_author_url\n #if cfooter contains 2, or 3 href tags, we've got an anonymous comment\n #if 2, then anonymous and no url provided\n #if 3, then anonymous and url provided\n temp = elem.search(\".cfooter\").search(\"a\")\n if temp.length == 3 #first link is provided 'site' url\n temp[0] = temp[0].to_s\n temp[0] = temp[0].slice(/href=\\\".*\\\"/).gsub(\"href=\\\"\",\"\").gsub(\"\\\"\",\"\")\n \n p \"comment author=#{temp[0]}\"\n doc.search(\"wp:comment_author_url\").inner_html = temp[0]\n elsif temp.length == 4 #second link is provided user that commented\n 
temp[1] = temp[1].to_s\n temp[1] = temp[1].slice(/href=\\\".*\\\"/).gsub(\"href=\\\"\",\"\").gsub(\"\\\"\",\"\")\n \n p \"comment author=#{temp[1]}\"\n doc.search(\"wp:comment_author_url\").inner_html = temp[1]\n end\n \n #capture comment id for hierarchical sorting\n temp = elem.search(\".cfooter\").search(\"a[@onclick]\").to_s\n temp = temp.slice( /direction=n#\\d*\\'/).gsub(\"direction=n#\",\"\")\n @comment_hash[temp.to_i] = @comment_id #register comment id\n p \"key #{temp.to_i} added to comment id=#{@comment_id}\"\n @comment_id += 1\n \n #capture if this elem has parent-id\n #ctext:class=teplyto x--PARENTID--x\n temp = elem.search(\".ctext\").search(\".replyto\")\n if temp.size == 1\n temp = temp[0].to_s\n temp = temp.slice!(/x--\\d*--x/)\n temp.gsub!(\"x--\",\"\")\n temp.gsub!(\"--x\",\"\")\n \n #p \"lookup parent-id= #{temp}\"\n temp = @comment_hash[temp.to_i]\n \n p \"parent id= #{temp}\"\n doc.search(\"wp:comment_parent\").inner_html = \"#{temp.to_i}\"\n \n #additionally, this takes a special key thingamajic\n doc.search(\"wp:comment_user_id\").inner_html = \"6074067\"\n \n elsif temp.size > 1 #this should NEVER happen, cant have >1 replyto element\n p \"This is an error!\"\n throw Exception.new(\"More than 1 replyto element found\")\n end #end: if temp.size == 1\n \n #finally, insert comment-content where it belongs\n temp = elem.search(\".ctext\").inner_text\n p \"comment=#{temp}\"\n doc.search(\"wp:comment_content\").inner_html = \"<![CDATA[#{temp}]]>\"\n \n #add document model for the comment to the list of arrays\n comments.push(doc)\n \n }#end:page.search(\".ctextfooterwrap\").each{ |elem|\n }#end:@agent.transact {\n \n #TODO: figure out if we need to recurse further down to get next 25 comments?\n \n comments\n end", "def comments(doc)\n doc.search('.comment > font:first-child').map { |font| font.inner_text }\n end", "def comments(sheet = nil)\n sheet ||= default_sheet\n read_comments(sheet) unless @comments_read[sheet]\n return [] unless @comment[sheet]\n @comment[sheet].each.collect do |elem|\n [elem[0][0], elem[0][1], elem[1]]\n end\n end", "def comments(bugid)\n getList(\"Bug.comments\", bugid, 'bugs') { |b|\n b['comments'].collect { |c| \n Comment.new(c) \n } \n }\n end", "def read_comments(sheet=nil)\n sheet ||= @default_sheet\n validate_sheet!(sheet)\n n = sheet_index(sheet)\n return unless @comments_doc[n] #>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>\n @comments_doc[n].xpath(\"//xmlns:comments/xmlns:commentList/xmlns:comment\").each do |comment|\n ref = comment.attributes['ref'].to_s\n row,col = Roo::Base.split_coordinate(ref)\n comment.xpath('./xmlns:text/xmlns:r/xmlns:t').each do |text|\n @comment[sheet] ||= {}\n @comment[sheet][[row,col]] = text.text\n end\n end\n @comments_read[sheet] = true\n end", "def comments\n comments = []\n self.review_submissions.each do |submission|\n submission.comments.each do |comment|\n comments.push(comment)\n end\n end\n return comments\n end", "def get_commented_urls(doc)\n doc.xpath('//comment()').flat_map { |e| URI.extract(e.to_html, 'http') }\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
extract comment hash from individual xml element
def parse_comment_info(xml_element)
  text = xml_element.elements['comment'].text rescue ""
  author = xml_element.elements['comment'].key['author'] rescue ""
  created = xml_element.elements['comment'].key['created'] rescue ""

  return {
    :text => text,
    :author => author,
    :created => created
  }
end
[ "def convert_xml_comment(el, opts)\n block = el.options[:category] == :block\n indent = SPACE * @current_indent\n content = el.value\n content.gsub!(/^<!-{2,}\\s*/, \"\") if content.start_with?(\"<!--\")\n content.gsub!(/-{2,}>$/, \"\") if content.end_with?(\"-->\")\n result = content.lines.map.with_index do |line, i|\n (i.zero? && !block ? \"\" : indent) +\n @pastel.decorate(\"#{@symbols[:hash]} \" + line.chomp,\n *@theme[:comment])\n end.join(NEWLINE)\n block ? result + NEWLINE : result\n end", "def extract_hash(node); end", "def parse_ticket_for_comments(xml)\n comments = []\n\n doc = REXML::Document.new(xml)\n\n doc.elements.inject('item/comments', comments) do |comments, element|\n comments.push(parse_comment_info(element))\n end\n\n return comments\n end", "def hash_comments(comments)\n Digest::MD5.hexdigest(comments.map(&:comment).to_s)\n end", "def extract_comment comment\n if comment.text =~ /^#[ \\t]*:section:.*\\n/ then\n start = $`\n rest = $'\n\n comment.text = if start.empty? then\n rest\n else\n rest.sub(/#{start.chomp}\\Z/, '')\n end\n end\n\n comment\n end", "def comments_hash_flag; end", "def parse_comments\n @data[4][0]\n end", "def cid\n chunks = self.url.split('/')\n\twid = chunks[-2]\n\tcomment_part = chunks[-1]\n\tcomment_part =~ /comment-(\\d+).xml/\n\t\"#{wid}#{$1}\"\n end", "def visit_comment(node); end", "def first_comment\n hash = {}\n comments_table = self.div(:class=>\"contentcommentsTable\")\n last_message = comments_table.div(:class=>\"contentcomments_comment last\")\n hash.store(:poster, last_message.span(:class=>\"contentcomments_posterDataName s3d-regular-links\").link.text)\n hash.store(:date, last_message.span(:class=>\"contentcomments_dateComment\").text)\n hash.store(:message, last_message.div(:class=>\"contentcomments_message\").text)\n hash.store(:delete_button, last_message.button(:id=>/contentcomments_delete_\\d+/))\n return hash\n end", "def comment\n # logger.debug2 \"comment = #{read_attribute(:comment)} (#{read_attribute(:comment).class.name})\"\n return nil unless (extended_comment = read_attribute(:comment))\n encrypt_remove_pre_and_postfix(extended_comment, 'comment', 31)\n end", "def comment\n @comment ||= begin\n space = node.previous_sibling and\n space.to_s.blank? && space.to_s.count(\"\\n\") == 1 and\n comment_node = space.previous_sibling\n\n if comment_node.is_a?(REXML::Comment)\n doc.restore_erb_scriptlets(comment_node.to_s.strip)\n end\n end\n end", "def get_link_id(comment)\n\treturn comment['name']\nend", "def xmlComment(c)\n s = \"<!-- #{c}\".ljust(XML_COMMENT_WIDTH - 4) + \" -->\"\n end", "def comment\n file = ''\n MIME.mime_dirs.each { |dir|\n file = \"#{dir}/#{@type}.xml\"\n break if File.file? file\n }\n\n open(file) { |f|\n doc = REXML::Document.new f\n comments = {}\n REXML::XPath.match(doc, '*/comment').each { |c|\n if att = c.attributes['xml:lang']\n comments[att] = c.text\n else\n comments.default = c.text\n end\n }\n }\n comments\n end", "def comment\n file = ''\n MIME.mime_dirs.each { |dir|\n file = \"#{dir}/#{@type}.xml\"\n break if File.file? 
file\n }\n\n comments = {}\n open(file) { |f|\n doc = REXML::Document.new f\n REXML::XPath.match(doc, '*/comment').each { |c|\n if att = c.attributes['xml:lang']\n comments[att] = c.text\n else\n comments.default = c.text\n end\n }\n }\n comments\n end", "def xmlParseComment()\n return unless cmp('<!--') # Check that there is a comment right here\n skip(4)\n begin\n until (cmp(\"-->\"))\n raise XML_Comment_Error, \"line #{@line}: '--' must not occur within comments\" if cmp(\"--\")\n skip # Skip forward until we find a closing brace\n end\n skip(3) # Skip the \"-->\"\n rescue OutOfBoundsError\n raise XML_Comment_Error, \"Comment not terminated\"\n end\n end", "def comment\r\n file = ''\r\n MIME.mime_dirs.each { |dir|\r\n file = \"#{dir}/#{content_type}.xml\"\r\n break if File.file? file\r\n }\r\n\r\n comments = {}\r\n open(file) { |f|\r\n doc = REXML::Document.new f\r\n REXML::XPath.match(doc, '*/comment').each { |c|\r\n if att = c.attributes['xml:lang']\r\n comments[att] = c.text\r\n else\r\n comments.default = c.text\r\n end\r\n }\r\n }\r\n comments\r\n end", "def get_comment_age\n page.css(\".comment-tree\").css(\".comhead\").map do |header| \n header.text.match(/\\S+\\s\\S+/).to_s.split(' ')[1]\n end\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
extract ticket hash from individual xml element
def parse_ticket_info(xml_element)
  id = xml_element.elements['key'].text rescue ""
  id, spacekey = split_spacekey_and_id(id) rescue ""

  link = xml_element.elements['link'].text rescue ""
  title = xml_element.elements['title'].text rescue ""
  reporter = xml_element.elements['reporter'].text rescue ""
  assignee = xml_element.elements['assignee'].text rescue ""
  type = xml_element.elements['type'].text rescue ""
  priority = xml_element.elements['priority'].text rescue ""
  updated = xml_element.elements['updated'].text rescue ""
  status = xml_element.elements['status'].text rescue ""

  return {
    :spacekey => spacekey,
    :id => id,
    :link => link,
    :title => title,
    :reporter => reporter,
    :assignee => assignee,
    :type => type,
    :priority => priority,
    :updated => updated,
    :status => status
  }
end
[ "def generate_xmlhash\n self.xmlhash = Digest::SHA256.hexdigest(self.xml)\n end", "def extract_hash(node); end", "def xml_to_hash(xml)\n hash = Hash.from_xml(xml)\n convert_hex(hash, [\"DeviceMacId\", \"MeterMacId\"])\n hash\n end", "def digest_value(xml)\n canonical = xml.canonicalize(Nokogiri::XML::XML_C14N_EXCLUSIVE_1_0)\n digest = OpenSSL::Digest::SHA256.new.digest canonical\n Base64.encode64(digest)\n end", "def elementhash\n return @elementHash\n end", "def proteinInfo2hash (xmlRes)\n\n#\t\txmlDoc = Document.new xmlRes\n#\t\tentries = xmlDoc.elements.collect('uniprot/entry') { |ent| ent }\n\t\txmlDoc = Nokogiri::XML(xmlRes)\n\t\tentries = xmlDoc.css('uniprot > entry')\n# just take the very first entry\n\t\tmain_entry = entries.first\n\n\t\trecommended_name = main_entry.css('protein > recommendedName > fullName').collect {\n\t\t\t|node| node.text\n\t\t}\n\t\tsynonyms = main_entry.css('protein > alternativeName > fullName').collect {\n\t\t\t|alt_name| alt_name.text\n\t\t}\n\t\tkeywords = main_entry.css('keyword').collect { |keyw| keyw.text }\n\n\t\torganism = main_entry.css('organism > name').collect { |org|\n\t\t\tif org['type'] == 'scientific' then org.text end\n\t\t}\n\t\tfunction = main_entry.css(\"comment[type='function']\").collect { |func|\n\t\t\tfunc.text\n\t\t}\n\t\tlocation = main_entry.css(\"comment[type='subcellular location'] > subcellularLocation > location\").collect { |loc|\n\t\t\tloc.text\n\t\t}\n\n\t\tmolWeight = nil\n\t\tseqLength = nil\n\t\tseq = nil\n\t\tmain_entry.css(\"/sequence\").collect { |theSeq|\n\t\t\tmolWeight = theSeq.attributes['mass'].value\n\t\t\tseqLength = theSeq.attributes['length'].value\n\t\t\tseq = theSeq.text\n\t\t}\n\n# the very first pdb reference is got. a comparison based on resolution can improve the choice\n\t\tpdbs = main_entry.css(\"dbReference[type='PDB']\").collect { |pdb|\n\t\t\tpdb\n\t\t}\n\t\tpdbNodeMalformed = false\n\t\tpdbs.each { |node|\n\t\t\tresolution = node.css(\"property[type='resolution']\")\n\t\t\tif resolution.nil? || resolution.length == 0 then\n\t\t\t\tpdbNodeMalformed = true\n\t\t\t\tbreak\n\t\t\tend\n\t\t}\n\t\tif pdbs.empty? == false && pdbNodeMalformed == false\n# sort by value resolution to get the element with lowes resolution value\n\t\t\tpdbs = pdbs.sort_by{ |node|\n\t\t\t\tnode.css(\"property[type='resolution']\").first['value']\n\t\t\t}\n\t\tend\n\n\n\t\tpdbResult = ''\n\t\tif pdbs.empty? 
== false\n\t\t\tpdbResult = 'http://www.pdb.org/pdb/explore/explore.do?structureId='\n#\t\t\tpdbResult += pdbs[0].css(\"property[type='resolution']\").first['value']\n\t\t\tpdbResult += pdbs[0].attributes['id'].value\n\t\tend\n\t\thash_result = Hash.new\n\t\thash_result[:target_name] = \"#{recommended_name[0]} (#{organism[0]})\"\n\t\thash_result[:target_type] = 'PROTEIN'\n\t\thash_result[:description] = recommended_name[0]\n\t\thash_result[:synonyms] = synonyms.join('; ')\n\t\thash_result[:organism] = organism[0]\n\t\thash_result[:keywords] = keywords.join('; ')\n\t\thash_result[:cellularLocations] = location.join('; ')\n\t\thash_result[:molecularWeight] = molWeight\n\t\thash_result[:numberOfResidues] = seqLength\n\t\thash_result[:sequence] = seq\n\t\thash_result[:specificFunction] = function.join('; ')\n\t\thash_result[:pdbIdPage] = pdbResult\n\t\thash_result[:theoreticalPi] = nil\n\n\t\thash_result\n\tend", "def actual_hash(element)\n # initialize element hash with element name\n elementHash = '-' + element.name\n\n # add element attributes to the hash\n element.attributes.each { |a| elementHash += \":\" + a[0] + \"=\" + a[1] }\n\n # add element inner text to the hash\n elementHash += ':inner_text='\n\n # append the text if any is specified\n if element.text != nil\n elementHash += element.text.strip\n end\n\n # if the element has children, append an indicator\n if element.elements.size > 0\n elementHash += '>'\n end\n\n # iterate through child elements\n element.elements.each do |childElement|\n # calculate and appent the child element hash\n elementHash += actual_hash(childElement)\n end\n\n # return final actual hash value\n elementHash\n end", "def hexdigest\n @digest.unpack('H*'.freeze).first\n end", "def get_xml_to_hash(url)\n require 'activesupport'\n doc = Nokogiri::XML get(url)\n obj = Object.new.extend(ActiveSupport::CoreExtensions::Hash::Conversions::ClassMethods)\n obj.from_xml doc.to_xml\n # (Hash.from_xml(doc.search('//rubygem').to_xml) || {})['rubygem']\n end", "def get_crypto_tag_hex\n return @crypto_tag if ! @crypto_tag\n @crypto_tag.unpack(\"H*\")\n end", "def extract_uuid(section_hash)\n section_hash['id'].split('@').first\nend", "def hexdigest\n generate_digest if @digest.nil?\n @digest.unpack('H*')[0]\n end", "def get_artifact_sha(coordinates)\n REXML::Document.new(remote.get_artifact_info(coordinates)).elements[\"//sha1\"].text\n end", "def extract_hash_pair(node, key); end", "def get_artifact_sha(node, artifact_location, ssl_verify=true)\n require 'nexus_cli'\n require 'rexml/document'\n config = data_bag_config_for(node, DATA_BAG_NEXUS)\n remote = NexusCli::RemoteFactory.create(config, ssl_verify)\n REXML::Document.new(remote.get_artifact_info(artifact_location)).elements[\"//sha1\"].text\n end", "def create_hash(tag)\n hash = {}\n if tag.exists\n list = tag.find.a.id(\"/OrderID$/\")\n for i in 0...list.length\n hash[list[i].innerText.strip] = i\n end\n end\n\n return hash\n end", "def fingerprint\n text('//fingerprint').upcase\n end", "def sha\n result_hash['sha']\n end", "def get_upload_ticket\n send_method(:get_upload_ticket).at(:upload_ticket).inner_html\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
extract the spacekey and id from the ticket id
def split_spacekey_and_id(key)
  spacekey = key.scan(/^([A-Z]+)/).to_s
  id = key.scan(/([0-9]+)$/)[0].to_s.to_i

  return id, spacekey
end
[ "def extract_contract_id(id)\n /^(\\w+)-\\w+\\b.*/ =~ id\n $1\n end", "def get_ticket id\n\t\t\t\t\tFreshdesk::Api::Client.convert_to_hash( @connection.get TICKETS, id )\n\t\t\t\tend", "def extract_vsys_id(id)\n /^(\\w+-\\w+)\\b.*/ =~ id\n $1\n end", "def getDetails id\n\t\tticket = @tickets.read ['id', 'pid', 'title', 'description', 'tracker', 'creator', 'created', 'status'], ['id', id]\n\t\tticket = ticket[0]\n\tend", "def parse_ticket_info(xml_element)\n id = xml_element.elements['key'].text rescue \"\"\n id, spacekey = split_spacekey_and_id(id) rescue \"\"\n\n link = xml_element.elements['link'].text rescue \"\"\n title = xml_element.elements['title'].text rescue \"\"\n reporter = xml_element.elements['reporter'].text rescue \"\"\n assignee = xml_element.elements['assignee'].text rescue \"\"\n type = xml_element.elements['type'].text rescue \"\"\n priority = xml_element.elements['priority'].text rescue \"\"\n updated = xml_element.elements['updated'].text rescue \"\"\n status = xml_element.elements['status'].text rescue \"\"\n\n return {\n :spacekey => spacekey,\n :id => id,\n :link => link,\n :title => title,\n :reporter => reporter,\n :assignee => assignee,\n :type => type,\n :priority => priority,\n :updated => updated,\n :status => status\n }\n end", "def extract_uuid(section_hash)\n section_hash['id'].split('@').first\nend", "def parse_id(mid)\n tm_assert{ mid.is_a? String }\n ids = mid.split ':'\n tm_assert{ ids.length == 2 }\n ids.map { |id| id.to_sym }\n end", "def get_ticket_details( session_key, ticket_id)\n response_xml = self.call( :get_ticket_details, message: {\n api_key: session_key,\n ticket: ticket_id\n })\n response = IssueCentre::Response.parse( response_xml)\n end", "def extract_id\n selector[:_id]\n end", "def parse_ie_charging_id(payload_data) \n bearer_charging_id = {}\n \n bearer_charging_id[:bearer_charging_id] = payload_data.unpack(\"H*\")[0].hex\n \n return bearer_charging_id\n end", "def obtain_ticket_number\n self.ticket_number = Otrs::Ticket.find(ticket_id).tn\n end", "def get_key_from_result(res)\n res.fetch('id', {}).fetch('S', nil)\n end", "def get_ticket(ticket_id:)\n ZendeskAPI::Ticket.find(client, id: ticket_id)\n end", "def get_row_id(key)\n key.delete '^[0-9]'\n end", "def user_id ; @id.split(/g/).first ; end", "def get_space_name_to_id\n my_spaces = {}\n birst_soap_session do |bc|\n bc.list_spaces.each do |s|\n my_spaces[s[:name]] = s[:id]\n end\n end\n my_spaces\n end", "def space_guid\n params.dig(:context, :space_guid)\n end", "def eventbrite_ticket_id\n eventbrite_event[\"event\"][\"tickets\"].first[\"ticket\"][\"id\"]\n end", "def space_id\n fil_header[:space_id]\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Format fragment operation as string
def format_fragment_operation(fragment_operation, level)
  result = []

  result << indent(level) + format_fragment_operation_signature(fragment_operation)
  result << "#{indent(level)}{"
  result += fragment_operation.__nodes.map { |node| format_node(node, level + 1) }
  result << "#{indent(level)}}"

  result.join("\n")
end
[ "def format_fragment_operation_signature(fragment_operator)\n [\n \"fragment #{fragment_operator.__name}\",\n \"on #{fragment_operator.__type}\",\n format_directives(fragment_operator.__directives, false),\n ].compact.join(' ')\n end", "def decorate_fragment( fragment, name )\n out = \"\\n\\n# BEGIN #{name} ------------ \\n\"\n out << runtime_log( \"Started fragment '#{name}'\" )\n out << fragment\n out << runtime_log( \"Finished fragment '#{name}'\" )\n out << \"\\n\\n# END #{name} ------------\\n\"\nend", "def fragment\n \"#{sql} #{operator}\"\n end", "def format_inline_fragment_signature(inline_fragment)\n [\n '...',\n (inline_fragment.__type ? \"on #{inline_fragment.__type}\" : nil),\n format_directives(inline_fragment.__directives, false),\n ].compact.join(' ')\n end", "def format_fragment_spread(fragment_spread, level)\n fragment_spread_signature = [\n fragment_spread.__name,\n format_directives(fragment_spread.__directives, false),\n ].compact.join(' ')\n\n indent(level) + \"...#{fragment_spread_signature}\"\n end", "def fragment_name; end", "def rb_build_format_string(mne, tokens, field_format)\n fmt_a = []\n\n tokens.each{|t|\n if (field_format.include?(t))\n fmt_a << field_format[t][0]\n else\n fmt_a << t\n end\n }\n\n fmt_s = mne\n\n if (!fmt_a.empty?)\n fmt_s += ' ' + fmt_a.join\n end\n\n return '\"' + fmt_s + '\"'\nend", "def write(fragment); end", "def to_s\n \"#<syntax:#{ @name }>\"\n end", "def c_build_format_string(mne, tokens, field_format)\n fmt_a = []\n\n tokens.each{|t|\n if (field_format.include?(t))\n fmt_a << field_format[t][0]\n else\n fmt_a << t\n end\n }\n\n fmt_s = mne\n\n if (!fmt_a.empty?)\n fmt_s += ' ' + fmt_a.join\n end\n\n return '\"' + fmt_s + '\"'\nend", "def format_ops_hash(op_hash)\n ops_str = ''\n op_hash.each do |key, value|\n ops_str << \"op #{key} #{format_param_hash(value)} \"\n end\n ops_str.strip\n end", "def partial_format(string, substitutions); end", "def print_operation_definition(operation_definition, indent: T.unsafe(nil)); end", "def format(tokens, &b); end", "def separate_fragments(aggregator, last_fragment, fragment)\n output \"\\n\\n\"\n end", "def to_string_interpolation(node_or_interp); end", "def format_endpoint!\n http_methods = endpoint.http_methods.map(&:upcase).join(\" / \")\n path = endpoint.path.gsub('(.:format)', '.json')\n output << md_code(\"#{http_methods} #{path}\")\n end", "def to_s\n (segments || []).inject('') do |str,seg|\n str << (seg.is_a?(Symbol) ? \":#{seg}\" : seg)\n end\n end", "def to_s\n \"#<continuation>\"\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Format fragment operation signature as string
def format_fragment_operation_signature(fragment_operator)
  [
    "fragment #{fragment_operator.__name}",
    "on #{fragment_operator.__type}",
    format_directives(fragment_operator.__directives, false),
  ].compact.join(' ')
end
[ "def format_inline_fragment_signature(inline_fragment)\n [\n '...',\n (inline_fragment.__type ? \"on #{inline_fragment.__type}\" : nil),\n format_directives(inline_fragment.__directives, false),\n ].compact.join(' ')\n end", "def format_signature(path, http_method, timestamp)\n http_method.upcase + '&' + path.encode('UTF-8') + '&' + timestamp.to_s\n # returns: String::signature\nend", "def signature_base_string\n \"#{method.to_s.upcase}&#{full_uri}&#{normalise_signature_params}\"\n end", "def signature_string\n @signature_string\n end", "def get_signature_string(params, secret)\n return secret + extract_param_values(params).sort.join(\"\")\n end", "def string_to_sign\n if content_type.nil?\n \"#{method.upcase}\\n#{host.downcase}\\n#{uri}#{canonicalized_params}\"\n else\n \"#{method.upcase}\\n#{body_md5}\\n#{content_type}\\n#{host.downcase}\\n#{uri}#{canonicalized_params}\"\n end\n end", "def redirect_signature_string(params)\n params[:authResult].to_s + params[:pspReference].to_s + params[:merchantReference].to_s + \n params[:skinCode].to_s + params[:merchantReturnData].to_s\n end", "def format_fragment_operation(fragment_operation, level)\n result = []\n\n result << indent(level) + format_fragment_operation_signature(fragment_operation)\n result << \"#{indent(level)}{\"\n result += fragment_operation.__nodes.map { |node| format_node(node, level + 1) }\n result << \"#{indent(level)}}\"\n\n result.join(\"\\n\")\n end", "def display_signature_params\n params = method_signature_params\n params << 'arguments = {}'\n \"(#{params.join(', ')})\"\n end", "def format_field_signature(field)\n field_alias = field.__alias ? \"#{field.__alias}: \" : ''\n field_name = field.__name.to_s\n field_arguments = field.__arguments.empty? ? '' : format_arguments(field.__arguments, false)\n field_directives = field.__directives.empty? ? '' : \" #{format_directives(field.__directives, false)}\"\n\n [\n field_alias,\n field_name,\n field_arguments,\n field_directives,\n ].join\n end", "def getSignature(id)\n if not @procedures.has_key?(id)\n return \"#{id} doesn't exist\"\n else\n info = @procedures[id]\n msg = \"The signature is: #{info[:return_type]} #{info[:id]}(\"\n temp = []\n info[:args].each { |arg|\n str = \"#{arg[:type]}\"\n if arg[:ref]\n if arg[:array]\n str = str + \" [\" + arg[:size] + \"]\"\n else\n str = str + \" &\"\n end\n end\n temp.push(str)\n }\n msg = msg + temp.join(\",\") + \")\"\n return msg\n end\n end", "def signature\n \"#{round}#{result}#{opponent_id}\"\n end", "def signature_base_string\n s = @http_method.dup\n s << '&'\n s << percent_encode(base_url)\n s << '&'\n s << percent_encode(request_params.to_s)\n s << '&'\n s << percent_encode(nonce.to_s)\n\n s\n end", "def to_s(name = nil)\n name ||= @impl_name\n @signature.to_s(name)\n end", "def active_signature; end", "def signature_help; end", "def string_to_sign\n [\n http_method,\n headers.values_at('content-md5', 'content-type').join(\"\\n\"),\n signing_string_date,\n canonicalized_headers,\n canonicalized_resource,\n ].flatten.compact.join(\"\\n\")\n end", "def get_message_signature(message); end", "def uri_with_signature\n separator = if request_parser.query_params? then '&' else '?' end\n encoded_signature = signature.extend(UriSigner::Helpers::String).escaped\n \"%s%s_signature=%s\" % [self.uri, separator, encoded_signature]\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
GET /admin/rights/new GET /admin/rights/new.json
def new
  @right = Right.new

  respond_to do |format|
    format.html # new.html.erb
    format.json { render json: @right }
  end
end
[ "def new\n @access_right = AccessRight.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @access_right }\n end\n end", "def new\n \n @roles_and_permission = @roles.roles_and_permission.new\n \n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @roles_and_permission }\n end\n end", "def new\n @new = true\n @administrativo = Administrativo.new\n @administrativo.build_user\n atributos\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @administrativo }\n end\n end", "def new\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @global_role }\n end\n end", "def new\n @restriction = Restriction.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @restriction }\n end\n end", "def new\n @admin_permission = Permission.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @admin_permission }\n end\n end", "def new\n @role = Role.new\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @role }\n end\n end", "def new\n @role = Role.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @role }\n end\n end", "def new\n @auth_cms_role = CmsRole.new\n\n breadcrumbs.add I18n.t(\"helpers.titles.#{current_action}\", :model => Model_class.model_name.human), new_auth_cms_role_path\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @auth_cms_role }\n end\n end", "def new\n @lab_permissions_role = LabPermissionsRole.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @lab_permissions_role }\n end\n end", "def new\n \n # Only allows admin to create new grants.\n if current_user.admin == true\n @grant = Grant.new\n \n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @grant }\n end\n else\n respond_to do |format|\n format.html { redirect_to dashboard_path, alert: 'Must be admin to create grant.' }\n format.json { render json: @grant }\n end\n end\n end", "def new\n require_privilege(Privilege::PERM_SET)\n @privilege = Privilege.new(:role_id => params[:role_id])\n @target_type_list = Privilege::TARGET_TYPES\n @action_list = Privilege::ACTIONS\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @privilege }\n end\n end", "def new\n @privacy_level = PrivacyLevel.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @privacy_level }\n end\n end", "def new\n authorize Admin::AuthoringSite\n @admin_authoring_site = Admin::AuthoringSite.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @admin_authoring_site }\n end\n end", "def new\n @permission = Permission.new\n respond_to do |format|\n format.html # new.html.erb\n format.json { render :json => @permission }\n end\n end", "def new\n authorize! 
:index, @user, :message => 'Not authorized as an administrator.'\n @specialist = Specialist.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @specialist }\n end\n end", "def new\n @permission_resource = PermissionResource.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @permission_resource }\n end\n end", "def create\n @right = Right.new(right_params)\n\n respond_to do |format|\n if @right.save\n format.html { redirect_to rights_url, notice: \"Right: #{I18n.t('helpers.saved')}\" }\n format.json { render :show, status: :created, location: @right }\n else\n format.html { render :new }\n format.json { render json: @right.errors, status: :unprocessable_entity }\n end\n end\n end", "def new\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @course_role }\n end\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
PUT /admin/rights/1 PUT /admin/rights/1.json
def update
  @right = Right.find(params[:id])

  respond_to do |format|
    if @right.update_attributes(params[:right])
      format.html { redirect_to admin_right_path(@right), notice: 'Right was successfully updated.' }
      format.json { head :no_content }
    else
      format.html { render action: "edit" }
      format.json { render json: @right.errors, status: :unprocessable_entity }
    end
  end
end
[ "def update\n respond_to do |format|\n if @right.update(right_params)\n format.html { redirect_to rights_url, notice: \"Right: #{I18n.t('helpers.saved')}\" }\n format.json { render :show, status: :ok, location: @right }\n else\n format.html { render :edit }\n format.json { render json: @right.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @access_right = AccessRight.find(params[:id])\n\n respond_to do |format|\n if @access_right.update_attributes(params[:access_right])\n format.html { redirect_to @access_right, notice: 'Access right was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @access_right.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @rights_holder.update(rights_holder_params)\n format.html { redirect_to @rights_holder, notice: 'Rights holder was successfully updated.' }\n format.json { render :show, status: :ok, location: @rights_holder }\n else\n format.html { render :edit }\n format.json { render json: @rights_holder.errors, status: :unprocessable_entity }\n end\n end\n end", "def rights=(new_rights)\n @rights = new_rights\n end", "def update\n @right = Right.find(params[:id])\n respond_to do |format|\n if @right.update_attributes(params[:right])\n flash[:notice] = 'Right was successfully updated.'\n format.html { redirect_to(@right) }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @right.errors, :status => :unprocessable_entity }\n end\n end\n end", "def edit\n authorize! :update, @grading_level1\n end", "def update\n @auth_cms_role = CmsRole.find(params[:id])\n @auth_cms_role.set_cms_role_permits_list(params[\"cms_role\"][\"cms_role_permit_ids\"]) if params[\"cms_role\"][\"cms_role_permit_ids\"].present?\n @auth_cms_role.set_assign_permits_list(params[\"cms_role\"][\"assign_permit_ids\"]) if params[\"cms_role\"][\"assign_permit_ids\"].present?\n\n @auth_cms_role.name = params[\"cms_role\"][\"name\"]\n\n respond_to do |format|\n if @auth_cms_role.save\n format.html { redirect_to auth_cms_role_path(@auth_cms_role), notice: 'Cms role was successfully updated.' 
}\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @auth_cms_role.errors, status: :unprocessable_entity }\n end\n end\n end", "def admin_edit\n org = Org.find(params[:id])\n authorize org\n languages = Language.all.order('name')\n org.links = { org: [] } unless org.links.present?\n render 'admin_edit', locals: { org: org, languages: languages, method: 'PUT',\n url: admin_update_org_path(org) }\n end", "def userupdate\n respond_to do |format|\n if @right.update(right_params)\n format.html { redirect_to user_rights_path(@actuser), notice: \"Right: #{I18n.t('helpers.saved')}\" }\n format.json { render :show, status: :ok, location: @right }\n else\n format.html { render :useredit }\n format.json { render json: @right.errors, status: :unprocessable_entity }\n end\n end\n end", "def admin_edit\n org = Org.find(params[:id])\n authorize org\n languages = Language.all.order('name')\n org.links = { org: [] } if org.links.blank?\n render 'admin_edit', locals: { org: org, languages: languages, method: 'PUT',\n url: admin_update_org_path(org) }\n end", "def update\n @tree_node_ac_rights = TreeNodeAcRight.find(params[:id])\n\n respond_to do |format|\n if @tree_node_ac_rights.update_attributes(params[:tree_node_ac_rights])\n flash[:notice] = 'TreeNodeAcRights was successfully updated.'\n format.html { redirect_to(admin_tree_node_tree_node_permission_path(@tree_node_id, @tree_node_ac_rights)) }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @tree_node_ac_rights.errors, :status => :unprocessable_entity }\n end\n end\n end", "def update\n @occasion_type = OccasionType.with_permissions_to(:update).find(params[:id])\n\n respond_to do |format|\n if @occasion_type.update_attributes(params[:occasion_type])\n flash[:notice] = 'Tapahtumatyyppin tiedot päivitetty.'\n format.html { redirect_to(occasion_type_url) } \n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @occasion_type.errors, :status => :unprocessable_entity }\n end\n end\n end", "def update\n @tree_node_ac_rights = TreeNodeAcRight.find(params[:id])\n\n respond_to do |format|\n if @tree_node_ac_rights.update_attributes(params[:tree_node_ac_rights])\n flash[:notice] = 'TreeNodeAcRights was successfully updated.'\n format.html { redirect_to(admin_tree_node_ac_right_path(@tree_node_ac_rights)) }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @tree_node_ac_rights.errors, :status => :unprocessable_entity }\n end\n end\n end", "def permissions_update\n authorize @member\n @member.change_roles params[:permission].nil? ? [] : params[:permission].map {|k, v| k}\n respond_to do |format|\n format.html { redirect_to staff_member_path(@member), notice: 'The permissions were successfully updated.' }\n end\n end", "def change_role\n authorize @user\n @user.update!(role_params)\n json_response({message: \"Role changed successfully\"})\n end", "def add_right\n role = Role.find(params[:id])\n\tright = Right.find(params[:right_id])\n role.rights << right\n redirect_to roles_rights_url\n\n end", "def update\n # this action is not provided for partyroles\n end", "def update\n\t\tauthorize! 
:update, Restriccion\n @concepto_gastos = ConceptoGasto.all\n respond_to do |format|\n if @restriccion.update(restriccion_params)\n format.html { redirect_to gestionar_restricciones_path }#@restriccion, notice: 'Restriccion fue actualizado satisfactoriamente.' }\n #format.json { render :show, status: :ok, location: @restriccion }\n else\n format.html { render :edit }\n format.json { render json: @restriccion.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n authorize @role, :edit?\n respond_to do |format|\n if @role.update(api_v2_role_params)\n format.html { render :show, notice: \"Role was successfully updated.\" }\n format.json { render json: @role, status: :ok }\n else\n format.html { render :edit }\n format.json { render json: @role.errors, status: :unprocessable_entity }\n end\n end\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
DELETE /admin/rights/1 DELETE /admin/rights/1.json
def destroy
  @right = Right.find(params[:id])
  @right.destroy

  respond_to do |format|
    format.html { redirect_to admin_rights_url }
    format.json { head :no_content }
  end
end
[ "def destroy\n @right.destroy\n respond_to do |format|\n format.html { redirect_to rights_url, notice: \"Right: #{I18n.t('helpers.deleted')}\" }\n format.json { head :no_content }\n end\n end", "def destroy\n @privilege.destroy\n respond_to do |format|\n format.html { redirect_to privileges_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @privilege_level.destroy\n respond_to do |format|\n format.html { redirect_to privilege_levels_url }\n format.json { head :no_content }\n end\n end", "def delete_guest_access_portal(args = {}) \n delete(\"/guestaccess.json/gap/#{args[:portalId]}\", args)\nend", "def destroy\n @right.destroy\n respond_to do |format|\n format.html { redirect_to rights_url, notice: 'Right was successfully destroyed.' }\n format.json { head :no_content }\n end\n end", "def destroy\n @tree_node_ac_rights = TreeNodeAcRight.find(params[:id])\n @tree_node_ac_rights.destroy\n\n respond_to do |format|\n format.html { redirect_to admin_tree_node_tree_node_permissions_path }\n format.xml { head :ok }\n end\n end", "def destroy\n @access.destroy\n respond_to do |format|\n format.html { redirect_to accesses_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @role_privilege.destroy\n respond_to do |format|\n format.html { redirect_to role_privileges_url, notice: 'Veza je usšešno obrisana.' }\n format.json { head :no_content }\n end\n end", "def userdestroy\n @right.destroy\n respond_to do |format|\n format.html { redirect_to user_rights_path(@actuser), notice: \"Right: #{I18n.t('helpers.deleted')}\" }\n format.json { head :no_content }\n end\n end", "def destroy\n @level_permission.destroy\n respond_to do |format|\n format.html { redirect_to level_permissions_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @rights_holder.destroy\n respond_to do |format|\n format.html { redirect_to rights_holders_url, notice: 'Rights holder was successfully destroyed.' }\n format.json { head :no_content }\n end\n end", "def destroy\n @tree_node_ac_rights = TreeNodeAcRight.find(params[:id])\n @tree_node_ac_rights.destroy\n\n respond_to do |format|\n format.html { redirect_to admin_tree_node_ac_rights_path }\n format.xml { head :ok }\n end\n end", "def destroy\n @readonly_administrator = ReadonlyAdministrator.find(params[:id])\n @readonly_administrator.destroy\n head :no_content\n end", "def destroy\n authorize! :read_admin, User\n @typeresid.destroy\n respond_to do |format|\n format.html { redirect_to typeresids_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @admin_permission.destroy\n respond_to do |format|\n format.html { redirect_to admin_permissions_url }\n format.json { head :no_content }\n end\n end", "def delete_tenant_circle(args = {}) \n delete(\"/tenantcircles.json/#{args[:circleId]}\", args)\nend", "def destroy\n @accesslevel = Accesslevel.find(params[:id])\n @accesslevel.destroy\n\n respond_to do |format|\n format.html { redirect_to(accesslevels_url) }\n format.xml { head :ok }\n end\n end", "def destroy\n authorize! :destroy, @admin_system_admin\n @admin_system_admin.destroy\n respond_to do |format|\n format.html { redirect_to admin_system_admins_url }\n format.json { head :no_content }\n end\n end", "def delete_mutant(m_id)\n\n request = API_URL + \"mutants/#{m_id}\"\n @response = RestClient.delete request\n log_mutant(\"deleted\")\n return\nend" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
GET /clasifications or /clasifications.json
def index
  @clasifications = Clasification.all
end
[ "def classifications(classification)\n params = {\n classification: classification\n }.compact\n\n _get(\"/account/classifications\", params) { |json| json }\n end", "def show\n @clasification = Clasification.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @clasification }\n end\n end", "def classification\n id = params[:id]\n uri = \"/repositories/#{params[:repo_id]}/classifications/#{id}\"\n response = JSONModel::HTTP::get_json(\"/search/published_tree\", :node_uri => uri)\n\n tree = ASUtils.json_parse(response['tree_json'])\n\n data = tree['direct_children']\n .map {|child| JSONModel(:classification_term).id_for(child['record_uri'])}\n .map {|id| JSONModel(:classification_term).find(id, :repo_id => params[:repo_id], \"resolve[]\" => [\"linked_records\"]) }\n .map {|term|\n term.to_hash.merge({:container_children => term['linked_records']\n .map {|linked_record| linked_record['_resolved']['instances'].map{|instance| instance.merge({'dates' => linked_record['_resolved']['dates'], 'resource_title' => linked_record['_resolved']['title'], 'resource_data' => JSONModel.parse_reference(linked_record['ref'])})} }\n .flatten\n .reject{|instance| instance['instance_type'] == 'digital_object'}\n .map {|instance|\n result = {:name => instance['resource_title'], :resource_data => instance['resource_data']}\n if date = instance['dates'][0]\n if date['expression']\n result[:date] = date['expression']\n elsif date['begin'] || date['end']\n result[:date] = [date['begin'], date['end']].compact.join('-')\n end\n end\n\n container = instance['container'] || {}\n if(container['type_1'] && container['indicator_1'])\n result[:container_1] = I18n.t(\"enumerations.container_type.#{container[\"type_1\"]}\") + \" #{container['indicator_1']}\"\n end\n\n if(container['type_2'] && container['indicator_2'])\n result[:container_2] = I18n.t(\"enumerations.container_type.#{container[\"type_2\"]}\") + \" #{container['indicator_2']}\"\n end\n\n result\n }\n })\n }\n render :json => ASUtils.to_json(data)\n end", "def classifications\n if params[:id].downcase == \"any\"\n out = ShelfListing.all.pluck(:classification).uniq.to_a.sort\n else\n out = ShelfListing.where(classification_system: params[:id]).pluck(:classification).uniq.to_a.sort\n end\n render json: [\"Any\"] + out\n end", "def index\n @classifieds = Classified.all\n \n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @classifieds }\n end\n end", "def index\n @klasses = @organisation.klasses\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @klasses }\n end\n end", "def search_classes\n mb = MindBodyAPI.new\n @staff_classes = mb.get_staff_classes(staff_classes_params)\n json_response(@staff_classes, nil)\n end", "def show\n @klass = @organisation.klasses.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @klass }\n end\n end", "def subclassifications\n if params[:id].downcase == \"any\"\n render json: [\"Any\"] + ShelfListing.all.pluck(:subclassification).uniq.to_a\n else\n render json: [\"Any\"] + ShelfListing.where(classification: params[:id]).pluck(:subclassification).uniq.to_a\n end\n end", "def show\n @classifier = Classifier.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @classifier }\n end\n end", "def index\n @citizenship_classes = @grantee.citizenship_classes.all\n\n respond_to do |format|\n format.html # index.html.erb\n 
format.json { render json: @citizenship_classes }\n end\n end", "def index\n @vtclasses = current_user.vtclasses.order(:campus, :subject_code, :course_number)\n # @fetcher = VtclassFetch.new\n # raise @fetcher.fetch_classes(\"CS\",201409,\"0\").to_yaml\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @vtclasses }\n end\n end", "def index \n @discipline_class = DisciplineClass.find(params[:discipline_class_id])\n @class_records = ClassRecord.where(:discipline_class_id => params[:discipline_class_id])\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render :json => @class_records }\n end\n end", "def index\n @uni_classes = UniClass.search(params)\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @uni_classes }\n end\n end", "def show\n @jclsclass = Jclsclass.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @jclsclass }\n end\n end", "def index\n if params[:discipline_class_id]\n @discipline_class = DisciplineClass.find(params[:discipline_class_id])\n @class_teachings = ClassTeaching.where(:discipline_class_id => params[:discipline_class_id]).paginate(:page => params[:page], :per_page => 10)\n end\n\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render :json => @class_teachings }\n end\n end", "def show\n @clazz = Clazz.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @clazz }\n end\n end", "def index\n @clasificaciones = Clasificacion.all\n end", "def index\n @class_resources = ClassResource.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @class_resources }\n end\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
POST /clasifications or /clasifications.json
def create
  @clasification = Clasification.new(clasification_params)

  respond_to do |format|
    if @clasification.save
      format.html { redirect_to @clasification, notice: "Clasification was successfully created." }
      format.json { render :show, status: :created, location: @clasification }
    else
      format.html { render :new, status: :unprocessable_entity }
      format.json { render json: @clasification.errors, status: :unprocessable_entity }
    end
  end
end
[ "def create\n @clasification = Clasification.new(params[:clasification])\n\n respond_to do |format|\n if @clasification.save\n format.html { redirect_to @clasification, notice: 'Clasification was successfully created.' }\n format.json { render json: @clasification, status: :created, location: @clasification }\n else\n format.html { render action: \"new\" }\n format.json { render json: @clasification.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n Rails.logger.debug \"[ClassificationsController.create] params.class: #{params.class}, params: #{params}\"\n @classifaction = Classification.new transaction_data: params\n @classifaction.classifiers << StatusCodeClassifier.classify( @classifaction.transaction_data )\n @classifaction.classify\n\n if @classifaction.save\n render json: @classifaction.as_jsonapi, status: :created\n else\n render json: { error: 'oops' }, status: 500\n end\n end", "def submit_classification(params)\n\n require 'uri'\n require \"net/http\"\n\n uri = URI(@classifications_endpoint)\n \n req = Net::HTTP::Post.new(uri.path, {'BOT_AUTH' => ENV['SCRIBE_BOT_TOKEN']})\n req.body = params.to_params \n http = Net::HTTP.new(uri.host, uri.port)\n\n response = http.start {|http| http.request(req) }\n\n begin\n JSON.parse response.body\n rescue\n nil\n end\n end", "def create\n @classtype = Classtype.new(classtype_params)\n\n respond_to do |format|\n if @classtype.save\n format.html { redirect_to @classtype, notice: 'Classtype was successfully created.' }\n format.json { render :show, status: :created, location: @classtype }\n else\n format.html { render :new }\n format.json { render json: @classtype.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @clasificacion = Clasificacion.new(clasificacion_params)\n\n respond_to do |format|\n if @clasificacion.save\n format.html { redirect_to @clasificacion, notice: 'Clasificacion was successfully created.' }\n format.json { render :show, status: :created, location: @clasificacion }\n else\n format.html { render :new }\n format.json { render json: @clasificacion.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @classification = Classification.new(classification_params)\n\n respond_to do |format|\n if @classification.save\n format.html { redirect_to @classification }\n format.json { render :show, status: :created, location: @classification }\n else\n format.html { render :new }\n format.json { render json: @classification.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @class = Klass.new(params[:klass])\n\n respond_to do |format|\n if @class.save\n format.html { redirect_to class_path(@class), notice: 'La classe a bien été créée.' }\n format.json { render json: @class, status: :created, location: @class }\n else\n format.html do\n @trainings = Training.all\n render action: \"new\"\n end\n format.json { render json: @class.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @classified = Classified.new(params[:classified])\n\n respond_to do |format|\n if @classified.save\n format.html { redirect_to @classified, notice: 'Classified was successfully created.' 
}\n format.json { render json: @classified, status: :created, location: @classified }\n else\n format.html { render action: \"new\" }\n format.json { render json: @classified.errors, status: :unprocessable_entity }\n end\n end\n end", "def classifications(classification)\n params = {\n classification: classification\n }.compact\n\n _get(\"/account/classifications\", params) { |json| json }\n end", "def create\n @classified = Classified.new(classified_params)\n respond_to do |format|\n if @classified.save\n format.html { redirect_to @classified, notice: 'Classified was successfully created.' }\n format.json { render :show, status: :created, location: @classified }\n else\n format.html { render :new }\n format.json { render json: @classified.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @classification = Classification.new(admin_classification_params)\n\n respond_to do |format|\n if @classification.save\n format.html { redirect_to admin_classifications_path, notice: 'Classification was successfully created.' }\n format.json { render :show, status: :created, location: @classification }\n else\n format.html { render :new }\n format.json { render json: @classification.errors, status: :unprocessable_entity }\n end\n end\n end", "def new\n @clasification = Clasification.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @clasification }\n end\n end", "def create\n @article_clasification = ArticleClasification.new(article_clasification_params)\n\n respond_to do |format|\n if @article_clasification.save\n format.html { redirect_to @article_clasification, notice: 'Article clasification was successfully created.' }\n format.json { render :show, status: :created, location: @article_clasification }\n else\n format.html { render :new }\n format.json { render json: @article_clasification.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @clasificacion = Clasificacion.new(params[:clasificacion])\n\n respond_to do |format|\n if @clasificacion.save\n format.html { redirect_to(@clasificacion, :notice => 'Clasificacion was successfully created.') }\n format.xml { render :xml => @clasificacion, :status => :created, :location => @clasificacion }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @clasificacion.errors, :status => :unprocessable_entity }\n end\n end\n end", "def create\n @faculty_classification = FacultyClassification.new(params[:faculty_classification])\n\n respond_to do |format|\n if @faculty_classification.save\n format.html { redirect_to faculty_classifications_path, notice: 'Faculty classification was successfully created.' }\n format.json { render json: @faculty_classification, status: :created, location: @faculty_classification }\n else\n format.html { render action: \"new\" }\n format.json { render json: @faculty_classification.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @class = Classe.new(params[:class])\n\n respond_to do |format|\n if @class.save\n format.html { redirect_to @class, notice: 'Classe was successfully created.' 
}\n format.json { render json: @class, status: :created, location: @class }\n else\n format.html { render action: \"new\" }\n format.json { render json: @class.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @clasification.update(clasification_params)\n format.html { redirect_to @clasification, notice: \"Clasification was successfully updated.\" }\n format.json { render :show, status: :ok, location: @clasification }\n else\n format.html { render :edit, status: :unprocessable_entity }\n format.json { render json: @clasification.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @classs = Classs.new(classs_params)\n\n respond_to do |format|\n if @classs.save\n format.html { redirect_to @classs, notice: 'Classs was successfully created.' }\n format.json { render :show, status: :created, location: @classs }\n else\n format.html { render :new }\n format.json { render json: @classs.errors, status: :unprocessable_entity }\n end\n end\n end", "def create\n @classname = Classname.new(classname_params)\n\n respond_to do |format|\n if @classname.save\n format.html { redirect_to @classname, notice: 'Classname was successfully created.' }\n format.json { render :show, status: :created, location: @classname }\n else\n format.html { render :new }\n format.json { render json: @classname.errors, status: :unprocessable_entity }\n end\n end\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
PATCH/PUT /clasifications/1 or /clasifications/1.json
def update
    respond_to do |format|
      if @clasification.update(clasification_params)
        format.html { redirect_to @clasification, notice: "Clasification was successfully updated." }
        format.json { render :show, status: :ok, location: @clasification }
      else
        format.html { render :edit, status: :unprocessable_entity }
        format.json { render json: @clasification.errors, status: :unprocessable_entity }
      end
    end
  end
[ "def update\n @clasification = Clasification.find(params[:id])\n\n respond_to do |format|\n if @clasification.update_attributes(params[:clasification])\n format.html { redirect_to @clasification, notice: 'Clasification was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @clasification.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @classtype.update(classtype_params)\n format.html { redirect_to @classtype, notice: 'Classtype was successfully updated.' }\n format.json { render :show, status: :ok, location: @classtype }\n else\n format.html { render :edit }\n format.json { render json: @classtype.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @jclsclass = Jclsclass.find(params[:id])\n\n respond_to do |format|\n if @jclsclass.update_attributes(params[:jclsclass])\n format.html { redirect_to @jclsclass, notice: 'Jclsclass was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @jclsclass.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @clazz = Clazz.find(params[:id])\n\n respond_to do |format|\n if @clazz.update_attributes(params[:clazz])\n format.html { redirect_to @clazz, notice: 'Clazz was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @clazz.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @clasificacion.update(clasificacion_params)\n format.html { redirect_to @clasificacion, notice: 'Clasificacion was successfully updated.' }\n format.json { render :show, status: :ok, location: @clasificacion }\n else\n format.html { render :edit }\n format.json { render json: @clasificacion.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @classification.update(classification_params)\n format.html { redirect_to @classification, notice: 'Classification was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @classification.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @classtable = Classtable.find(params[:id])\n\n respond_to do |format|\n if @classtable.update_attributes(params[:classtable])\n format.html { redirect_to @classtable, notice: 'Classtable was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @classtable.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @classified = Classified.find(params[:id])\n\n respond_to do |format|\n if @classified.update_attributes(params[:classified])\n format.html { redirect_to @classified, notice: 'Classified was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @classified.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @klass = @organisation.klasses.find(params[:id])\n\n respond_to do |format|\n if @klass.update_attributes(params[:klass])\n format.html { redirect_to [@organisation, @klass], notice: 'Klass was successfully updated.' 
}\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @klass.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @classification.update(classification_params)\n format.html { redirect_to @classification, notice: t('controller.successfully_updated', model: t('activerecord.models.classification')) }\n format.json { head :no_content }\n else\n @classification_types = ClassificationType.all\n format.html { render action: \"edit\" }\n format.json { render json: @classification.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @classified.update(classified_params)\n format.html { redirect_to @classified, notice: 'Classified was successfully updated.' }\n format.json { render :show, status: :ok, location: @classified }\n else\n format.html { render :edit }\n format.json { render json: @classified.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @klass.update(klass_params)\n format.html { redirect_to my_classes_path, notice: 'Klass was successfully updated.' }\n format.json { render :show, status: :ok, location: @klass }\n else\n format.html { render :edit }\n format.json { render json: @klass.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @classified = @user.classifieds.find(params[:id])\n\n respond_to do |format|\n if @classified.update_attributes(params[:classified])\n format.html { redirect_to @classified, notice: 'Classified was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @classified.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @classname.update(classname_params)\n format.html { redirect_to @classname, notice: 'Classname was successfully updated.' }\n format.json { render :show, status: :ok, location: @classname }\n else\n format.html { render :edit }\n format.json { render json: @classname.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @objeto.update(clasificacion_params)\n format.html { redirect_to @objeto, notice: 'Clasificacion was successfully updated.' }\n format.json { render :show, status: :ok, location: @objeto }\n else\n format.html { render :edit }\n format.json { render json: @objeto.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @clasificacion = Clasificacion.find(params[:id])\n\n respond_to do |format|\n if @clasificacion.update_attributes(params[:clasificacion])\n format.html { redirect_to(@clasificacion, :notice => 'Clasificacion was successfully updated.') }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @clasificacion.errors, :status => :unprocessable_entity }\n end\n end\n end", "def update\n respond_to do |format|\n if @article_clasification.update(article_clasification_params)\n format.html { redirect_to @article_clasification, notice: 'Article clasification was successfully updated.' 
}\n format.json { render :show, status: :ok, location: @article_clasification }\n else\n format.html { render :edit }\n format.json { render json: @article_clasification.errors, status: :unprocessable_entity }\n end\n end\n end", "def update\n @classification = Classification.find(params[:id])\n\n respond_to do |format|\n if @classification.update_attributes(params[:classification])\n format.html { redirect_to(@classification, :notice => 'Classification was successfully updated.') }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @classification.errors, :status => :unprocessable_entity }\n end\n end\n end", "def update\n @clclass = Clclass.find(params[:id])\n\n respond_to do |format|\n if @clclass.update_attributes(params[:clclass])\n format.html { redirect_to @clclass, notice: 'Clclass was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @clclass.errors, status: :unprocessable_entity }\n end\n end\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
DELETE /clasifications/1 or /clasifications/1.json
def destroy
    @clasification.destroy

    respond_to do |format|
      format.html { redirect_to clasifications_url, notice: "Clasification was successfully destroyed." }
      format.json { head :no_content }
    end
  end
[ "def destroy\n @clasification = Clasification.find(params[:id])\n @clasification.destroy\n\n respond_to do |format|\n format.html { redirect_to clasifications_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @classtable = Classtable.find(params[:id])\n @classtable.destroy\n\n respond_to do |format|\n format.html { redirect_to classtables_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @classification = Classification.find(params[:id])\n @classification.destroy\n\n respond_to do |format|\n format.html { redirect_to classifications_url, notice: t('controller.successfully_deleted', model: t('activerecord.models.classification')) }\n format.json { head :no_content }\n end\n end", "def destroy\n @classification = Classification.find(params[:id])\n @classification.destroy\n\n respond_to do |format|\n format.html { redirect_to(classifications_url) }\n format.xml { head :ok }\n end\n end", "def destroy\n @clazz = Clazz.find(params[:id])\n @clazz.destroy\n\n respond_to do |format|\n format.html { redirect_to clazzes_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @classtype.destroy\n respond_to do |format|\n format.html { redirect_to classtypes_url, notice: 'Classtype was successfully destroyed.' }\n format.json { head :no_content }\n end\n end", "def destroy\n @classification.destroy\n respond_to do |format|\n format.html { redirect_to admin_classifications_url, notice: 'Classification was successfully destroyed.' }\n format.json { head :no_content }\n end\n end", "def destroy\n @classified = Classified.find(params[:id])\n @classified.destroy\n\n respond_to do |format|\n format.html { redirect_to classifieds_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @jclsclass = Jclsclass.find(params[:id])\n @jclsclass.destroy\n\n respond_to do |format|\n format.html { redirect_to jclsclasses_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @clasificacion.destroy\n respond_to do |format|\n format.html { redirect_to clasificaciones_url, notice: 'Clasificacion was successfully destroyed.' 
}\n format.json { head :no_content }\n end\n end", "def destroy\n @clclass = Clclass.find(params[:id])\n @clclass.destroy\n\n respond_to do |format|\n format.html { redirect_to clclasses_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @clasificacion = Clasificacion.find(params[:id])\n @clasificacion.destroy\n\n respond_to do |format|\n format.html { redirect_to(clasificaciones_url) }\n format.xml { head :ok }\n end\n end", "def destroy\n @class = Classe.find(params[:id])\n @class.destroy\n\n respond_to do |format|\n format.html { redirect_to classes_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @classifier = Classifier.find(params[:id])\n @classifier.destroy\n\n respond_to do |format|\n format.html { redirect_to classifiers_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @item_classification = ItemClassification.find(params[:id])\n @item_classification.destroy\n\n respond_to do |format|\n format.html { redirect_to item_classifications_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @class.destroy\n\n respond_to do |format|\n format.html { redirect_to classes_path }\n format.json { head :ok }\n end\n end", "def destroy\n @company_classification.destroy\n respond_to do |format|\n format.html { redirect_to company_classifications_url }\n format.json { head :no_content }\n end\n end", "def destroy\n @article_clasification.destroy\n respond_to do |format|\n format.html { redirect_to article_clasifications_url, notice: 'Article clasification was successfully destroyed.' }\n format.json { head :no_content }\n end\n end", "def destroy\n @classification_type.destroy\n\n respond_to do |format|\n format.html { redirect_to classification_types_url, notice: t('controller.successfully_deleted', model: t('activerecord.models.classification_type')) }\n format.json { head :no_content }\n end\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Check every 0.5 seconds to see if block.call returns true. nil is considered a failure. Give up after 30 seconds.
def wait_true max_wait=30, interval=0.5, &block
    max_wait = 1 if max_wait <= 0
    result = nil
    timeout max_wait do
      until (result = begin; block.call; rescue; end)
        sleep interval
      end
    end
    result
  end
[ "def keep_trying_till_true timeout = 30\n t_begin = Time.now\n loop do\n if yield\n break\n elsif (Time.now - t_begin) > timeout\n fail \"Timeout after trying for #{timeout} seconds\"\n else\n sleep 1\n end\n end\nend", "def keep_trying_till_true timeout = 30\n t_begin = Time.now\n delay = 10\n loop do\n if yield\n break\n elsif (Time.now - t_begin) > timeout\n fail \"Timeout after trying for #{timeout} seconds\"\n else\n sleep delay\n end\n delay += 1\n end\nend", "def wait_until_true timeout = 5\n begin_t = Time.now\n\n until yield\n return :timeout if Time.now - begin_t > timeout\n sleep 0.1\n end\n\n :completed\n end", "def timeout_check_equal(duration, expected, &block)\n execute_ok = false\n duration.times do\n sleep(1)\n text = instance_eval(&block)\n execute_ok = true and break if (text == expected)\n end\n execute_ok.should == true\n end", "def wait_block(last_block)\n time = Time.now\n block_median_time = block_median_time_env\n block_timeout_time = block_timeout_time_env\n loop do\n return true if last_block != block_get\n return false if Time.now - time > block_timeout_time\n # sleep 0.005 # TODO: probably around 5ms is the lowest\n sleep block_median_time # 10ms - good amount for speed (LAN) deployments - 50-100ms - good amount for a international network\n end\n end", "def retry_block_until_true\n (0..RETRY_COUNT).each do\n return if yield\n sleep RETRY_STEP\n end\n Puppet.debug get_cluster_debug_report if is_online?\n fail \"Execution timeout after #{RETRY_COUNT * RETRY_STEP} seconds!\"\n end", "def wait_until(timeout = 30, retry_interval = 0.1, &block)\n start = Time.now\n while (result = !block.call)\n break if (Time.now - start).to_i >= timeout\n sleep(retry_interval)\n end\n !result\nend", "def result_within(seconds, &block)\n (seconds.to_i + 1).times do\n result = yield rescue nil\n return result if result\n sleep 1\n end\n return nil\n end", "def wait block = true\n if block\n guard = block_given? ? yield : true\n while Time.now < @gate_time && guard do\n sleep 0.5\n end\n else\n Time.now < @gate_time\n end\n end", "def success?\n !exception? and !timeout?\n end", "def wait_until timeout: 60, description: \"execute block\"\n t_begin = Time.now\n delay = 1\n loop do\n break if yield\n sleep delay\n raise \"Timeout attempting to #{description} after #{timeout}s\" if (Time.now - t_begin) > timeout\n delay += 1\n end\nend", "def retry_if_nil(times)\r\n begin\r\n result = yield\r\n end while result.nil? 
&& ((times -= 1) > 0)\r\n result\r\n end", "def eventually(label, &block)\n current_time = Time.now\n timeout_treshold = current_time + TIMEOUT\n while (block.call == false) && (current_time <= timeout_treshold) do\n sleep 5\n current_time = Time.now\n end\n if (current_time > timeout_treshold)\n fail \"Action '#{label}' did not resolve within timeout: #{TIMEOUT}s\"\n end\nend", "def verify_block(block)\r\n hash_check = hash_check(block)\r\n time_check = time_check(block)\r\n transaction_check = transaction_check(block)\r\n prev_hash_check = prev_hash_check(block)\r\n if hash_check == true && time_check == true && transaction_check == true && prev_hash_check == true\r\n return true, nil\r\n elsif hash_check == true && time_check == true && prev_hash_check == true\r\n return false, 1\r\n elsif time_check == true && prev_hash_check\r\n return false, 2\r\n elsif time_check == true\r\n return false, 3\r\n else\r\n return false, 4\r\n end\r\nend", "def non_blocking?()\n #This is a stub, used for indexing\n end", "def test_check_block_bad_time\n\t refute @checker.check_block('0', '0', 'SYSTEM>111111(40)', '-1.-1', '1', '1', 0)\n end", "def meta_dl_wait\n $meta_dl_wait_cb = Proc.new if block_given?\n if $metadata_dl_list.values.any? # still downloading\n EM.add_timer(1) { meta_dl_wait } # check again 1s later\n else\n if $meta_fail ### failed !!!!\n meta_fail\n else\n cb = $meta_dl_wait_cb\n $meta_dl_wait_cb = nil\n cb.call\n end\n end\nend", "def block?\n !!block\n end", "def assert_callback(time = nil, message = nil)\n called_back = false\n \n EventMachine.next_tick do\n yield(lambda { called_back = true })\n end\n \n start_time = Time.now.to_i\n\n while (!called_back)\n select(nil, nil, nil, 0.1)\n \n if (time and (Time.now.to_i - start_time > time))\n flunk(message || 'assert_callback timed out')\n end\n end\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
For Sauce Labs reporting. Returns the current session id.
def session_id
    @driver.session_id
  end
[ "def session_id\n @driver.session_id\n end", "def session_id\n @driver.session_id\n end", "def session_id\n @cnx.session_id\n end", "def cracker_session_id\n @session_id ||= ::Rex::Text.rand_text_alphanumeric(8)\n end", "def sessionid\n command[:sessionid]\n end", "def session_id; end", "def current_session_id(env)\n env[ENV_SESSION_OPTIONS_KEY][:id]\n end", "def session_id\n @session_id ||= \"#{chip_api.redis_session_prefix}_#{token.claims_token.api_id}\"\n end", "def session_id\n return @session_id\n end", "def grid_session_id\n @browser.driver.session_id\n end", "def get_session_id\n @agent.get( @root_url + '/dwr/engine.js') do |page|\n @session_id = extract_session_id(page.body)\n end\n end", "def get_id\n @sessionid\n end", "def session_id\n session[:uid] rescue nil\n end", "def session_id\n @build_session_id ||= \"#{lighthouse_prefix}_#{api}_#{account_uuid}\"\n end", "def get_session_id\n check_lisitng_id\n calendar_frame = get_vrbo_calendar_page.iframes.first.src\n page = @agent.get calendar_frame\n link = page.links[3].href\n uri = Addressable::URI.parse(link)\n uri.query_values['sessionId']\n end", "def get_session_id\n check_lisitng_id\n \n calendar_frame = get_vrbo_calendar_page.iframes.first.src\n page = @agent.get calendar_frame\n link = page.links[3].href\n uri = Addressable::URI.parse(link)\n uri.query_values['sessionId']\n end", "def id\n @id ||= scgi.session_id\n end", "def id\n response_hash[:session_id]\n end", "def session_id\n session.transport.algorithms.session_id\n end" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Get the element of type tag_name at matching index.
def ele_index tag_name, index
    # XPath index starts at 1.
    raise "#{index} is not a valid xpath index. Must be >= 1" if index <= 0
    find_element :xpath, "//#{tag_name}[#{index}]"
  end
[ "def []( index, name=nil)\n if index.kind_of? Integer\n raise \"index (#{index}) must be >= 1\" if index < 1\n name = literalize(name) if name\n num = 0\n @element.find { |child|\n child.kind_of? Element and\n (name.nil? ? true : child.has_name?( name )) and\n (num += 1) == index\n }\n else\n return XPath::first( @element, index )\n #{ |element|\n # return element if element.kind_of? Element\n #}\n #return nil\n end\n end", "def ele_index(class_name, index)\n raise 'Index must be >= 1' unless index == 'last()' || (index.is_a?(Integer) && index >= 1)\n\n elements = tags(class_name)\n\n if index == 'last()'\n result = elements.last\n else\n # elements array is 0 indexed\n index -= 1\n result = elements[index]\n end\n\n raise _no_such_element if result.nil?\n\n result\n end", "def [](index_or_name)\n if index_or_name.is_a? Integer\n return @tarray[index_or_number]\n elsif index_or_name.is_a? Symbol\n @description.each_with_index do |n, i|\n return @tarray[i] if n == index_or_name\n end\n end\n raise \"Unknown element name: '#{index_or_name}'\"\n end", "def [](index_or_name)\n if index_or_name.is_a?(Integer)\n at_index(index_or_name)\n else\n first_with_name(index_or_name)\n end\n end", "def ele_index(class_name, index)\n results = tags(class_name)\n if index == 'last()'\n index = results.length\n index -= 1 if index >= 0\n else\n raise 'Index must be >= 1' unless index >= 1\n\n index -= 1 if index >= 1\n end\n\n # uiautomator has issues with index/instance so calculate the index\n # client side.\n results[index]\n end", "def [](name)\n @elements[name]\n end", "def element_named(name)\n @elements.find{|e| e.name.to_s == name.to_s}\n end", "def [](index)\n elements[index]\n end", "def first_ele tag_name\n # XPath index starts at 1\n find_element :xpath, \"//#{tag_name}[1]\"\n end", "def ele_index class_name, index\n unless index == 'last()'\n # XPath index starts at 1.\n raise \"#{index} is not a valid xpath index. Must be >= 1\" if index <= 0\n end\n find_element :xpath, %Q(//#{class_name}[#{index}])\n end", "def element\n @collection[index]\n end", "def getElem idx\n\tif idx == @elems.length; idx = @elems.length-1; end\n\t@elems[idx]\n end", "def get(ns, element_name)\n getlist(ns, element_name).first\n end", "def first_ele(class_name)\n ele_index class_name, 1\n end", "def at(index)\n each.with_index { |v, i| return v if i == index }\n return nil\n end", "def [](index)\n cast get(index)\n end", "def index_to_type(index)\n index if index && (index <= @tag_types.max_index)\n end", "def index_to_type(index)\n index if index && (index <= @TagTypes.max_index)\n end", "def retrieve_element_from_index(array, index_number)\n array[index_number]\nend" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }