query (string, lengths 7–9.5k) | document (string, lengths 10–1.07M) | negatives (list, lengths 19–19) | metadata (dict) |
---|---|---|---|
Disable or enable ETag and Last-Modified conditional GET. Equivalent to calling use_etag(true_or_false) and use_last_modified(true_or_false).
|
def use_conditional_get(true_or_false)
  use_etag(true_or_false)
  use_last_modified(true_or_false)
end
|
[
"def not_modified_response(env, etag); end",
"def get(path, headers = {})\n cached_response = fetch_cached_response(path)\n if cached_response\n headers = headers.merge 'If-None-Match' => cached_response['Etag']\n end\n response = super(path, headers)\n case response\n when Net::HTTPNotModified then cached_response\n else\n cache_response(path, response)\n response\n end\n end",
"def fresh_when(options)\n options.assert_valid_keys(:etag, :last_modified, :public)\n\n response.etag = options[:etag] if options[:etag]\n response.last_modified = options[:last_modified] if options[:last_modified]\n \n if options[:public] \n cache_control = response.headers[\"Cache-Control\"].split(\",\").map {|k| k.strip }\n cache_control.delete(\"private\")\n cache_control.delete(\"no-cache\")\n cache_control << \"public\"\n response.headers[\"Cache-Control\"] = cache_control.join(', ')\n end\n\n if request.fresh?(response)\n head :not_modified\n end\n end",
"def fresh_when(options)\n options.assert_valid_keys(:etag, :last_modified, :public)\n\n response.etag = options[:etag] if options[:etag]\n response.last_modified = options[:last_modified] if options[:last_modified]\n\n if options[:public]\n response.cache_control[:public] = true\n end\n\n if request.fresh?(response)\n head :not_modified\n end\n end",
"def not_modified!\n self.status = 304\n self.body = []\n NOT_MODIFIED_OMIT_HEADERS.each { |name| headers.delete(name) }\n nil\n end",
"def enable_get\n add option: \"-get=true\"\n end",
"def etag_http_cache(action, variable)\n get action\n assert_headers('200 OK', 'private, max-age=0, must-revalidate', 'ETag', etag_for(variable))\n reset!\n\n @request.env['HTTP_IF_NONE_MATCH'] = etag_for(variable)\n get action\n assert_headers('304 Not Modified', 'private, max-age=0, must-revalidate', 'ETag', etag_for(variable))\n reset!\n\n @request.env['HTTP_IF_NONE_MATCH'] = 'INVALID'\n get action\n assert_headers('200 OK', 'private, max-age=0, must-revalidate', 'ETag', etag_for(variable))\n end",
"def not_modified?\n response.etag_matches?(original_request.if_none_match) ||\n response.last_modified_at?(original_request.if_modified_since)\n end",
"def not_modified_response(env, etag)\n [ 304, cache_headers(env, etag), [] ]\n end",
"def not_modified!\n body.close if body.respond_to?(:close)\n self.status = 304\n self.body = []\n NOT_MODIFIED_OMIT_HEADERS.each { |name| headers.delete(name) }\n nil\n end",
"def weak_etag?; end",
"def do_GET(req, res)\n super(req, res)\n prevent_caching(res)\n end",
"def default_etag_check\n fresh_when(1) if response.etag.nil?\n end",
"def disable_caching; end",
"def is_not_modified?\n @code == 304\n end",
"def not_modified?\n @payload[:status] == 304\n end",
"def send_not_modified\n self.send_header(:status => \"304 Not Modified\")\n end",
"def restrict_html_to_get\n head 406 unless request.get? or request.format.json?\n end",
"def render_not_modified_or(last_modified)\n response.headers['Last-Modified'] = last_modified.httpdate if last_modified\n\n if_modified_since = request.env['HTTP_IF_MODIFIED_SINCE']\n if if_modified_since && last_modified &&\n last_modified <= Time.httpdate(if_modified_since)\n # The representation has not changed since it was last requested.\n # Instead of processing the request normally, send a response\n # code of 304 (\"Not Modified\").\n render :nothing => true, :status => \"304 Not Modified\"\n else\n # The representation has changed since it was last requested.\n # Proceed with normal request processing.\n yield\n end\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
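The `use_conditional_get` document in the row above only delegates; the sketch below shows one way it could be hosted. `CachingConfig`, its two boolean flags, and the bodies of `use_etag` / `use_last_modified` are hypothetical stand-ins (assumptions, not taken from the dataset).

```ruby
# Hypothetical host class for use_conditional_get; the flag names are assumptions.
class CachingConfig
  def initialize
    @etag_enabled = true
    @last_modified_enabled = true
  end

  # Toggle ETag-based conditional GET support.
  def use_etag(true_or_false)
    @etag_enabled = true_or_false
  end

  # Toggle Last-Modified-based conditional GET support.
  def use_last_modified(true_or_false)
    @last_modified_enabled = true_or_false
  end

  # The documented method: flips both toggles in one call.
  def use_conditional_get(true_or_false)
    use_etag(true_or_false)
    use_last_modified(true_or_false)
  end
end

config = CachingConfig.new
config.use_conditional_get(false) # disables both ETag and Last-Modified handling
```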
Returns true if a handler is set for the HTTP method passed in the parameter. It is an alternative to get_handler, which would return a true value if there is a default handler.
|
def has_handler?(method)
  send(method) && true
end
|
[
"def has_handler_for?(handler_method)\n @jr_handlers ||= {}\n @jr_handlers.has_key?(handler_method)\n end",
"def get?\r\nHTTP_METHOD_LOOKUP[request_method] == :get\r\nend",
"def post?\r\nHTTP_METHOD_LOOKUP[request_method] == :post\r\nend",
"def can_handle?(req)\n true\n end",
"def has_handler?(handler_name)\n @handlers.key? handler_name\n end",
"def http_method_allowed?(verb=@base.method_value)\n @base.allowed_http_methods.include?(verb)\n end",
"def valid_http_method? method\n method.to_s.in? 'post', 'get'\n end",
"def passive? route\n %[GET POST].include? route.verb\n end",
"def no_handler?\n handlers_set.empty?\n end",
"def can_generate_rest?\n [:normal, :server].include?(@main_method.kind) &&\n http_bindings.first&.verb? &&\n http_bindings.first&.path?\n end",
"def match_method(method)\n super || (!method.is_a?(Array) && head? && method.to_s.upcase == 'GET')\n end",
"def request_is_safe?\n case method_value when :get, :head, :options then true end\n end",
"def implements?(http_method)\n http_methods.include?(http_method.upcase)\n end",
"def handled?\n handled\n end",
"def get?\n method == \"GET\"\n end",
"def method_missing(sym, *args)\n if multipart_form_method?(sym)\n return multipart_form_controller_action?(sym)\n else\n super\n end\n end",
"def default_handler\n @default_handler\n end",
"def is_get?\n @env[\"REQUEST_METHOD\"] == 'GET'\n end",
"def matches?(req)\n pattern.match(req.path) &&\n method(req).downcase.to_sym == http_method\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Sets the handler for the HTTP method passed in the parameter to val.
|
def set_handler(method, val)
  send("#{method}=", val)
end
|
[
"def http_method(value = nil)\n if value\n @http_method = value\n else\n @http_method\n end\n end",
"def http_method(value = nil)\n if value\n config[:http_method] = value\n else\n config[:http_method]\n end\n end",
"def http_method= new_verb\n @http_method = new_verb.to_s.upcase\n end",
"def handle_http_method(http_method, extra_path)\n extra_path = CGI.unescape(extra_path)\n case http_method\n when \"GET\"\n self.on_get extra_path\n when \"POST\"\n self.on_post extra_path\n when \"PUT\"\n self.on_put extra_path\n when \"DELETE\"\n self.on_delete extra_path\n end #case \n end",
"def handler_method; end",
"def set_method(method)\n\n # capitalize and convert to a symbol if not already a symbol\n method.upcase! if !method.kind_of? Symbol\n\n # check if the http method is valid\n valid_methods = [:GET, :PUT, :POST, :PATCH, :DELETE]\n if !valid_methods.include?(method.to_sym)\n raise ArgumentError, \"HTTP method must be valid (#{valid_methods.join(', ')})\"\n end\n\n self.http_method = method\n end",
"def method=(method)\n method = method.downcase.to_sym\n\n @method = method if [:get, :post, :head, :options, :put, :delete].include? method\n true\n end",
"def method=(value)\n @method = value\n end",
"def set_handler(symbol, method)\n @handler[symbol] = method\n end",
"def set_Method(value)\n set_input(\"Method\", value)\n end",
"def method_missing(method, *args)\n @uri.path = \"/#{args[0]}.#{@format.extension}\" # Should be the form of /resources\n if verb = method.to_s.match(/^http_(get|post|put|delete|head)$/)\n run_verb(verb.to_s.split(\"_\").last, args[1])\n end\n end",
"def http_method\n\t\t\tparams['_method'] || request_method\n\t\tend",
"def http_method\n\t\t\t\treturn @http_method if defined?(@http_method)\n\n\t\t\t\tmethod_from_method =\n\t\t\t\t\tbegin\n\t\t\t\t\t\tparams['_method']\n\t\t\t\t\trescue ArgumentError => e\n\t\t\t\t\t\t## https://github.com/rack/rack/issues/337#issuecomment-48555831\n\t\t\t\t\t\traise unless e.message.include?('invalid %-encoding')\n\t\t\t\t\tend\n\n\t\t\t\t@http_method = (method_from_method || request_method).upcase.to_sym\n\t\t\tend",
"def http_method\n @http_method ||= @options[:http_method] || :post\n end",
"def method=(meth)\n meth = meth.to_s\n self[:method] = (meth.empty? ? nil : meth)\n end",
"def custom_method(name, http_verb:, http_path: T.unsafe(nil)); end",
"def handler(&block)\n set_request_handler(block)\n end",
"def method\n @method ||= original_request.request_method.downcase.to_sym\n end",
"def http_method\n @parser.http_method\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
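The `has_handler?` and `set_handler` documents in the two rows above both dispatch through `send`, which presumes one reader/writer pair per HTTP verb on the receiver. Below is a hedged sketch of that layout; `Dispatcher` and `HTTP_METHODS` are assumed names, not taken from the dataset.

```ruby
# Hypothetical dispatcher whose per-verb accessors make send(method) and
# send("#{method}=", val) resolve; class and constant names are assumptions.
class Dispatcher
  HTTP_METHODS = [:get, :post, :put, :delete, :head].freeze

  # One reader/writer per verb, e.g. #get and #get=.
  attr_accessor(*HTTP_METHODS)

  def has_handler?(method)
    send(method) && true
  end

  def set_handler(method, val)
    send("#{method}=", val)
  end
end

d = Dispatcher.new
d.set_handler(:get, ->(req) { [200, {}, ['ok']] })
d.has_handler?(:get)  # => true
d.has_handler?(:post) # => nil (falsy: no handler assigned yet)
```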
Returns an array of [HTTP verb, handler] pairs; an extra item is the :default handler. NB: this could also be a hash, but an array is fine here.
|
def handlers
  HTTP_METHODS.map do |sym|
    [sym, send(sym)]
  end
end
|
[
"def http_methods(_path)\n []\n end",
"def match(verb, url)\n parts = url.gsub(/^\\/+/,'').split('/')\n\n @routes.each do |route|\n if route.verb == verb\n result = matches?(route, parts)\n if result\n return [ route.handler, result ]\n end\n end\n end\n\n return [ nil, nil ]\n end",
"def handlers\n @handlers ||= {\n 'application/json' => -> (response) { handle_json_response(response) },\n 'application/octet-stream' => -> (response) { handle_octet_stream_response(response) }\n }\n end",
"def handlers\n @handlers ||= Hash.new do |hash, key|\n if key.nil?\n raise NoHandler\n else\n raise InvalidHandler, key\n end\n end\n end",
"def action_handlers\n @action_handlers\n end",
"def default_handlers\n if (handlers_key = config[:handlers])\n Handlers[handlers_key]\n .map { |key, value| [:\"#{key}_handler\", value] }.to_h\n else\n EMPTY_HASH\n end\n end",
"def action_handlers\n _action_handlers = []\n self.each do |_action_handler|\n _action_handlers.add(_action_handler.action_handlers)\n end\n _action_handlers \n end",
"def actions_for verb\n @commands[verb] || []\n end",
"def all_handlers\n Handlers::HANDLERS\n end",
"def list_handlers\n robot.handlers.flat_map do |handler|\n handler.namespace if handler.respond_to?(:routes)\n end.compact.uniq.sort\n end",
"def http_methods(uri)\n begin\n node = @server.tree.node_for_path(uri)\n rescue Dav::Exception::NotFound\n return []\n end\n\n return ['POST'] if node.is_a?(IOutbox)\n\n []\n end",
"def verbs_for(route)\n route_verb = route.verb.to_s\n\n %w[GET POST PUT PATCH DELETE].select do |verb|\n route_verb.include?(verb)\n end\n end",
"def home(url, verbs = %w[get post], &block)\n verbs.each do |verb|\n send(verb, url, &block)\n end\nend",
"def allowed_methods( path_components )\n\t\ttype = path_components.empty? ? :collection : :single\n\t\tallowed = HTTP_METHOD_MAPPING[ type ].keys.\n\t\t\tfind_all {|msym| self.respond_to?(msym) }.\n\t\t\tinject([]) {|ary,msym| ary << HTTP_METHOD_MAPPING[type][msym]; ary }\n\n\t\tallowed += ['HEAD'] if allowed.include?( 'GET' )\n\t\treturn allowed.uniq.sort\n\tend",
"def extract_handler_method(args)\n handler = nil\n\n if method_defined?(args.last)\n handler = args.last\n args.delete_at(-1)\n\n else\n handler = :handle\n end\n\n [handler, args]\n end",
"def define_handlers\n namespace = api_config[:namespace]\n api_config[:operations].each_pair do |method,operation|\n input_handlers[method] = InputHandler.new(namespace, operation)\n output_handlers[method] = OutputHandler.new(operation)\n end\n end",
"def http_methods\n reload if executed_requests['GET'].nil? || executed_requests['GET']['headers'].nil? || executed_requests['GET']['headers'].empty?\n (executed_requests['GET']['headers']['Allow'] || \"GET\").split(/,\\s*/)\n end",
"def handlers_idx; end",
"def action_responses\n self.class.action_responses[action_name] || []\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Returns an array like handlers, but restricted to the entries whose handler is not nil.
|
def handlers_set
handlers.reject{|sym, handler| handler.nil?}
end
|
[
"def handlers\n @handlers ||= Hash.new do |hash, key|\n if key.nil?\n raise NoHandler\n else\n raise InvalidHandler, key\n end\n end\n end",
"def violation_handlers\n if defined_violation_handlers.empty?\n Array(default_violation_handler)\n else\n defined_violation_handlers\n end\n end",
"def derive_handlers(handler_list, depth=0)\n handler_list.compact.map { |handler_name|\n case\n when @settings.handler_exists?(handler_name)\n handler = @settings[:handlers][handler_name].merge(:name => handler_name)\n expand_handler_sets(handler, depth)\n when @extensions.handler_exists?(handler_name)\n @extensions[:handlers][handler_name]\n else\n @logger.error(\"unknown handler\", :handler_name => handler_name)\n nil\n end\n }.flatten.compact.uniq\n end",
"def get_event_handlers(handlers)\n # convert the input into an array\n if handlers.nil?\n puts 'WARNING: There were no handlers passed to the EventHandlerRegister#get_event_handlers. Please make sure your params are correct'\n handlers = []\n else\n handlers = [handlers] unless handlers.is_a? Array\n end\n require_custom_handlers if custom_handler?(handlers)\n _handlers = load_handlers handlers\n check_stdout _handlers\n _handlers\n end",
"def all_handlers\n Handlers::HANDLERS\n end",
"def list_handlers\n robot.handlers.flat_map do |handler|\n handler.namespace if handler.respond_to?(:routes)\n end.compact.uniq.sort\n end",
"def handlers_for_exception(exception)\n prioritized_handlers.select { |handler| handler === exception }\n end",
"def no_handler?\n handlers_set.empty?\n end",
"def default_handlers\n if (handlers_key = config[:handlers])\n Handlers[handlers_key]\n .map { |key, value| [:\"#{key}_handler\", value] }.to_h\n else\n EMPTY_HASH\n end\n end",
"def matching_handlers(query)\n name = query.downcase.strip\n\n return [] unless list_handlers.include?(name)\n\n robot.handlers.select { |handler| handler.namespace == name }\n end",
"def defined_violation_handlers\n @defined_violation_handlers ||= []\n end",
"def prioritized_handlers\n Enumerator.new do |yielder|\n priorities = (registered_handlers.keys | block_handlers.keys).sort!\n priorities.uniq!\n priorities.each do |priority|\n if registered_handlers.key?(priority)\n collected_handlers = registered_handlers[priority].to_a\n end\n if block_handlers.key?(priority)\n temp_handlers = block_handlers[priority].to_a\n if collected_handlers\n collected_handlers.concat(temp_handlers)\n else\n collected_handlers = temp_handlers\n end\n end\n collected_handlers.sort_by!(&:name)\n collected_handlers.uniq!\n collected_handlers.each { |handler| yielder << handler }\n end\n end\n end",
"def all_handlers_for(event_type, source_name)\n handlers_for(event_type, source_name) + handlers_for(event_type, nil)\n end",
"def method_missing method, *args, &block\n method, *args = special_handler(method, *args, &block) \\\n if self.private_methods.include?(:special_handler)\n [method, args].flatten\n end",
"def names\n handler.names\n end",
"def create_handlers\n @handlers ||= (self.class.create_handlers || {})\n end",
"def validate_handlers\n handlers.each { |handler| validate(:handler, handler, children_for(handler)) }\n end",
"def supported_handlers\n @@supported_handlers\n end",
"def action_handlers\n @action_handlers\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Returns true if there is no handler set for this dispatcher
|
def no_handler?
handlers_set.empty?
end
|
[
"def handlers?\n registered_handlers.length > 0\n end",
"def has_event_handler?\n !!collected[:event_handler]\n end",
"def has_handler?(handler_name)\n @handlers.key? handler_name\n end",
"def has_handler?(method)\n send(method) && true\n end",
"def handled?\n handled\n end",
"def root?\n self.kind_of?(RootHandler)\n end",
"def custom_handler?(handlers)\n handlers.each do |handler|\n if !DATSauce::EventHandler.event_handlers.include?(handler.to_sym)\n return true\n end\n end\n false\n end",
"def handler_enabled?\n !datastore['DisablePayloadHandler']\n end",
"def has_handler_for?(handler_method)\n @jr_handlers ||= {}\n @jr_handlers.has_key?(handler_method)\n end",
"def has_ui_event?\n !ui_event_queue.empty?\n end",
"def handler?(handler_id)\n synchronize { @handlers.key? handler_id }\n end",
"def __has_element_handler?(key)\n key = key.to_s.gsub(/[=!]$/, '')\n __elements_handlers.storage.find { |h| h.method_name == key }\n end",
"def empty?\n\t\treturn self.sockets.empty? && self.timers.empty?\n\tend",
"def empty?\n\t\t@handles.empty?\n\tend",
"def at_exit_handler_installed?\n @exit_handler_added ||= false\n end",
"def active?\n @conn.handler == self\n end",
"def has_queued_events?\n !@propagation.empty?\n end",
"def empty?\n @hooks_map.empty?\n end",
"def handler_bind?\n payload_instance && payload_instance.connection_type == 'bind'\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
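The `handlers`, `handlers_set`, and `no_handler?` rows above complete the same small per-verb API. The sketch below restates the hypothetical `Dispatcher` so it runs on its own; the :default entry mentioned in the `handlers` docstring is left out for brevity, and all names remain assumptions.

```ruby
# Self-contained restatement of the hypothetical Dispatcher with the
# enumeration helpers from the rows above.
class Dispatcher
  HTTP_METHODS = [:get, :post, :put, :delete, :head].freeze
  attr_accessor(*HTTP_METHODS)

  def set_handler(method, val)
    send("#{method}=", val)
  end

  # All [verb, handler] pairs, including verbs with no handler (nil).
  def handlers
    HTTP_METHODS.map { |sym| [sym, send(sym)] }
  end

  # Only the pairs whose handler is actually set.
  def handlers_set
    handlers.reject { |_sym, handler| handler.nil? }
  end

  def no_handler?
    handlers_set.empty?
  end
end

d = Dispatcher.new
d.no_handler?                # => true
d.set_handler(:post, ->(req) { 'created' })
d.handlers_set.map(&:first)  # => [:post]
d.no_handler?                # => false
```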
Checks that the model given as a string exists
|
def model_exists?
if model.present?
begin
model.constantize
rescue NameError
errors.add :model, "#{model} is not a valid model"
end
end
end
|
[
"def model_exists? name\n name = ruby_to_oo name\n reflection_model.where(:model => name).exists?\n end",
"def model_exists?(model_type, name)\n get_model(model_type, name) != nil\n end",
"def model_exists?(model)\n model.exists? # Throws invalid ActiveRecord::StatementInvalid if doesn't exist\n rescue ActiveRecord::StatementInvalid => e\n return false if e.message.match?(/Table .* doesn't exist/i)\n\n raise e\n end",
"def model_exists?\n File.exist?(File.join(destination_root, model_path))\n end",
"def model_name_not_blank\n if model_name.nil? || model_name.strip.empty?\n raise ArgumentError.new('Model name nil or empty.')\n end\n end",
"def valid_model?(database_model)\n database_model == @mapping.database_model.to_s\n end",
"def model_name_not_blank\n if argument_blank?(model_name)\n raise ArgumentError.new('Model name is missing')\n end\n end",
"def models_in? name\n name = ruby_to_oo name\n \n reflection_model.where(\"model like #{name}.%\").exists?\n end",
"def model_name_already_exists?\n @camel_name = name.to_s.underscore.camelize\n\n @project_name = \"\"\n @project_name = fetch_project_name\n\n return false unless already_exists?(@camel_name, @project_name)\n true\n end",
"def model_name_valid\n unless model_name.match(/^[a-z]/i)\n raise ArgumentError.new(\"Invalid model name. The model name shouldn't begin with a number.\")\n end\n end",
"def validate_phone_model_exists()\n if ! PhoneModel.exists?( :id => self.phone_model_id )\n errors.add( :phone_model_id, \"There is no phone model with the given ID #{self.phone_model_id}.\" )\n end\n end",
"def model_defined_in_app?(model)\n model.definition_location.any? do |location|\n break if location.label == \"require\"\n\n app_file?(location.absolute_path)\n end\n end",
"def include?(model)\n exists?(model.id)\n end",
"def check_model_column\n @model_name.camelcase.constantize\n raise \"There is no column #{@column_name} in model #{@model_name}\" unless @model_name.camelcase.constantize.columns.map(&:name).include?(@column_name)\n end",
"def entry_exists?(model, name)\n\t\tbool = false\n\t\tmodel.each do |hash|\n\t\t\thash.each do |k, v|\n\t\t\t\tif v == name\n\t\t\t\t\tbool = true\n\t\t\t\tend\n\t\t\tend\n\t\tend\n\t\tbool\n\tend",
"def explicit_fullfilled_model?\n !!explicit_fullfilled_model\n end",
"def check_model_availability!(resource_name)\n unless resource_name.singularize != resource_name && Object.const_defined?(resource_name.singularize.classify)\n flash[:warning] = \"Resource `#{resource_name}` not found.\"\n redirect '/dashboard/resources'\n end\n end",
"def model?\n @model\n end",
"def check(_model, _token)\n true\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
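The `model_exists?` row above reads like a custom ActiveModel validation (it calls `errors.add`). A hedged sketch of a host it could plug into follows; `ReportDefinition` and the `:model` attribute wiring are assumptions, while `present?` and `constantize` come from ActiveSupport as in the original.

```ruby
require 'active_model'
require 'active_support/core_ext/object/blank'        # String#present?
require 'active_support/core_ext/string/inflections'  # String#constantize

# Hypothetical model that stores another class's name as a string and
# validates it with the model_exists? check from the row above.
class ReportDefinition
  include ActiveModel::Model

  attr_accessor :model

  validate :model_exists?

  private

  def model_exists?
    if model.present?
      begin
        model.constantize
      rescue NameError
        errors.add :model, "#{model} is not a valid model"
      end
    end
  end
end

report = ReportDefinition.new(model: 'NoSuchClass')
report.valid?          # => false
report.errors[:model]  # => ["NoSuchClass is not a valid model"]
```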
Instantiates the reporter to generate the HamlLint report
|
def reporter
@reporter ||= ::HamlLint::Reporter::HashReporter.new(
::HamlLint::Logger.new(StringIO.new)
)
end
|
[
"def initialize\n @problems = []\n @default_info = {:check => 'unknown', :linenumber => 0, :column => 0}\n\n PuppetLint.configuration.checks.each do |check|\n method = PuppetLint.configuration.check_method[check]\n self.class.send(:define_method, \"lint_check_#{check}\", &method)\n end\n\n PuppetLint.configuration.helpers.each do |helper|\n method = PuppetLint.configuration.helper_method[helper]\n self.class.send(:define_method, helper, &method)\n end\n end",
"def initialize(report)\n @report = report\n\n init\n end",
"def initialize(config)\n @config = config\n @lints = []\n end",
"def xunit_report\n document = REXML::Document.new\n declaration = REXML::XMLDecl.new\n declaration.encoding = 'UTF-8'\n declaration.version = '1.0'\n document.add declaration\n testsuites = document.add_element 'testsuites'\n tests = 0\n failures = 0\n task_id = 0\n\n tasks_report_structure.each do |task|\n testsuite = testsuites.add_element 'testsuite'\n testsuite.add_attribute 'id', task_id\n task_id += 1\n testsuite.add_attribute 'name', task[:description]\n testsuite.add_attribute 'package', task[:name]\n testsuite.add_attribute 'tests', task[:example_count]\n testsuite.add_attribute 'failures', task[:failure_count]\n testsuite.add_attribute 'skipped', task[:pending_count]\n testsuite.add_attribute 'time', task[:duration]\n testsuite.add_attribute 'status', task[:status]\n\n properties = testsuite.add_element 'properties'\n property_task = properties.add_element 'property'\n property_task.add_attribute 'name', 'task'\n property_task.add_attribute 'value', task[:task]\n property_spec = properties.add_element 'property'\n property_spec.add_attribute 'name', 'spec'\n property_spec.add_attribute 'value', task[:spec]\n property_hiera = properties.add_element 'property'\n property_hiera.add_attribute 'name', 'hiera'\n property_hiera.add_attribute 'value', task[:hiera]\n property_facts = properties.add_element 'property'\n property_facts.add_attribute 'name', 'facts'\n property_facts.add_attribute 'value', task[:facts]\n\n if task[:examples].is_a? Array\n task[:examples].each do |example|\n tests += 1\n testcase = testsuite.add_element 'testcase'\n testcase.add_attribute 'name', example[:description]\n testcase.add_attribute 'classname', \"#{example[:file_path]}:#{example[:line_number]}\"\n testcase.add_attribute 'time', example[:run_time]\n testcase.add_attribute 'status', example[:status]\n if example[:status] == 'pending'\n skipped = testcase.add_element 'skipped'\n skipped.add_attribute 'message', example[:pending_message] if example[:pending_message]\n end\n if example[:status] == 'failed'\n failures += 1\n end\n if example[:exception_message] and example[:exception_class]\n failure = testcase.add_element 'failure'\n failure.add_attribute 'message', example[:exception_message]\n failure.add_attribute 'type', example[:exception_class]\n end\n end\n end\n end\n testsuites.add_attribute 'tests', tests\n testsuites.add_attribute 'failures', failures\n document.to_s\n end",
"def run\n print_line\n print_status 'Creating HTML report...'\n\n plugins = format_plugin_results( auditstore.plugins )\n @base_path = File.dirname( options['tpl'] ) + '/' +\n File.basename( options['tpl'], '.erb' ) + '/'\n\n title_url = auditstore.options['url']\n begin\n title_url = uri_parse( auditstore.options['url'] ).host\n rescue\n end\n\n params = prepare_data.merge(\n title_url: escapeHTML( title_url ),\n audit_store: auditstore,\n plugins: plugins,\n base_path: @base_path\n )\n\n File.open( outfile, 'w' ) { |f| f.write( erb( options['tpl'], params ) ) }\n\n print_status \"Saved in '#{outfile}'.\"\n end",
"def load_report(report)\n report.configuration_source(config_source_1)\n report.configuration_source(config_source_2)\n report.configuration_directive(config_directive, config_directive_value)\n report.scan_passed(scanner_1, scanner_1_passed)\n report.scan_info(scanner_1, scanner_1_info_type_1, scanner_1_info_type_1_message_1)\n report.scan_info(scanner_1, scanner_1_info_type_1, scanner_1_info_type_1_message_2)\n report.scan_info(scanner_1, scanner_1_info_type_2, scanner_1_info_type_2_message_1)\n report.scan_stdout(scanner_1, scanner_1_stdout)\n report.scan_info(scanner_2, scanner_1_info_type_1, scanner_1_info_type_1_message_1)\n report.scan_stderr(scanner_2, scanner_2_stderr)\n report.salus_info(salus_info_type_1, salus_info_type_1_message_1)\n report.salus_info(salus_info_type_1, salus_info_type_1_message_2)\n report.salus_info(salus_info_type_2, salus_info_type_2_message_1)\n report.salus_error(salus_error_1_class, salus_error_1_data)\n report.salus_error(salus_error_2_class, salus_error_2_data)\n report.salus_runtime_error(salus_runtime_error_data)\n report\n end",
"def reporter= definition\n raise ConfigurationException,\"required key :class is missing from #{definition}\" unless definition[:class]\n @reporters[definition[:class]]=definition\n end",
"def initialize(args = {})\n super({})\n\n defaults = {\n :title => 'Test Results',\n :erb_template => \"#{File.dirname(__FILE__)}/../templates/index.html.erb\",\n :reports_dir => ENV['MINITEST_HTML_REPORTS_DIR'] || 'test/html_reports',\n :mode => :safe,\n :output_filename => ENV['MINITEST_HTML_REPORTS_FILENAME'] || 'index.html',\n }\n\n settings = defaults.merge(args)\n\n @mode = settings[:mode]\n @title = settings[:title]\n @erb_template = settings[:erb_template]\n @output_filename = settings[:output_filename]\n reports_dir = settings[:reports_dir]\n\n @reports_path = File.absolute_path(reports_dir)\n end",
"def initialize\n namespace :lint do\n desc \"check number of lines of code changed. No long PRs\"\n task \"diff_length\" do\n log_diff_length_and_exit\n end\n\n # this will produce 'lint:rubocop','lint:rubocop:auto_correct' tasks\n RuboCop::RakeTask.new do |task|\n task.options = [\"--debug\"]\n end\n end\n end",
"def reporter(name); end",
"def default_reporter(options)\n Licensed::Reporters::YamlReporter.new\n end",
"def yml_report\n report_as :yml\n end",
"def report\n validate\n parse\n filter_issues\n comment\n end",
"def initialize(source, options = {})\n super(source, options)\n @warnings_encountered = {}\n @trackers = source.file_format.report_trackers\n setup\n end",
"def initialize\n @yaml_path = File.dirname(__FILE__) + \"/../history/history.yml\"\n @yaml = YAML::load_file @yaml_path\n @formatter = Yolo::Formatters::ProgressFormatter.new\n end",
"def reporter\n @reporter ||= decorate_reporter(reporter_class.new($stdout, reporter_options))\n end",
"def report\n @report = DataValidator.new.report\n end",
"def html_report(test_report, extra_report_header)\n\n html_report = <<-EOS\n <html>\n EOS\n\n html_style = <<-EOS\n <style>\n body {background-color: #FFFFF0; font-family: \"VAG Round\" ; color : #000080;font-weight:normal;word-break: break-all;}\n #specs-table{font-family:Arial,Helvetica,Sans-serif;font-size:12px;text-align:left;border-collapse:collapse;border-top: 2px solid #6678B1;border-bottom: 2px solid #6678B1;margin:20px;}\n #specs-table th{font-size:13px;font-weight:normal;background:#b9c9fe;border-top:4px solid #aabcfe;border-bottom:1px solid #fff;color:#039;padding:8px;}\n #specs-table td{background:#e8edff;border-top:1px solid #fff;border-bottom:1px solid #fff;color:#039;padding:8px;}\n #specifications{font-family:Arial,Helvetica,Sans-serif;font-size:13px;width:480px;background:#fff;border-collapse:collapse;text-align:left;margin:20px;border:1px solid #ccc;}\n #specifications th{font-size:14px;font-weight:bold;color:#039;border-bottom:2px solid #6678b1;padding:10px 8px;}\n #specifications td{border-bottom:1px solid #ccc;color:#009;padding:6px 8px;}\n #statuspass{font-family:Arial,Helvetica,Sans-serif;font-size:12px;color:green;font-weight:bold;}\n #statusfail{font-family:Arial,Helvetica,Sans-serif;font-size:12px;color:red;font-weight:bold;}\n #tcs{font-family:Arial,Helvetica,Sans-serif;font-size:13px;background:#fff;width:900px;border-collapse:collapse;text-align:left;margin:20px;border:1px solid #ccc;}\n #tcs th{font-size:14px;font-weight:bold;color:#039;border-bottom:2px solid #6678b1;padding:10px 8px;}\n #tcs td{border-bottom:1px solid #ccc;color:#009;padding:6px 8px;}\n #checkpoint{font-family:Arial,Helvetica,Sans-serif;font-size:13px;background:#fff;width:900px;border-collapse:collapse;text-align:left;margin:20px;border:1px solid #ccc;}\n #checkpoint td{border-bottom:1px solid #ccc;color:#009;padding:6px 8px;}\n #container{margin: 0 30px;background: #fff;border:1px solid #ccc;}\n #header{background: #e8edff;padding: 2px;border-bottom: 2px solid #6678b1;}\n #steps{background: #e8edff;font-weight: bold;}\n #dp{font-weight: bold;}\n #validations{font-weight: bold;}\n #content{clear: left;padding: 10px;}\n #footer{background: #e8edff;text-align: right;padding: 10px;}\n </style>\n EOS\n\n title = <<-EOS\n <head><title>#{test_report[:test_suite_title]}</title></head>\n\n <body>\n EOS\n\n html_report += html_style + title\n\n report_header = <<-EOS\n <center>\n\n <a name=#{replace_space_by_dash(test_report[:test_suite_title])}></a>\n <table id=\"specifications\">\n <th align=\"center\">#{test_report[:test_suite_title]}</th>\n <tr><td>Test specification: #{test_report[:test_spec_path]}</td></tr>\n <tr><td>Kadu server: #{test_report[:kadu_server]}</td></tr>\n EOS\n @test_report[:test_cases].each do |tc_id, tc|\n if tc.has_key?(:server_info)\n report_header += <<-EOS\n <tr><td>Kadu branch: #{tc[:server_info][:kadu_branch]}</td></tr>\n <tr><td>Kadu version: #{tc[:server_info][:kadu_version]}</td></tr>\n <tr><td>Kadu index: #{tc[:server_info][:kadu_index]}</td></tr>\n EOS\n break\n end\n end\n if !extra_report_header.nil?\n details = extra_report_header.split(\"\\n\")\n details.each do |line|\n report_header += <<-EOS\n <tr><td>#{line}</td></tr>\n EOS\n end\n end\n test_suite_time_in_secs = Time.parse(test_report[:test_suite_completed_time].to_s) - Time.parse(test_report[:test_suite_start_time].to_s)\n\n report_header += <<-EOS\n <tr><td>Test suite started On: #{test_report[:test_suite_start_time]}</td></tr>\n <tr><td>Duration: #{test_suite_time_in_secs} secs</td></tr>\n <tr><td>Test 
suite status: <font id=#{status(test_report[:test_suite_result_status])}>#{test_report[:test_suite_result_status]}</font></td></tr>\n </table>\n <br>\n EOS\n report_tc_summary = <<-EOS\n <table id=\"tcs\">\n <tr>\n <th >Test Case</th>\n <th >Test Case Status</th>\n </tr>\n EOS\n\n test_report[:test_cases].each do |tc_id, tc|\n report_tc_summary += <<-EOS\n <tr>\n <td><a href=\"##{tc_id}\">#{tc_id}: #{tc[:title]}</a></td><td><font id=#{status(tc[:test_case_result_status])}>#{tc[:test_case_result_status]}</font></td>\n </tr>\n EOS\n end\n\n report_tc_summary += <<-EOS\n </table>\n <br>\n <h4>#{test_report[:test_suite_description]}</h4>\n <br>\n </center>\n EOS\n test_cases = \"\"\n test_report[:test_cases].each do |tc_id, tc|\n test_case = <<-EOS\n <div id=\"container\" style=\"word-break: break-all;width:100%;\">\n <div id=\"header\">\n <h4>\n <p><a name=\"#{tc_id}\">#{tc_id}: #{tc[:title]}</a></p>\n <p>#{tc[:description]}</p>\n <p>Test result status: <font id=#{status(tc[:test_case_result_status])}>#{tc[:test_case_result_status]}</font></p>\n </h4>\n </div>\n <div id=\"content\">\n <h4>\n Steps to reproduce\n </h4>\n EOS\n\n tc[:test_steps].each do |step_id, step|\n test_steps = <<-EOS\n <p id=\"steps\">#{step_id}</p>\n EOS\n\n if step.has_key?(:action) || step.has_key?(:mt_url)\n test_steps += <<-EOS\n <p style=\"word-break: break-all;\" width=900px >URL: #{step[:action]}</p>\n EOS\n end\n\n if step.has_key?(:dynamic_params)\n test_steps += <<-EOS\n <p id=\"dp\">Dynamic Parameters</p>\n EOS\n\n exclusion_term = \"set @kadu_response\"\n step[:dynamic_params].each do |parameter, expression|\n expression = exclusion_term if expression.to_s.include?(exclusion_term)\n test_steps += <<-EOS\n <p>#{parameter} = #{expression}</p>\n EOS\n end\n end\n\n if step.has_key?(:validation_steps)\n\n test_steps += <<-EOS\n <p id=\"validations\">\n Validations\n </p>\n <table id=\"checkpoint\">\n EOS\n\n step[:validation_steps].each do |vstep, result|\n steps = <<-EOS\n <tr>\n <td colspan=\"2\" width=\"90%\">\n <p>#{vstep}</p>\n <p>#{result[\"test_result_message\"]}</p>\n </td>\n <td width=\"10%\" rowspan=\"1\" align=\"center\"><font id=#{status(result[\"test_result_status\"])}>#{result[\"test_result_status\"]}</font></td>\n </tr>\n EOS\n test_steps += steps\n end\n\n test_steps += <<-EOS\n </table>\n EOS\n\n end\n test_case += test_steps\n end\n test_cases += test_case\n test_cases += <<-EOS\n </div>\n <div id=\"footer\">\n <a href=\"##{replace_space_by_dash(test_report[:test_suite_title])}\">back to test suite</a> <a href=\"#summary\">back to summary</a>\n\t </div>\n </div>\n <br>\n EOS\n end\n\n report_footer = <<-EOS\n <br>\n <hr>\n <br>\n </body>\n </html>\n EOS\n\n html_report += report_header + report_tc_summary + test_cases + report_footer\n\n html_report\n end",
"def init_report\n @info = Scraper::ImeiReport.new\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
GET /bbs GET /bbs.json
|
def index
@bbs = Bb.all
end
|
[
"def show\n @bbs_body = BbsBody.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @bbs_body }\n end\n end",
"def show\n @bb = Bb.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @bb }\n end\n end",
"def new\n @bbs_body = BbsBody.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @bbs_body }\n end\n end",
"def show\n @bbs_comment = BbsComment.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @bbs_comment }\n end\n end",
"def index\n @baosong_bs = BaosongB.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @baosong_bs }\n end\n end",
"def show\n @bbs_topic = BbsTopic.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @bbs_topic }\n end\n end",
"def show\n @bbs_group = BbsGroup.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @bbs_group }\n end\n end",
"def show\n @testbb = Testbb.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @testbb }\n end\n end",
"def get_blasts()\n exec_get(\"#{@base_path}/api/v2/customers/blasts.json?api_key=#{@api_key}\")\n end",
"def show\n @weibo = Weibo.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @weibo }\n end\n end",
"def show\n @blast = Blast.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @blast }\n end\n end",
"def show\n @lbb = Lbb.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @lbb }\n end\n end",
"def index\n\t\tboats = Boat.all\n \trender json: boats, status: 200\n\tend",
"def new\n @bbs_topic = BbsTopic.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @bbs_topic }\n end\n end",
"def new\n @bb = Bb.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @bb }\n end\n end",
"def show\n @bread = Bread.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @bread }\n end\n end",
"def show\n @bloque = Bloque.find(params[:id])\n\n render json: @bloque\n end",
"def show\n @blab = Blab.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @blab }\n end\n end",
"def show\n @boook = Boook.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @boook }\n end\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
POST /bbs POST /bbs.json
|
def create
@bb = Bb.new(bb_params)
respond_to do |format|
if @bb.save
format.html { redirect_to @bb, notice: 'Bb was successfully created.' }
format.json { render :show, status: :created, location: @bb }
else
format.html { render :new }
format.json { render json: @bb.errors, status: :unprocessable_entity }
end
end
end
|
[
"def new\n @bbs_body = BbsBody.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @bbs_body }\n end\n end",
"def create\n @bb = Bb.new(params[:bb])\n\n respond_to do |format|\n if @bb.save\n format.html { redirect_to @bb, notice: 'Bb was successfully created.' }\n format.json { render json: @bb, status: :created, location: @bb }\n else\n format.html { render action: \"new\" }\n format.json { render json: @bb.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @bbs_comment = BbsComment.new(params[:bbs_comment])\n\n respond_to do |format|\n if @bbs_comment.save\n format.html { redirect_to @bbs_comment, notice: 'Bbs comment was successfully created.' }\n format.json { render json: @bbs_comment, status: :created, location: @bbs_comment }\n else\n format.html { render action: \"new\" }\n format.json { render json: @bbs_comment.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @testbb = Testbb.new(params[:testbb])\n\n respond_to do |format|\n if @testbb.save\n format.html { redirect_to @testbb, notice: 'Testbb was successfully created.' }\n format.json { render json: @testbb, status: :created, location: @testbb }\n else\n format.html { render action: \"new\" }\n format.json { render json: @testbb.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @bbs_thread = BbsThread.new(bbs_thread_params)\n\n respond_to do |format|\n if @bbs_thread.save\n write_bbs_session\n\n format.html { redirect_to @bbs_thread, notice: t('notice.thread.create') }\n format.json { render :show, status: :created, location: @bbs_thread }\n else\n format.html { render :new }\n format.json { render json: @bbs_thread.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @kkb = Kkb.new(kkb_params)\n #TODO\n @kkb.kkb_type = :bbs\n @kkb.posted_by_id = current_user.id\n\n respond_to do |format|\n if @kkb.save\n format.html { redirect_to kkbs_path, notice: 'Kkb was successfully created.' }\n format.json { head :created }\n else\n format.html { render :new }\n format.json { render json: @kkb.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @bloque = Bloque.new(params[:bloque])\n\n if @bloque.save\n render json: @bloque, status: :created, location: @bloque\n else\n render json: @bloque.errors, status: :unprocessable_entity\n end\n end",
"def create\n @bs_blog = BsBlog.new(bs_blog_params)\n respond_to do |format|\n if @bs_blog.save\n format.html { redirect_to @bs_blog, notice: 'Bs blog was successfully created.' }\n format.json { render :show, status: :created, location: @bs_blog }\n else\n format.html { render :new }\n format.json { render json: @bs_blog.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @bbhk = Bbhk.new(bbhk_params)\n\n respond_to do |format|\n if @bbhk.save\n format.html { redirect_to @bbhk, notice: 'Bbhk was successfully created.' }\n format.json { render action: 'show', status: :created, location: @bbhk }\n else\n format.html { render action: 'new' }\n format.json { render json: @bbhk.errors, status: :unprocessable_entity }\n end\n end\n end",
"def new\n @bbs_topic = BbsTopic.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @bbs_topic }\n end\n end",
"def create\n @bubot_blog = BubotBlog.new(bubot_blog_params)\n\n respond_to do |format|\n if @bubot_blog.save\n format.html { redirect_to @bubot_blog, notice: 'Bubot blog was successfully created.' }\n format.json { render :show, status: :created, location: @bubot_blog }\n else\n format.html { render :new }\n format.json { render json: @bubot_blog.errors, status: :unprocessable_entity }\n end\n end\n end",
"def new\n @bbs_comment = BbsComment.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @bbs_comment }\n end\n end",
"def create_new_board(name)\n headers = {\n \"X-CSRFToken\" => csrftoken(),\n \"X-Pinterest-Referrer\" => \"http://pinterest.com/\",\n \"X-Requested-With\" => \"XMLHttpRequest\",\n }\n body = {\n :name => name,\n :pass_category => true,\n } \n res = post \"https://pinterest.com/board/create/\", body, headers\n JSON.parse res.content\n end",
"def create\n @bounty = Bounty.new(params[:bounty])\n\n respond_to do |format|\n if @bounty.save\n format.html { redirect_to admin_bounties_path, notice: 'Bounty was successfully created.' }\n format.json { render json: @bounty, status: :created, location: @bounty }\n else\n format.html { render action: \"new\" }\n format.json { render json: @bounty.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @bnpb = Bnpb.new(bnpb_params)\n\n respond_to do |format|\n if @bnpb.save\n format.html { redirect_to @bnpb, notice: 'Bnpb was successfully created.' }\n format.json { render :show, status: :created, location: @bnpb }\n else\n format.html { render :new }\n format.json { render json: @bnpb.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @boook = Boook.new(params[:boook])\n\n respond_to do |format|\n if @boook.save\n format.html { redirect_to @boook, notice: 'Boook was successfully created.' }\n format.json { render json: @boook, status: :created, location: @boook }\n else\n format.html { render action: \"new\" }\n format.json { render json: @boook.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @boat = Boat.new(boat_params)\n\n if @boat.save\n render json: @boat, status: :created, location: @boat\n else\n render json: @boat.errors, status: :unprocessable_entity\n end\n end",
"def create\n\n uids = params[:uids]\n uids.split(\"\\n\").each{|row|\n row = row.strip\n next if row.blank?\n MWeiboMessage.create({user_id:current_user.id, uid:row,content:params[:content],status:0})\n }\n\n respond_to do |format|\n format.html { redirect_to [:client,:weibo_messages], notice: 'Post category was successfully created.' }\n format.json { render json: @weibo_message, status: :created, location: @weibo_message }\n end\n end",
"def create\n @bs_type = BsType.new(bs_type_params)\n\n respond_to do |format|\n if @bs_type.save\n format.html { redirect_to @bs_type, notice: 'Bs type was successfully created.' }\n format.json { render :show, status: :created, location: @bs_type }\n else\n format.html { render :new }\n format.json { render json: @bs_type.errors, status: :unprocessable_entity }\n end\n end\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
PATCH/PUT /bbs/1 PATCH/PUT /bbs/1.json
|
def update
respond_to do |format|
if @bb.update(bb_params)
format.html { redirect_to @bb, notice: 'Bb was successfully updated.' }
format.json { render :show, status: :ok, location: @bb }
else
format.html { render :edit }
format.json { render json: @bb.errors, status: :unprocessable_entity }
end
end
end
|
[
"def update\n @bbs_body = BbsBody.find(params[:id])\n\n respond_to do |format|\n if @bbs_body.update_attributes(params[:bbs_body])\n format.html { redirect_to @bbs_body, notice: 'Bbs body was successfully updated.' }\n format.json { head :ok }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @bbs_body.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @bb = Bb.find(params[:id])\n\n respond_to do |format|\n if @bb.update_attributes(params[:bb])\n format.html { redirect_to @bb, notice: 'Bb was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @bb.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @blab = Blab.find(params[:id])\n\n respond_to do |format|\n if @blab.update_attributes(params[:blab])\n format.html { redirect_to blabs_path, notice: 'good job blabber mouth.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @blab.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @bbs_comment = BbsComment.find(params[:id])\n\n respond_to do |format|\n if @bbs_comment.update_attributes(params[:bbs_comment])\n format.html { redirect_to @bbs_comment, notice: 'Bbs comment was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @bbs_comment.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n if @boat.update(boat_params)\n head :no_content\n else\n render json: @boat.errors, status: :unprocessable_entity\n end\n end",
"def update\n @boook = Boook.find(params[:id])\n\n respond_to do |format|\n if @boook.update_attributes(params[:boook])\n format.html { redirect_to @boook, notice: 'Boook was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @boook.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @bbs_topic = BbsTopic.find(params[:id])\n\n respond_to do |format|\n if @bbs_topic.update_attributes(params[:bbs_topic])\n format.html { redirect_to @bbs_topic, notice: 'Bbs topic was successfully updated.' }\n format.json { head :ok }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @bbs_topic.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @bounty.update(bounty_params)\n format.html { redirect_to @bounty, notice: 'Bounty was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @bounty.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @bounty = Bounty.find(params[:id])\n\n respond_to do |format|\n if @bounty.update_attributes(params[:bounty])\n format.html { redirect_to @bounty, notice: 'Bounty was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @bounty.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @kb = Kb.find(params[:id])\n\n respond_to do |format|\n if @kb.update_attributes(params[:kb])\n format.html { redirect_to @kb, :notice => 'Kb was successfully updated.' }\n format.json { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.json { render :json => @kb.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def patch *args\n make_request :patch, *args\n end",
"def update\n @bread = Bread.find(params[:id])\n\n respond_to do |format|\n if @bread.update_attributes(params[:bread])\n format.html { redirect_to @bread, notice: 'Bread was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @bread.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @bbhk.update(bbhk_params)\n format.html { redirect_to @bbhk, notice: 'Bbhk was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @bbhk.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @boc.update(boc_params)\n format.html { redirect_to @boc, notice: 'Boc was successfully updated.' }\n format.json { render :show, status: :ok, location: @boc }\n else\n format.html { render :edit }\n format.json { render json: @boc.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @bounty = Bounty.find(params[:id])\n\n respond_to do |format|\n if @bounty.update_attributes(params[:bounty])\n format.html { redirect_to admin_bounties_path, notice: 'Bounty was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @bounty.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @boook.update(boook_params)\n format.html { redirect_to @boook, notice: 'Boook was successfully updated.' }\n format.json { render :show, status: :ok, location: @boook }\n else\n format.html { render :edit }\n format.json { render json: @boook.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @bulb.update(bulb_params)\n format.html { redirect_to @bulb, notice: 'Bulb was successfully updated.' }\n format.json { render :show, status: :ok, location: @bulb }\n else\n format.html { render :edit }\n format.json { render json: @bulb.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @bof = Bof.find(params[:id])\n\n respond_to do |format|\n if @bof.update_attributes(params[:bof])\n format.html { redirect_to @bof, notice: 'Bof was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @bof.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @bbs_group = BbsGroup.find(params[:id])\n\n respond_to do |format|\n if @bbs_group.update_attributes(params[:bbs_group])\n format.html { redirect_to @bbs_group, notice: 'Bbs group was successfully updated.' }\n format.json { head :ok }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @bbs_group.errors, status: :unprocessable_entity }\n end\n end\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
DELETE /bbs/1 DELETE /bbs/1.json
|
def destroy
@bb.destroy
respond_to do |format|
format.html { redirect_to bbs_url, notice: 'Bb was successfully destroyed.' }
format.json { head :no_content }
end
end
|
[
"def destroy\n @bb = Bb.find(params[:id])\n @bb.destroy\n\n respond_to do |format|\n format.html { redirect_to bbs_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @testbb = Testbb.find(params[:id])\n @testbb.destroy\n\n respond_to do |format|\n format.html { redirect_to testbbs_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @bbs_body = BbsBody.find(params[:id])\n @bbs_body.destroy\n\n respond_to do |format|\n format.html { redirect_to bbs_bodies_url }\n format.json { head :ok }\n end\n end",
"def destroy\n @bb = Bb.find(params[:id])\n @bb.destroy\n\n respond_to do |format|\n format.html { redirect_to(bbs_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @blab = Blab.find(params[:id])\n @blab.destroy\n\n respond_to do |format|\n format.html { redirect_to blabs_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @b = B.find(params[:id])\n @b.destroy\n\n respond_to do |format|\n format.html { redirect_to bs_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @bread = Bread.find(params[:id])\n @bread.destroy\n\n respond_to do |format|\n format.html { redirect_to bread_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @bbhk.destroy\n respond_to do |format|\n format.html { redirect_to bbhks_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @bbs_comment = BbsComment.find(params[:id])\n @bbs_comment.destroy\n\n respond_to do |format|\n format.html { redirect_to bbs_comments_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @bbs_topic = BbsTopic.find(params[:id])\n @bbs_topic.destroy\n\n respond_to do |format|\n format.html { redirect_to bbs_topics_url }\n format.json { head :ok }\n end\n end",
"def destroy\n @jg_bsb = JgBsb.find(params[:id])\n @jg_bsb.destroy\n\n respond_to do |format|\n format.html { redirect_to jg_bsbs_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @bbs_group = BbsGroup.find(params[:id])\n @bbs_group.destroy\n\n respond_to do |format|\n format.html { redirect_to bbs_groups_url }\n format.json { head :ok }\n end\n end",
"def destroy\n @bl = Bl.find(params[:id])\n @bl.destroy\n\n respond_to do |format|\n format.html { redirect_to bls_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @blast = Blast.find(params[:id])\n @blast.destroy\n\n respond_to do |format|\n format.html { redirect_to blasts_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @bread.destroy\n respond_to do |format|\n format.html { redirect_to breads_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @bulb.destroy\n respond_to do |format|\n format.html { redirect_to bulbs_url, notice: 'Bulb was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @bloque = Bloque.find(params[:id])\n @bloque.destroy\n\n head :no_content\n end",
"def destroy\n @boook = Boook.find(params[:id])\n @boook.destroy\n\n respond_to do |format|\n format.html { redirect_to boooks_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @bounty.destroy\n respond_to do |format|\n format.html { redirect_to bounties_url }\n format.json { head :no_content }\n end\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
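The four `Bb` rows above (index, create, update, destroy) are standard Rails scaffold actions. They presuppose a `resources :bbs` route plus a `set_bb` callback and a `bb_params` strong-parameters method that the dataset rows do not show; below is a hedged sketch of that plumbing, with `:content` as a made-up attribute.

```ruby
# config/routes.rb (assumed): resources :bbs
#
# Sketch of the controller scaffolding around the actions shown above;
# set_bb, bb_params and the :content attribute are assumptions.
class BbsController < ApplicationController
  before_action :set_bb, only: %i[show edit update destroy]

  # GET /bbs
  # GET /bbs.json
  def index
    @bbs = Bb.all
  end

  private

  # Loads the record that update and destroy operate on as @bb.
  def set_bb
    @bb = Bb.find(params[:id])
  end

  # Whitelists the attributes accepted by create and update.
  def bb_params
    params.require(:bb).permit(:content)
  end
end
```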
We use these mock accounts to ensure that the ones available in development are valid, to test OmniAuth actions, and to verify that a mock account is available for every supported OmniAuth provider. They must be identical to the ones in /config/environments/development.rb, so remember to keep the two in sync.
|
def mock_auth_accounts
OmniAuth.config.mock_auth[:facebook] =
OmniAuth::AuthHash.new(
provider: 'facebook',
uid: 'facebook-test-uid-1',
info: {
name: 'facebook user',
email: 'user-facebook@example.com',
username: 'user_facebook'
},
credentials: {
token: 'fb_mock_token',
secret: 'fb_mock_secret'
}
)
OmniAuth.config.mock_auth[:google] =
OmniAuth::AuthHash.new(
provider: 'google',
uid: 'google-test-uid-1',
info: {
name: 'google user',
email: 'user-google@example.com',
username: 'user_google'
},
credentials: {
token: 'google_mock_token',
secret: 'google_mock_secret'
}
)
OmniAuth.config.mock_auth[:suse] =
OmniAuth::AuthHash.new(
provider: 'suse',
uid: 'suse-test-uid-1',
info: {
name: 'suse user',
email: 'user-suse@example.com',
username: 'user_suse'
},
credentials: {
token: 'suse_mock_token',
secret: 'suse_mock_secret'
}
)
OmniAuth.config.mock_auth[:github] =
OmniAuth::AuthHash.new(
provider: 'github',
uid: 'github-test-uid-1',
info: {
name: 'github user',
email: 'user-github@example.com',
username: 'user_github'
},
credentials: {
token: 'github_mock_token',
secret: 'github_mock_secret'
}
)
end
|
[
"def mock_login\n if Rails.env.development?\n a = Account.find_or_create_by(name: \"development\")\n user = User.find_or_create_by(username: \"luis.perichon\")\n user.current_account_id = a.id\n user.save\n\n sign_in(user)\n end\n end",
"def mock_auth_hash(merchant)\n return {\n provider: merchant.provider,\n uid: merchant.uid,\n username: merchant.username,\n info: {\n email: merchant.email,\n name: merchant.username,\n nickname: merchant.username,\n },\n }\n end",
"def mock_auth; end",
"def mock_twitter_hash\n\t\tOmniAuth.config.mock_auth[:twitter] = { provider: 'twitter',\n\t\t uid: '54321',\n\t\t info: {\n\t\t\t\t nickname: 'NatashaTheRobot',\n\t\t\t\t first_name: 'Natasha',\n\t\t\t\t last_name: 'RobotTester',\n\t\t\t\t email: 'hi@natashatherobot.com'\n\t\t },\n\t\t}\n\tend",
"def mock_omniauth(provider, test_mode=true)\n\n # call method with mock values and attributes for given provider\n params_provider = send(\"params_#{provider}\")\n\n # deactivate test_mode\n OmniAuth.config.test_mode = test_mode ? true : false\n\n #deliver omniauth mock credentials\n OmniAuth.config.add_mock(provider.to_sym, params_provider)\n end",
"def log_in(user, provider: \"google_oauth2\")\n OmniAuth.config.mock_auth[provider.to_sym] = OmniAuth::AuthHash.new({\n provider: provider.to_s,\n uid: \"123456789\",\n info: {\n name: user.full_name,\n email: user.email\n }\n })\n\n get omniauth_callback_path(provider:)\nend",
"def configured_providers\n Rails.configuration.providers.select do |provider|\n Rails.configuration.send(\"omniauth_#{provider}\")\n end\n end",
"def test_bank_profile\r\n\r\n @place_holder.login_and_assert_profile(PropertiesReader.get_bank_profile_url)\r\n\r\n end",
"def check_accounts\n\t\t( @participant.facebook_account != nil && @participant.facebook_account != \"\" ) ||\n\t\t( @participant.twitter_account != nil && @participant.twitter_account != \"\" ) ||\n\t\t( @participant.github_account != nil && @participant.github_account != \"\" ) ||\n\t\t( @participant.google_plus_account != nil && @participant.google_plus_account != \"\" )\n\tend",
"def mock_invalid_hash\n\t\tOmniAuth.config.mock_auth[:twitter] = :invalid_credentials\n\tend",
"def stub_provider_account(attributes = {})\n FactoryBot.build_stubbed(:provider_account, attributes)\n end",
"def provider_auth_hash(provider, omniauth)\n case provider\n when :facebook\n {\n provider: omniauth.provider,\n uid: omniauth.uid,\n username: omniauth.info.nickname,\n profile_url: omniauth.info.urls['Facebook'],\n token: (omniauth.credentials.token rescue nil),\n token_expires_at: (Time.at(omniauth.credentials.expires_at) rescue nil),\n secret: nil\n }\n when :twitter\n {\n provider: omniauth.provider,\n uid: omniauth.uid,\n username: omniauth.info.nickname,\n profile_url: omniauth.info.urls['Twitter'],\n token: (omniauth.credentials.token rescue nil),\n token_expires_at: (Time.at(omniauth.credentials.expires_at) rescue nil),\n secret: (omniauth.credentials.secret rescue nil)\n }\n end\n end",
"def test_address_auth \n c = customers(:bob)\n assert_equal(addresses(:address1), c.find_address(1))\n assert_equal(addresses(:address2), c.find_address(2))\n assert_nil(c.find_address(3))\n assert_nil(c.find_address(100000))\n end",
"def omniauth?\n provider? && uid?\n end",
"def verify_account!\n @verified_account ||= if credentials\n get_account\n else\n say_ok \"\\nWelcome to Gemical! You are new, right?\"\n say_ok \"We need to authenticate your account:\"\n collect_credentials\n end\n end",
"def omniauth_credentials\n if omniauth_hash = request.env['omniauth.auth']\n {\n provider: omniauth_hash['provider'],\n uid: omniauth_hash['uid'],\n email: omniauth_hash['info']['email'],\n name: omniauth_hash['info']['name'],\n }\n else\n nil\n end\n end",
"def test_phone_number_profile\r\n\r\n @place_holder.login_and_assert_profile(PropertiesReader.get_phone_number_profile_url)\r\n\r\n end",
"def real_credentials_available?\n if real_api_key\n STDERR.puts %{\nNOTE: real PT credentials are configured so if the integration tests are missing request cassettes,\nlive queries will be performed to record the actual interaction.\n }\n true\n else\n STDERR.puts %{\nNOTE: real PT credentials are not configured so if the integration tests are missing request cassettes,\nthey will fail. Set real PT credentials with environment variables:\n\n export TEST_PTLOGGER_API_KEY=your_api_key\n export TEST_PTLOGGER_PROJECT_ID=your_project_id\n export TEST_PTLOGGER_STORY_ID=your_story_id\n\n }\n false\n end\n end",
"def test_user?\n test_users.keys.include?(account)\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Delete all entities which match all of the given attributes. attributes: Hash of attributes to match. Returns: Integer count of entities deleted.
|
def delete_all(attributes = {})
raise NotImplementedError.new
end
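A concrete adapter is expected to supply the real behaviour; a minimal in-memory sketch (class and storage names are hypothetical) might look like:

class InMemoryRepository
  def initialize(records = [])
    @records = records   # array of attribute Hashes
  end

  # Delete every record whose attributes include all of the given pairs.
  def delete_all(attributes = {})
    before = @records.size
    @records.reject! { |record| attributes.all? { |k, v| record[k] == v } }
    before - @records.size
  end
end

repo = InMemoryRepository.new([{ name: 'a', active: true }, { name: 'b', active: false }])
repo.delete_all(active: true)   # => 1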
|
[
"def delete_all!\n # find all current criteria and extract operand params from them \n original_criteria = CampaignCriterion.find(:campaign_id => @campaign_id).map do |criterion|\n criterion.select { |k,v| [ :xsi_type, :id ].include?(k) }\n end\n\n # HOTFIX temporarily remove platforms, adwords api throws error on no platforms\n original_criteria.delete_if { |c| c[:xsi_type] == \"Platform\" }\n\n operations = original_criteria.map do |criterion|\n {\n :operator => 'REMOVE',\n :operand => {\n :campaign_id => @campaign_id,\n :criterion => criterion\n }\n }\n end\n \n response = self.mutate(operations)\n\n (response and response[:value]) ? true : false\n end",
"def delete_all(conditions = \"\")\n collection.remove(XGen::SQL::Parser.parse_where(conditions, true) || {})\n end",
"def delete(*args)\n if args.blank?\n raise \"Using delete with no args is not allowed. Please use delete_all to delete all records\"\n end\n keys = find_keys(*args)\n self.store.delete_keys(keys)\n end",
"def delete_all\n klass.delete_all(:conditions => selector)\n end",
"def delete_all\n self.each do |i|\n i[\"_deleted\"] = true\n end\n update_all\n end",
"def delete_by(*args)\n where(*args).delete_all\n end",
"def delete_all\n with_associations {|t,_| t.delete_all}\n end",
"def delete_all\n neo4j_query(\"MATCH (n:`#{mapped_label_name}`) OPTIONAL MATCH (n)-[r]-() DELETE n,r\")\n end",
"def delete_all(conditions={}, &block)\n @items.inject([]) do |items, (key, item)|\n items << @items.delete(key) if match_all(item, conditions, &block)\n items\n end\n end",
"def delete_all!\n delete(query: \"*:*\")\n end",
"def delete_all\n keys = @kvs_instance.keys(safe_key('*'))\n del_num = @kvs_instance.del(keys) if keys.length > 0\n\n (del_num == keys.length)\n end",
"def destroy_all(conditions = nil)\n find(:all, :conditions => conditions).each { |object| object.destroy }\n end",
"def destroy_all\n klass.destroy_all(:conditions => selector)\n end",
"def delete_specific_revision_attributes(attributes, needed_attributes)\n attributes.delete_if { |key, value| needed_attributes.include?(key) }\n attributes\n end",
"def delete_all(conditions = nil)\n raise ActiveRecordError.new(\"delete_all doesn't support limit scope\") if self.limit_value\n\n IdentityMap.repository[symbolized_base_class] = {} if IdentityMap.enabled?\n if conditions\n where(conditions).delete_all\n else\n statement = arel.compile_delete\n affected = @klass.connection.delete(statement, 'SQL', bind_values)\n\n reset\n affected\n end\n end",
"def delete(attributes)\n @connection.execute(delete_statement(attributes), \"#{self.class} Delete from #{@heading.name}\")\n end",
"def delete_all\n sum(&:delete_all)\n end",
"def destroy_all(conditions = nil)\n find_all(conditions).each { |object| object.destroy }\n end",
"def destroy_all(conditions = nil)\n all(:conditions => conditions).each { |object| object.destroy }\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
The default stitching is a board of square tiles. Inherit and override these as necessary.
|
def stitching(x, y)
# Return only what's already been generated (that is, what's above and to the left).
return [:normal, [
([x - 1, y] if x > 0),
([x - 1, y - 1] if x > 0 and y > 0),
([x, y - 1] if y > 0),
([x + 1, y - 1] if y > 0 and x < @field_width - 1)
].compact]
end
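A self-contained sketch showing which already-generated neighbours a tile stitches to (the class name and field width are illustrative):

class SquareBoard
  def initialize(field_width)
    @field_width = field_width
  end

  # Same logic as above: only tiles above and to the left are returned.
  def stitching(x, y)
    [:normal, [
      ([x - 1, y] if x > 0),
      ([x - 1, y - 1] if x > 0 && y > 0),
      ([x, y - 1] if y > 0),
      ([x + 1, y - 1] if y > 0 && x < @field_width - 1)
    ].compact]
  end
end

SquareBoard.new(4).stitching(2, 1)
# => [:normal, [[1, 1], [1, 0], [2, 0], [3, 0]]]
SquareBoard.new(4).stitching(0, 0)
# => [:normal, []]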
|
[
"def flip_board\n temp_board = deep_dup_array @board\n @size.times do |x|\n @size.times do |y|\n @board[x][y] = temp_board[@size - x - 1][y]\n end\n end\n pieces_fall\n end",
"def randomize_board\n (@size + rand(@size)).times do\n flip_bit rand(@size), rand(@size)\n end\n end",
"def rotate_board\n temp_board = deep_dup_array @board\n @size.times do |x|\n @size.times do |y|\n @board[x][y] = temp_board[@size - y - 1][x]\n end\n end\n pieces_fall\n end",
"def place_knights\r\n $board[1][0] = Knight.new('white')\r\n\t\t$board[6][0] = Knight.new('white')\r\n\t\t$board[1][7] = Knight.new('black')\r\n\t\t$board[6][7] = Knight.new('black')\r\n end",
"def reset(cols, rows)\n\t\t@holeSize = 3 + rand(2)\n\t\t@holeHeight = rows / 4 + rand * (rows / 2)\n\t\t@tubePosition = cols - 1\n\t\t@lastPosition = @tubePosition\n\t\t@passed = false\n\tend",
"def set_pieces\n\t\t(0..7).each do |row|\n\t\t\tcase row\n\t\t\twhen 0\n\t\t\t\tplace_special_pieces(row, :white)\n\t\t\twhen 1\n\t\t\t\tplace_pawns(row, :white)\n\t\t\twhen 6\n\t\t\t\tplace_pawns(row, :black)\n\t\t\twhen 7\n\t\t\t\tplace_special_pieces(row, :black)\n\t\t\tend\n\t\tend\n\tend",
"def place_pawn_first_rows\n 8.times { |col| self[[1, col]] = Pawn.new(self, :black, [1, col]) }\n 8.times { |col| self[[6, col]] = Pawn.new(self, :white, [6, col]) }\n end",
"def reset_corners\n set_tile(0, 0, @grass)\n set_tile(MAPSIZE - 1, 0, @grass)\n set_tile(0, MAPSIZE - 1, @grass)\n set_tile(MAPSIZE - 1, MAPSIZE - 1, @grass)\n end",
"def resetBoard\n (0..3).each do |x|\n (0..3).each { |y| @board[x][y].setTileVal(0) }\n end\n end",
"def create_tiles(tiles, seed = Random.new)\n words = DICTIONARY.sample(tiles, random: seed)\n self.board = []\n tiles.times do |i|\n board << {\n position: i,\n codeword: words.shift,\n type: BYSTANDER,\n revealed: false\n }\n end\n end",
"def tile_board\n @board_array.each_with_index do |row_array, row|\n 10.times{|column| row_array << Tile.new(row,column)}\n end\n end",
"def get_surrounding_tiles(row, column, cell)\n surrounding_tiles = []\n index = GAME_BOARD[column-1].index(cell)\n if column == 1 \n if index == 0\n surrounding_tiles << GAME_BOARD[column-1][index+1]\n surrounding_tiles << GAME_BOARD[column][index]\n elsif index == 8\n surrounding_tiles << GAME_BOARD[column-1][index-1]\n surrounding_tiles << GAME_BOARD[column][index]\n else\n surrounding_tiles << GAME_BOARD[column-1][index-1] \n surrounding_tiles << GAME_BOARD[column-1][index+1] \n surrounding_tiles << GAME_BOARD[column][index]\n end\n elsif column == 12\n if index == 0\n surrounding_tiles << GAME_BOARD[column-1][index+1]\n surrounding_tiles << GAME_BOARD[column-2][index] \n elsif index == 8\n surrounding_tiles << GAME_BOARD[column-1][index-1]\n surrounding_tiles << GAME_BOARD[column-2][index] \n else\n surrounding_tiles << GAME_BOARD[column-1][index-1] \n surrounding_tiles << GAME_BOARD[column-1][index+1] \n surrounding_tiles << GAME_BOARD[column-2][index]\n end\n elsif index == 0\n surrounding_tiles << GAME_BOARD[column-1][index+1] \n surrounding_tiles << GAME_BOARD[column][index]\n surrounding_tiles << GAME_BOARD[column-2][index]\n elsif index == 8\n surrounding_tiles << GAME_BOARD[column-1][index-1] \n surrounding_tiles << GAME_BOARD[column][index]\n surrounding_tiles << GAME_BOARD[column-2][index]\n else\n surrounding_tiles << GAME_BOARD[column-1][index+1] \n surrounding_tiles << GAME_BOARD[column-1][index-1] \n surrounding_tiles << GAME_BOARD[column][index]\n surrounding_tiles << GAME_BOARD[column-2][index]\n end\n\n surrounding_tiles\n end",
"def add_spacings\n 2.times do |i|\n @chess_board =\n if i.zero?\n add_top_spacing(chess_board, 2)\n else\n add_bottom_spacing(chess_board, 2)\n end\n end\n end",
"def put_black\n i = 0\n while i<=7 do\n n = Pawn.new([i,6],\"\\u265F\",false)\n put_piece(n)\n i+=1\n end\n n = Rook.new([0,7],\"\\u265C\",false)\n put_piece(n)\n n = Rook.new([7,7],\"\\u265C\",false)\n put_piece(n)\n n = Knight.new([1,7],\"\\u265E\",false)\n put_piece(n)\n n = Knight.new([6,7],\"\\u265E\",false)\n put_piece(n)\n n = Bishop.new([2,7],\"\\u265D\",false)\n put_piece(n)\n n = Bishop.new([5,7],\"\\u265D\",false)\n put_piece(n)\n n = Queen.new([3,7],\"\\u265B\",false)\n put_piece(n)\n n = King.new([4,7],\"\\u265A\",false)\n put_piece(n,true)\n end",
"def split_tile_set_into_tiles\n number_of_tiles = @tile_set[0].columns/32\n\n number_of_tiles.times do |i|\n image = @tile_set[0].crop((i*32), 0, 32, 32)\n image.write(\"media/tmp/tile#{i}.png\")\n end\n end",
"def generate_board\n [0, 1, 2].each do |row|\n 0.upto(7) do |col|\n offset = row + col\n self[[row, col]] = Piece.new([row, col], self, :B) if offset.odd?\n end\n end\n\n [5, 6, 7].each do |row|\n 0.upto(7) do |col|\n offset = row + col\n self[[row, col]] = Piece.new([row, col], self, :W) if offset.odd?\n end\n end\n end",
"def setup(tiles = TILES, spies = SPIES, seed = Random.new)\n create_tiles(tiles, seed)\n tiles = (0..(tiles - 1)).to_a.shuffle(random: seed)\n set_spies(RED_SPY, tiles.shift(spies))\n set_spies(BLUE_SPY, tiles.shift(spies))\n set_spies([RED_SPY, BLUE_SPY].sample(random: seed), tiles.shift(1))\n set_spies(ASSASSIN, tiles.shift(1))\n self.spymaster = SecureRandom.urlsafe_base64\n self.teams = SecureRandom.urlsafe_base64\n self.turn = first_turn\n self\n end",
"def initializeBlackSide(x)\n x[7][1] = Pawn.new(color: @player2.color, player: @player2)\n x[7][2] = Pawn.new(color: @player2.color, player: @player2)\n x[7][3] = Pawn.new(color: @player2.color, player: @player2)\n x[7][4] = Pawn.new(color: @player2.color, player: @player2)\n x[7][5] = Pawn.new(color: @player2.color, player: @player2)\n x[7][6] = Pawn.new(color: @player2.color, player: @player2)\n x[7][7] = Pawn.new(color: @player2.color, player: @player2)\n x[7][8] = Pawn.new(color: @player2.color, player: @player2)\n \n #other pieces\n x[8][1] = Rook.new(color: @player2.color, player: @player2) \n x[8][2] = Knight.new(color: @player2.color, player: @player2) \n x[8][3] = Bishop.new(color: @player2.color, player: @player2) \n x[8][4] = Queen.new(color: @player2.color, player: @player2) \n x[8][5] = King.new(color: @player2.color, player: @player2) \n x[8][6] = Bishop.new(color: @player2.color, player: @player2) \n x[8][7] = Knight.new(color: @player2.color, player: @player2) \n x[8][8] = Rook.new(color: @player2.color, player: @player2) \n\n return x\n end",
"def tile_board\n board_array.each do |row|\n 9.times {row << Tile.new}\n end\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Public Methods. process: validates, sets up all appropriate request data, registers contexts, and returns self.
|
def process
raise RequestValidationError unless validate
@roles = get_roles
@user = get_or_create_user
@resource = get_resource
@assignment = get_assignment
@space = get_space
@space_user = get_or_create_space_user
register
self
end
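A hedged usage sketch; the wrapper class name, reader methods, and error handling are all assumptions, since only the process method is shown:

begin
  launch = LtiLaunchRequest.new(params).process   # hypothetical class wrapping this method
  launch.user                                     # assumes attr_readers for the ivars set above
  launch.space_user
rescue RequestValidationError
  head :unauthorized                              # hypothetical controller response
end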
|
[
"def process\n raise RequestValidationError unless validate\n\n @roles = get_roles\n @user = get_or_create_user\n @resource = get_resource\n\n self\n end",
"def process\n warn \"Processing configuration...\"\n process_config\n warn \"Processing initializers...\"\n process_initializers\n warn \"Processing libs...\"\n process_libs\n warn \"Processing routes...\"\n process_routes\n warn \"Processing templates...\"\n process_templates\n warn \"Processing models...\"\n process_models\n warn \"Processing controllers...\"\n process_controllers\n tracker\n end",
"def validate(context = :all)\n self.class.validators.set(context).validate self\n end",
"def _prepare_context; end",
"def process_request\n walk_the_routes\n end",
"def run_validations(context)\n __run_validations__(context)\n end",
"def validate_request(&block)\n if\n validate_request_set &&\n validate_operation_exists\n then\n yield if block_given?\n validate_params\n end\n end",
"def process_params\n set_params_authable if process_params_authable?\n set_params_ownerable if process_params_ownerable?\n set_params_sub_action\n end",
"def call\n all_steps = parents.map(&:steps) + [steps]\n\n result_steps = all_steps.inject { |result, steps| result.merge(steps) }\n\n result_steps[:key_validator] = key_validator if config.validate_keys\n result_steps[:key_coercer] = key_coercer\n result_steps[:value_coercer] = value_coercer\n result_steps[:rule_applier] = rule_applier\n result_steps[:filter_schema] = filter_schema.rule_applier if filter_rules?\n\n processor_type.new(schema_dsl: self, steps: result_steps)\n end",
"def validate\n fail ArgumentError, 'Must provide a run_context OR host to initialize' unless\n run_context || host\n end",
"def process_context context\n\n # Add your own data organisation here\n\n # If you wish, you can examine context[:src_filename] to determine\n # which view you are preparing. Alternatively, you can just give\n # every view an identical context.\n\n add_content_items_to_collections context\n\n if context[:src_filename] == './src/indicator-visualisations/index.html.mustache'\n add_viz_styles context\n add_viz_scripts context\n end\n if context[:src_filename] == './src/indicator-descriptions/index.html.mustache'\n add_desc_styles context\n add_desc_scripts context\n end\n if context[:src_filename] == './src/indicator-descriptions/all/index.html.mustache'\n add_all_desc_styles context\n add_all_desc_scripts context\n end\n end",
"def request_validation\n if is_api_validator_applicable?(params[:controller], params[:action])\n # validation - parameters defination\n validation_pd = VALIDATION_CONFIG[params[:controller]][params[:action]]\n \n validation_pd.keys.each do |key|\n next if params.has_key?(key) == false && validation_pd[key][\"rules\"].has_key?(\"presence\") == false\n validation_pd[key][\"rules\"].each do |rule, defination|\n # when param's value is JSON string then parse it and validate parameters\n if (rule == \"json_string\" and defination == true)\n begin\n json_data = JSON.parse(params[key]) rescue params[key]\n json_data = [json_data] unless json_data.class == Array\n json_data.each do |data|\n data.keys.each do |json_data_key|\n if validation_pd[key].has_key?(\"parameters\")\n next unless validation_pd[key][\"parameters\"].has_key?(json_data_key)\n validation_pd[key][\"parameters\"][json_data_key][\"rules\"].each do |json_data_rule, json_data_defination|\n #CAUTION: if nested JSON, this should be recursive\n return error_response(validation_pd[key][\"parameters\"][json_data_key][\"messages\"][json_data_rule]) if validate?(json_data_key, data[json_data_key], json_data_rule, json_data_defination, validation_pd[key][\"parameters\"][json_data_key])\n end\n end\n end\n end\n rescue JSON::ParserError => e \n return error_response(validation_pd[key][\"messages\"][rule], 422)\n end\n # when param's value is NOT JSON\n else\n return error_response(validation_pd[key][\"messages\"][rule]) if validate?(key, params[key], rule, defination, validation_pd[key])\n end\n end # param rule loop end\n end # params list loop end\n end # main if end\n end",
"def request_validation_phase=(_arg0); end",
"def post_context; end",
"def prepare\n set_options\n set_headers\n set_callbacks\n end",
"def validate_schemas!\n validate_request_schema!\n yield\n validate_response_schema! if Respect::Rails::Engine.validate_response\n end",
"def perform\n not_authorized unless can_create?\n\n @model = consultation_request = ConsultationRequest.new attrs\n consultation_request.patient = patient\n online = Services.online_status.online?(consultation_request.doctor.user)\n\n if online && consultation_request.save\n Services.analytics.track_new_request consultation_request\n\n consultation_request\n else\n validation_error\n end\n end",
"def process_request(env)\n end",
"def validate\n @errors ||= {}\n params.each{ |x| x.validate }\n self\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Helper function to expect a class to have a set of options defined. These options are not first-class citizens of Puppet but a key-value map, so regular RSpec matchers don't help. To stay DRY, introduce this helper.
|
def expect_option(klass, key, val)
# test each option
it do
should contain_class(klass).with_options(
lambda do |map|
if map[key] == val
true
else
fail "#{klass} option #{key.inspect} doesn't match (-- expected, ++ actual):\n"\
"-- #{val.inspect}\n"\
"++ #{map[key].inspect}\n"
end
end
)
end
end
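A hedged usage sketch inside an rspec-puppet example group (the class name and option values are invented for illustration):

describe 'myapp::config' do
  # Each call defines one `it` example that checks a single option key.
  expect_option('myapp::config', 'listen_port', 8080)
  expect_option('myapp::config', 'log_level', 'info')
end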
|
[
"def expect_option(klass, key, val)\n # test each option\n it do\n should contain_class(klass).with_options(\n lambda do |map|\n # check\n if map[key] == wrap_expected(val)\n true\n else\n fail \"#{klass} option #{key.inspect} doesn't match (-- expected, ++ actual):\\n\"\\\n \"-- #{val.inspect}\\n\"\\\n \"++ #{map[key].inspect}\\n\"\n end\n end\n )\n end\nend",
"def class_options(options=nil)\n @class_options ||= from_superclass(:class_options, Thor::CoreExt::OrderedHash.new)\n build_options(options, @class_options) if options\n @class_options\n end",
"def test_options_block\n assert_not_nil(@opts2)\n assert_not_nil(@opts2.foo)\n assert_equal 'foo', @opts2.foo\n assert_equal 'baz', @opts2.baz\n end",
"def assert_chaos_options( chaos )\n eval = -> { [Array, String, TrueClass, FalseClass, Symbol].include? chaos.class }\n msg = \"chaos: arg does not support object of type: '#{chaos.class}'\"\n raise NoSuchAttributeError, msg unless eval.call\n\n case chaos\n when Array\n assert_only_known_and_optional_attributes_for_chaos(chaos)\n when String, Symbol\n assert_only_known_and_optional_attributes_for_chaos([chaos])\n end\n end",
"def assert_valid_options\n valid_options = {\n :skip_gemfile => false,\n :skip_bundle => false,\n :skip_git => false,\n :skip_test_unit => false,\n :edge => false\n }\n valid_options.each do |key, expected|\n next unless options.key?(key)\n actual = options[key]\n unless actual == expected\n fail Rails::Generators::Error, \"Unsupported option: #{key}=#{actual}\"\n end\n end\nend",
"def assert_valid_options\n valid_options = {\n skip_gemfile: false,\n skip_git: false,\n skip_test_unit: false,\n edge: false\n }\n valid_options.each do |key, expected|\n next unless options.key?(key)\n actual = options[key]\n unless actual == expected\n fail Rails::Generators::Error, \"Unsupported option: #{key}=#{actual}\"\n end\n end\nend",
"def assert_valid_options\n valid_options = {\n skip_gemfile: false,\n skip_bundle: false,\n skip_git: false\n }\n valid_options.each do |key, expected|\n next unless options.key?(key)\n actual = options[key]\n unless actual == expected\n fail Rails::Generators::Error, \"Unsupported option: #{key}=#{actual}\"\n end\n end\nend",
"def should_deny_values(options)\n klass = self.name.gsub(/Test$/, '').constantize\n\n context \"#{klass}\" do\n options.each_pair do |attribute, values|\n [*values].each do |value|\n display_value = value.class == NilClass ? \"nil\" : \"\\\"#{value}\\\"\"\n \n should \"not allow #{attribute} to be #{display_value}\" do\n instance = get_instance_of(klass)\n instance.send(\"#{attribute}=\", value)\n assert !instance.valid?, \n \"Expected #{klass} to be invalid when #{attribute} is set to #{display_value}\"\n assert instance.errors.on(attribute.to_sym), \n \"Expected errors on #{attribute} when set to #{display_value}\"\n end\n end\n end\n end\n end",
"def ensure_valid_options\n valid_options = {\n skip_gemfile: false,\n skip_bundle: false,\n skip_git: false\n }\n valid_options.each do |key, expected|\n next unless options.key?(key)\n actual = options[key]\n unless actual == expected\n fail Rails::Generators::Error, \"Unsupported option: #{key}=#{actual}\"\n end\n end\nend",
"def verify_options_hook; end",
"def assert_required_options\n missing = required_options - present_keys\n\n unless missing.empty?\n raise \"Missing options: #{missing.inspect}\"\n end\n end",
"def update_options_with_class!(options, klass)\n options[:class] ||= ''\n options[:class] << \" #{klass}\"\n options\n end",
"def resolved_options!(klass)\n klass.value_provider_options.settings\n end",
"def options_with_specifications\n @options_with_specs ||= options.select{|x| x.has_input?}.sort_by{|x| x.option_set.name}\n end",
"def update_options_with_class!(options, klass)\n @template.update_options_with_class!(options, klass)\n end",
"def find_class_options_for_query_with_and(query, options={})\n options\n end",
"def check_options_against_database\n # extract the keys from the options hash as strings\n option_keys = @options.stringify_keys.keys\n # fetch the names of the Class's database table columns\n klass_columns = klass_name_constant.columns.collect { |c| c.name }\n # @should create new will be true if there are any option keys that are not in db table\n @should_create_new = !!option_keys.detect { |opt| !klass_columns.include?(opt) }\n end",
"def valid_opts\n true\n end",
"def insert_dependence_options(opts, class_options)\r\n class_options.each do |name, args|\r\n next if @@templates[name].nil? or @@templates[name].dependence.nil?\r\n @@templates[name].dependence.each{ |d| opts << d }\r\n end\r\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Get the list of currently available books.
|
def available_books
puts "Available Books:"
@book_status.each { |k, v| puts "#{k}" if v == "available" }
end
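A small self-contained sketch, assuming @book_status maps titles to status strings:

class Library
  def initialize(book_status)
    @book_status = book_status
  end

  def available_books
    puts "Available Books:"
    @book_status.each { |k, v| puts "#{k}" if v == "available" }
  end
end

Library.new("Dune" => "available", "Emma" => "checked out").available_books
# Available Books:
# Dune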
|
[
"def available_books\n \n available_books = []\n \n @books.each do |book|\n \n if book.status == :available\n \n available_books << book.title\n\n end\n\n end\n\n end",
"def available_books\n @books.each do |book|\n if book.status == \"available\"\n puts \"#{book.title} by #{book.author} is available.\"\n end\n end\n end",
"def available_books\n @books.select { |book| book.status == 'available' }\n end",
"def showAvailableBooks \n\t@library.each_with_index do |book, index|\n\t\tif book.isAvailable == \"Available\"\n\t\t\tputs \"Id: #{index}\" \n\t\t\t\t\t\tputs book\n\t\t# only show books that are not currently borrowed\n\t\tend\n\tend\nend",
"def list_books\n @books.each { |book| puts book.title + \" by \" + book.author + \" is \" + book.status + \".\" }\n end",
"def list\n\t\t@books = Book.all\n\tend",
"def list_books\n @books.each { |book| puts \"#{book.title} - #{book.author} : #{book.status}\"}\n end",
"def list_books\n @books.each do |book|\n puts \"#{book.title}: #{book.status}\"\n end\n end",
"def borrowed_books_list\n @checked_out_books.each do |book|\n puts book.title\n end\n end",
"def borrowed_books_list\n @borrowed_books.each do |book|\n puts \"#{book.title} by #{book.author}\"\n end\n end",
"def borrowed_books_list\n @borrowed_books.each {|book| puts \"'#{book.title}' by #{book.author}\" }\n end",
"def list_books\n @books.each {|book| puts \"'#{book.title}' is #{book.status}.\" }\n end",
"def borrowed_books_list\n @borrowed_books.each { |book| \n puts book.title + \" by \" + book.author + \".\"\n }\n end",
"def index\n @books = Book.get_avaible_books\n end",
"def list_books\n @books.each do |book|\n print book.title + ' - ' + book.status\n puts ''\n end\n end",
"def index\n @provided_books = ProvidedBook.all\n end",
"def borrowed_books\n @books.each do |book|\n if book.status === \"unavailable\"\n puts \"#{book.borrower.name}: #{book.title}\"\n else\n end\n end\n end",
"def lib_books\n @contains_books = Contain.get_books_of_lib(@library.id)\n end",
"def all_books\n puts \"All Library Books:\"\n @shelves.each do |shelf|\n shelf.books\n end\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Derives the MAC key and the encryption key.
|
def derive_keys(key)
derived_key_size = key.size / 2
mac_key = key[0...derived_key_size]
enc_key = key[derived_key_size..-1]
return mac_key, enc_key
end
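For example, a 64-byte master key yields a 32-byte MAC key and a 32-byte encryption key (a sketch assuming derive_keys is in scope):

require 'securerandom'

master_key = SecureRandom.random_bytes(64)
mac_key, enc_key = derive_keys(master_key)
mac_key.bytesize   # => 32
enc_key.bytesize   # => 32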
|
[
"def mac_encryption_key\n upload_key[0,4]\n end",
"def get_crypto_key_hex\n return @crypto_key if ! @crypto_key\n @crypto_key.unpack(\"H*\")\n end",
"def derive_format_2_mac_key(passphrase)\n key = ::OpenSSL::Digest::SHA1.new\n key.update(FORMAT_2_MAC_KEY)\n key.update(passphrase) if passphrase\n key.digest\n end",
"def encryption_key; end",
"def key_derivation_salt; end",
"def to_der\n if in_hardware?\n raise R509::R509Error, \"This method cannot be called when using keys in hardware\"\n end\n self.key.to_der\n end",
"def compute_private_mac(passphrase, encryption_type, padded_private_blob)\n key = ::OpenSSL::Digest::SHA1.new\n key.update(MAC_KEY)\n key.update(passphrase) if passphrase\n data = Util.ssh_pack(@algorithm, encryption_type, @comment || '', @public_blob, padded_private_blob)\n ::OpenSSL::HMAC.hexdigest(::OpenSSL::Digest::SHA1.new, key.digest, data)\n end",
"def key(hash160)\n key = Bitcoin::Key.new\n key.priv = hash160\n key\n end",
"def encrypted_key\n @rsa.private_encrypt generate_key\n end",
"def mac_iv\n [upload_key[4], upload_key[5], upload_key[4], upload_key[5]]\n end",
"def key(hash160)\n key = Bitcoin::Key.new\n key.priv = hash160\n key\n end",
"def encryption_key_sha256\n return nil unless @gapi.customer_encryption\n Base64.decode64 @gapi.customer_encryption.key_sha256\n end",
"def to_der_hex_s\n @key.to_der.unpack('H*').first\n end",
"def private_key\n @private_key.to_der\n end",
"def derive_key(base_key)\n key = [base_key].pack('L<')\n key1 = [key[0] , key[1] , key[2] , key[3] , key[0] , key[1] , key[2]]\n key2 = [key[3] , key[0] , key[1] , key[2] , key[3] , key[0] , key[1]]\n [transform_key(key1.join), transform_key(key2.join)]\n end",
"def create_encryption_info\n header = [3, 0, 2, 0] # version\n # Header flags copy\n header.concat [0x24, 0, 0, 0] #flags -- VERY UNSURE ABOUT THIS STILL\n # header.concat [0, 0, 0, 0] #unused\n header.concat [0xA4, 0, 0, 0] #length\n # Header\n header.concat [0x24, 0, 0, 0] #flags again\n # header.concat [0, 0, 0, 0] #unused again,\n header.concat [0x0E, 0x66, 0, 0] #alg id\n header.concat [0x04, 0x80, 0, 0] #alg hash id\n header.concat [key.size, 0, 0, 0] #key size\n header.concat [0x18, 0, 0, 0] #provider type\n # header.concat [0, 0, 0, 0] #reserved 1\n # header.concat [0, 0, 0, 0] #reserved 2\n header.concat [0xA0, 0xC7, 0xDC, 0x2, 0, 0, 0, 0]\n header.concat \"Microsoft Enhanced RSA and AES Cryptographic Provider (Prototype)\".bytes.to_a.pack('s*').bytes.to_a\n header.concat [0, 0] #null terminator\n\n #Salt Size\n header.concat [salt.bytes.to_a.size].pack('l').bytes.to_a\n #Salt\n header.concat salt.bytes.to_a.pack('c*').bytes.to_a\n # encryption verifier\n header.concat encrypted_verifier.bytes.to_a.pack('c*').bytes.to_a\n\n # verifier hash size -- MUST BE 32 bytes\n header.concat [verifier_hash.bytes.to_a.size].pack('l').bytes.to_a\n\n #encryption verifier hash\n header.concat encrypted_verifier_hash.bytes.to_a.pack('c*').bytes.to_a\n\n header.flatten!\n header.pack('c*')\n end",
"def key_derivation_salt=(_arg0); end",
"def new( key )\n mac = dup\n mac.key = key[ 0, key_length ]\n return mac\n end",
"def compute_private_mac(format, mac_key, encryption_type, padded_private_blob)\n digest = format <= 2 ? ::OpenSSL::Digest::SHA1 : ::OpenSSL::Digest::SHA256\n data = Util.ssh_pack(@algorithm, encryption_type, @comment || '', @public_blob, padded_private_blob)\n ::OpenSSL::HMAC.hexdigest(digest.new, mac_key, data)\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Default empty implementation of _roda_after, usually overridden by Roda.def_roda_after.
|
def _roda_after(res)
end
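At the application level these hooks are normally installed via Roda's hooks plugin rather than by overriding _roda_after directly; a hedged sketch:

require 'roda'

class App < Roda
  plugin :hooks

  # The after hook is what def_roda_after ultimately wires into _roda_after.
  after do |res|
    # res is the rack response; log or mutate it here
  end

  route do |r|
    r.root { "ok" }
  end
end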
|
[
"def def_roda_after\n meths = private_instance_methods.grep(/\\A_roda_after_\\d\\d/).sort\n unless meths.empty?\n plugin :error_handler unless private_method_defined?(:_roda_after)\n if meths.length == 1\n class_eval(\"alias _roda_after #{meths.first}\", __FILE__, __LINE__)\n else\n class_eval(\"def _roda_after(res); #{meths.map{|s| \"#{s}(res)\"}.join(';')} end\", __FILE__, __LINE__)\n end\n private :_roda_after\n alias_method :_roda_after, :_roda_after\n end\n end",
"def def_roda_after\n meths = private_instance_methods.grep(/\\A_roda_after_\\d\\d/).sort\n unless meths.empty?\n plugin :error_handler unless private_method_defined?(:_roda_after)\n if meths.length == 1\n class_eval(\"alias _roda_after #{meths.first}\", __FILE__, __LINE__)\n else\n class_eval(\"def _roda_after(res); #{meths.map{|s| \"#{s}(res)\"}.join(';')} end\", __FILE__, __LINE__)\n end\n private :_roda_after\n end\n end",
"def _roda_before\n end",
"def def_roda_before\n meths = private_instance_methods.grep(/\\A_roda_before_\\d\\d/).sort.join(';')\n class_eval(\"def _roda_before; #{meths} end\", __FILE__, __LINE__)\n private :_roda_before\n end",
"def def_roda_before\n meths = private_instance_methods.grep(/\\A_roda_before_\\d\\d/).sort\n unless meths.empty?\n plugin :_before_hook unless private_method_defined?(:_roda_before)\n if meths.length == 1\n class_eval(\"alias _roda_before #{meths.first}\", __FILE__, __LINE__)\n else\n class_eval(\"def _roda_before; #{meths.join(';')} end\", __FILE__, __LINE__)\n end\n private :_roda_before\n end\n end",
"def def_roda_before\n meths = private_instance_methods.grep(/\\A_roda_before_\\d\\d/).sort\n unless meths.empty?\n plugin :_before_hook unless private_method_defined?(:_roda_before)\n if meths.length == 1\n class_eval(\"alias _roda_before #{meths.first}\", __FILE__, __LINE__)\n else\n class_eval(\"def _roda_before; #{meths.join(';')} end\", __FILE__, __LINE__)\n end\n private :_roda_before\n alias_method :_roda_before, :_roda_before\n end\n end",
"def after(type=nil, &block)\n if type == :all\n define_method(:after_all) do\n instance_exec(&block)\n super()\n end\n nil\n else\n super\n end\n end",
"def after event = nil, *args, &block\n if event\n if block\n hook :after, event, &block\n else\n trigger :after, event, *args\n end\n else\n super(&block)\n end\n end",
"def after type = nil, &block\n define_method :teardown do\n self.instance_eval(&block)\n super()\n end\n end",
"def eval_after_hook(locals: {})\n return if @after.blank?\n\n evaluate(@after, locals: locals)\n end",
"def after(init, rst = $rst, clk = $clk, &code)\n with_counter(init,rst,clk) do |counter|\n seq do\n hif(rst.to_expr == 1) do\n counter.to_ref <= init.to_expr\n end\n helsif(counter.to_expr == 0) do\n # code.call\n instance_eval(&code)\n end\n helse do\n counter.to_ref <= counter.to_expr - 1\n end\n end\n end\n end",
"def after _type = nil, &block\n define_method :teardown do\n self.instance_eval(&block)\n super()\n end\n end",
"def after datum\n last.after datum\n end",
"def add_after(&hook)\n # old_after = @after\n new_after = proc {\n hook.call\n @after.call\n }\n # self\n self.class.new(before: @before, after: new_after, around: @around)\n end",
"def after(event, callback)\n install_hook(:after, event, callback)\n end",
"def not_after(*) end",
"def after_hooks\n options[:after]\n end",
"def after?\n !after.nil?\n end",
"def eval_after\n self.after_value = eval_expression\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Handle the given exception with handle_error, defaulting the status to 500. Run the after hooks on the rack response; if any error occurs while doing so, log it via rack.errors and still return the response.
|
def _handle_error(e)
res = @_response
res.send(:initialize)
res.status = 500
res = _roda_handle_route{handle_error(e)}
begin
_roda_after(res)
rescue => e2
if errors = env['rack.errors']
errors.puts "Error in after hook processing of error handler: #{e2.class}: #{e2.message}"
e2.backtrace.each{|line| errors.puts(line)}
end
end
res
end
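From application code this path is reached through Roda's error_handler plugin; a minimal hedged example:

require 'roda'

class App < Roda
  plugin :error_handler do |e|
    response.status = 500
    "Internal error: #{e.class}"
  end

  route do |r|
    r.root { raise "boom" }
  end
end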
|
[
"def render_500(exception = nil)\n JsonApiServer.logger.error(exception.try(:message))\n JsonApiServer.logger.error(exception.try(:backtrace))\n\n errors = JsonApiServer.errors(\n status: 500,\n title: I18n.t('json_api_server.render_500.title'),\n detail: I18n.t('json_api_server.render_500.detail')\n )\n render json: errors.to_json, status: 500\n end",
"def track_this_error(exception)\n if Rails.env.production? || Rails.env.staging?\n Appsignal.add_exception(exception) if defined? Appsignal\n else\n Rails.logger.error exception.message\n Rails.logger.error exception.backtrace.join(\"\\n\")\n end\n code = 500\n @message = 'We are sorry but an error occurred processing your request'\n create_api_request(@message, code)\n render 'api/error', status: code\n end",
"def handle_error\n log \"!! Unexpected error while processing request: #{$!.message}\"\n log \"!! #{$!.backtrace.join(\"\\n\")}\"\n log_error\n close_connection rescue nil\n end",
"def handle_request_exception(exception)\n # puts \"ERROR: #{exception}\\n\" + exception.backtrace.join(\"\\n\")\n handle_error_response(exception.kind_of?(CAHTTPError) ? JSON.parse(exception.response) : {})\n end",
"def render_500_template\n template = Web::Controllers::TemplateRenderer.new(\"500.html.erb\").render\n status 500, template\n end",
"def raise_exception_or_error_response(exception, status_code)\n if raise_exceptions?\n raise exception\n else\n bail status_code, exception.message\n end\n end",
"def handle_error\n log \"!! Unexpected error while processing request: #{$!.message}\"\n log_error\n close_connection rescue nil\n end",
"def handle_exceptions &block\n begin\n yield\n rescue RestClient::Exception => e\n Response.new(e.response, :error => e.message)\n end\n end",
"def capture_error(env)\n _, headers, body = response = @app.call(env)\n\n if headers['X-Cascade'] == 'pass'\n body.close if body.respond_to?(:close)\n raise ActionController::RoutingError,\n \"No route matches [#{env['REQUEST_METHOD']}] \" \\\n \"#{env['PATH_INFO'].inspect}\"\n end\n\n response\n rescue Exception => exception # rubocop:disable Lint/RescueException\n wrapper = exception_wrapper(env, exception)\n log_error(env, wrapper)\n render_json_error(wrapper)\n end",
"def error\n render plain: '500 Internal Server Error', status: :internal_server_error\n end",
"def respond_with_500\n [500, { 'Content-Type' => DEFAULT_CONTENT_TYPE }, ['Internal server error']]\n end",
"def rescue_service_error\n render status: :internal_server_error, json: { error: 'Internal Server Error' }\n end",
"def respond_or_raise(error)\n status = nil\n\n begin\n raise error\n rescue ActiveRecord::RecordNotFound\n status = :not_found\n rescue FetchLimitExceededError\n status = :forbidden\n rescue ActiveRecord::RecordInvalid, DetailedArgumentError\n status = :bad_request\n rescue ActionView::Template::Error\n # Any errors from within a template are wrapped in this type.\n # Dispatch based on the unwrapped type.\n # If we fail to dispatch, keep the ActionView::Template::Error object.\n begin\n return respond_or_raise(error.original_exception)\n rescue\n raise error\n end\n end\n\n respond_with_error(error, :status => status)\n end",
"def render_exception(error_message, status_code=500)\n add_error message: error_message\n render 'api/v1/exceptions/exception', status: status_code\n return\n end",
"def exception_handler(error)\n unless(Rails.env == 'development')\n Rails.logger.error \"#{error.class}: #{error} - #{error.backtrace.join(\" | \")}- (user: #{current_user.try(:username)})\"\n Rails.logger.debug \"#{error.class}: #{error}\\n#{error.backtrace.join(\"\\n\")}\"\n msg = error.is_a?(Error) ? error.message : \"Unexpected error: #{error.message}\"\n session[:redirect_count] ||= 0\n session[:redirect_count] += 1\n @error_state = true\n respond_to do |format|\n format.js do\n render(\n :text => msg,\n :status => error.respond_to?(:status_code) ? error.status_code : :internal_server_error\n )\n end\n format.json do\n render(\n :json => {:error => true, :message => msg}.to_json,\n :status => error.respond_to?(:status_code) ? error.status_code : :internal_server_error\n )\n end\n format.html do\n # Only set error if no error is already defined. This allows\n # redirects to non-permissable paths to be auto redirected\n # and still provide the original message\n unless(flash[:error])\n flash[:error] = msg\n end\n if(session[:redirect_count] > 5)\n Rails.logger.error 'Caught in redirect loop. Bailing out!'\n raise default_url\n else\n redirect_to default_url\n end\n end\n end\n else\n raise error\n end\n end",
"def http_status_exception(exception)\n @exception = exception\n render_options = {:template => exception.template, :status => exception.status}\n render_options[:layout] = exception.template_layout if exception.template_layout\n render(render_options)\n rescue ActionView::MissingTemplate\n head(exception.status)\n end",
"def log_error(exception)\n super\n \n # don't email for local requests\n return if local_request?\n \n # this will fail in rails 3\n status_code = response_code_for_rescue(exception)\n status = ActionController::StatusCodes::SYMBOL_TO_STATUS_CODE[status_code] || 500\n \n return if !(500...600).include?(status)\n\n if defined?(Bugsnag)\n Bugsnag.notify(exception)\n else\n unless BetterExceptionNotifier.exception_recipients.blank?\n BetterExceptionNotifier.deliver_exception_notification(exception, self, request, clean_backtrace(exception))\n else\n logger.fatal \"Exception raised, and no way to notify about it\"\n logger.fatal e.message\n logger.fatal e.backtrace\n end\n end\n end",
"def http_status_exception(exception)\n @exception = exception\n render_options = {:template => exception.template, :status => exception.status}\n render_options[:layout] = exception.template_layout if exception.template_layout\n render(render_options)\n rescue ActionView::MissingTemplate\n head(exception.status)\n end",
"def render_500\n render 'errors/index_500'\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
The standard commands which never change.
|
def standard_commands
{
'C' => Commands::ClearImageBuffer,
'T' => Commands::Feed,
'XS' => Commands::Issue
}
end
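A quick sketch of looking one up by its mnemonic (assuming the Commands module from the surrounding library):

standard_commands['C'].new    # => a Commands::ClearImageBuffer instance
standard_commands['XS'].new   # => a Commands::Issue instance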
|
[
"def commands\n unless defined? @commands\n @commands = []\n end\n return @commands\n end",
"def supported_commands\n commands.keys\n end",
"def commands\n @commands ||= Foreman::Thor::CoreExt::OrderedHash.new\n end",
"def commands\r\n @help_commands\r\n end",
"def commands\n @commands ||= [].tap do |c|\n commands_list.each do |command|\n if command.instance_of? String\n c << standard_commands[command].new\n else\n ret_command = send(command)\n c << (ret_command.instance_of?(Hash) ? ret_command.values : ret_command)\n end\n end\n end.flatten\n end",
"def commands\n\t\tself.class.commands_list.each do |name, description|\n\t\t\tprintf \"#{name.ljust(10)} : #{description}\\n\"\n\t\tend\n\tend",
"def undocumented_commands\n command_list - documented_commands\n end",
"def add_original_commands; end",
"def commands\n @commands.keys.sort\n end",
"def deprecated_commands\n []\n end",
"def verbs\n COMMANDS.merge(EXTERNAL_COMMANDS)\n end",
"def global_commands\n @global_commands ||= construct_commands\n end",
"def commands\n self.class.command_classes\n end",
"def all_commands\n storage[:commands]\n end",
"def add_original_commands\n end",
"def command_list\n\t\tcase @name\n\t\twhen :background then [\n\t\t\t\tCommand.new(:w, @assembly.method(:cursor_move_up)),\n\t\t\t\tCommand.new(:a, @assembly.method(:cursor_move_left)),\n\t\t\t\tCommand.new(:s, @assembly.method(:cursor_move_down)),\n\t\t\t\tCommand.new(:d, @assembly.method(:cursor_move_right)),\n\t\t\t\tCommand.new(:q, @assembly.method(:move_floor_down)),\n\t\t\t\tCommand.new(:e, @assembly.method(:move_floor_up)),\n\t\t\t\t\n\t\t\t\tCommand.new(:space, method(:on_background_select)),\n\t\t\t\tCommand.new(:back_slash, method(:on_background_back))\n\t\t\t]\n\t\twhen :assembly then [\n\t\t\t\tCommand.new(:compile, method(:on_compile_assembly), [String]),\n\t\t\t\tCommand.new(:exit, method(:on_assembly_exit)),\n\t\t\t\t\n\t\t\t\tCommand.new(:back_slash, method(:cancel))\n\t\t\t]\n\t\twhen :room then [\n\t\t\t\tCommand.new(:d, method(:on_room_select_doors)),\n\t\t\t\tCommand.new(:e, method(:on_room_select_events)),\n\t\t\t\tCommand.new(:s, method(:on_room_select_tags)),\n\t\t\t\tCommand.new(:x, method(:on_room_remove)),\n\t\t\t\t\n\t\t\t\tCommand.new(:back_slash, method(:on_room_back))\n\t\t\t]\n\t\twhen :room_tags then [\n\t\t\t\tCommand.new(:e, method(:on_tag_command), [], [:e]),\n\t\t\t\tCommand.new(:x, method(:on_tag_command), [], [:x]),\n\t\t\t\tCommand.new(:c, method(:on_tag_command), [], [:c]),\n\t\t\t\t\n\t\t\t\tCommand.new(:back_slash, method(:cancel))\n\t\t\t]\n\t\twhen :events then [\n\t\t\t\tCommand.new(:space, method(:on_events_select_place)),\n\t\t\t\t\n\t\t\t\tCommand.new(:back_slash, method(:cancel))\n\t\t\t]\n\t\twhen :event_place then [\n\t\t\t\tCommand.new(:i, method(:on_place_event)),\n\t\t\t\tCommand.new(:s, method(:on_place_event), [Integer]),\n\t\t\t\tCommand.new(:x, method(:on_delete_event)),\n\t\t\t\t\n\t\t\t\tCommand.new(:back_slash, method(:cancel))\n\t\t\t]\n\t\twhen :doors then [\n\t\t\t\tCommand.new(:space, method(:on_door_create)),\n\t\t\t\tCommand.new(:w, method(:on_modify_door), [], [SUB_UP]),\n\t\t\t\tCommand.new(:a, method(:on_modify_door), [], [SUB_LEFT]),\n\t\t\t\tCommand.new(:d, method(:on_modify_door), [], [SUB_RIGHT]),\n\t\t\t\tCommand.new(:s, method(:on_modify_door), [], [SUB_DOWN]),\n\t\t\t\tCommand.new(:q, method(:on_modify_door), [], [SUB_BELOW]),\n\t\t\t\tCommand.new(:e, method(:on_modify_door), [], [SUB_ABOVE]),\n\t\t\t\t\n\t\t\t\tCommand.new(:back_slash, method(:cancel))\n\t\t\t]\n\t\twhen :door_edit_dest then [\n\t\t\t\tCommand.new(:w, method(:door_edit), [], [:dest, SUB_UP]),\n\t\t\t\tCommand.new(:a, method(:door_edit), [], [:dest, SUB_LEFT]),\n\t\t\t\tCommand.new(:d, method(:door_edit), [], [:dest, SUB_RIGHT]),\n\t\t\t\tCommand.new(:s, method(:door_edit), [], [:dest, SUB_DOWN]),\n\t\t\t\tCommand.new(:q, method(:door_edit), [], [:dest, SUB_BELOW]),\n\t\t\t\tCommand.new(:e, method(:door_edit), [], [:dest, SUB_ABOVE]),\n\t\t\t\t\n\t\t\t\tCommand.new(:back_slash, method(:on_door_edit_cancel))\n\t\t\t]\n\t\twhen :door_edit_dir then [\n\t\t\t\tCommand.new(:s, method(:door_edit), [], [:dir, 0]),\n\t\t\t\tCommand.new(:d, method(:door_edit), [], [:dir, 1]),\n\t\t\t\tCommand.new(:f, method(:door_edit), [], [:dir, 2]),\n\t\t\t\t\n\t\t\t\tCommand.new(:back_slash, method(:on_door_edit_cancel))\n\t\t\t]\n\t\twhen :door_edit_ls then [\n\t\t\t\tCommand.new(:add, method(:door_edit), [Integer, Integer], [:ls, true]),\n\t\t\t\tCommand.new(:del, method(:door_edit), [Integer], [:ls, false]),\n\t\t\t\t\n\t\t\t\tCommand.new(:d, method(:on_door_complete)),\n\t\t\t\tCommand.new(:back_slash, method(:on_door_edit_cancel))\n\t\t\t]\n\t\twhen 
:door_edit_all then [\n\t\t\t\tCommand.new(:dir, method(:on_door_all_edit_dir)),\n\t\t\t\tCommand.new(:ls, method(:on_door_all_edit_ls)),\n\t\t\t\tCommand.new(:x, method(:on_door_delete)),\n\t\t\t\t\n\t\t\t\tCommand.new(:back_slash, method(:on_door_complete))\n\t\t\t]\n\t\telse []\n\t\tend\n\tend",
"def load_basic_commands\n load_cmd = lambda do |name, cmd|\n if !$_commands[name].nil?\n puts \"Command #{name} is reserved, it won't work unless you rename the file\"\n print \"Press Enter to continue \"\n gets\n end\n $_commands[name] = cmd\n end\n load_cmd[\"help\", BasicCommands::Help.new]\n load_cmd[\"exit\", BasicCommands::Exit.new]\n end",
"def valid_commands\n methods.grep(/_command$/).map {|c| c.to_s.gsub(\"_command\", \"\")}\n end",
"def device_commands\n [:sideload, :package, :test, :deeplink, :delete, :navigate, :navigator,\n :text, :monitor, :screencapture, :applist, :profile, :key, :genkey ]\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Take a list of commands and execute them. Returns an array of objects. For a standard command it simply creates a new command instance; for a derived command it executes the method. A derived command may itself return a list of commands, so the whole thing is flattened to produce a single array.
|
def commands
@commands ||= [].tap do |c|
commands_list.each do |command|
if command.instance_of? String
c << standard_commands[command].new
else
ret_command = send(command)
c << (ret_command.instance_of?(Hash) ? ret_command.values : ret_command)
end
end
end.flatten
end
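A self-contained sketch (stand-in Commands classes, illustrative label class) showing how string entries resolve via the standard_commands table above and symbol entries resolve via send, with the result flattened:

module Commands
  ClearImageBuffer = Class.new
  Feed             = Class.new
  Issue            = Class.new
end

class Label
  def standard_commands
    { 'C' => Commands::ClearImageBuffer, 'T' => Commands::Feed, 'XS' => Commands::Issue }
  end

  def commands_list
    ['C', :extras, 'XS']   # strings -> standard commands, symbols -> derived methods
  end

  # A derived command returning a Hash; its values are merged into the result.
  def extras
    { feed_before: Commands::Feed.new, feed_after: Commands::Feed.new }
  end

  def commands
    @commands ||= [].tap do |c|
      commands_list.each do |command|
        if command.instance_of? String
          c << standard_commands[command].new
        else
          ret_command = send(command)
          c << (ret_command.instance_of?(Hash) ? ret_command.values : ret_command)
        end
      end
    end.flatten
  end
end

Label.new.commands.map(&:class)
# => [Commands::ClearImageBuffer, Commands::Feed, Commands::Feed, Commands::Issue]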
|
[
"def execute_commands(commands)\n commands.each do |command|\n execute_command(command)\n end\n \n end",
"def invoke_all #:nodoc:\n self.class.all_commands.map { |_, command| invoke_command(command) }\n end",
"def call\n commands = self.commands.any? ? self.commands : self.class.commands\n commands.each do |command_klass|\n command = command_klass.new(kwargs)\n command.run\n _called << command\n end\n end",
"def execCommands(*commands)\n logging(:debug){\n (\"enter: execCommands(\" +\n commands.map{|com| com.class.name}.inspect +\n \")\") ; } ;\n logging(:debug, \"execCommands: commands:\"){ commands.pretty_inspect } ;\n # get message body\n comBuffer = \"\" ;\n commands.each{|com|\n comBuffer << com.genMessage() ;\n }\n # send message exactly\n sendExact(comBuffer) ;\n # receive response exactly\n resBuffer = recvExact() ;\n # scan and set result code\n resList = []\n commands.each{|com|\n res = com.scanResponse(resBuffer) ;\n resList.push(res) ;\n }\n logging(:debug, \"execCommands: responses:\"){\n commands.map{|com| [com.class.name, \n com._resultCode, \n com._response]}.pretty_inspect }\n logging(:debug){\n (\"exit: execCommands(\" +\n commands.map{|com| com.class.name}.inspect +\n \")\") ; } ;\n return resList ;\n end",
"def execute\n @contained_commands.each do |c|\n c.execute\n end\n end",
"def create_commands()\n cmd_list = Array.new\n cmd_list << Command.new('i', 'initialize', nil, 'Initializes a new clean session.', method(:handle_init))\n cmd_list << Command.new('c', 'category', 'NAME', 'Creates a new category', method(:handle_category))\n cmd_list << Command.new('t', 'task', 'NAME', 'Adds a new task to the current category.', method(:handle_task))\n cmd_list << Command.new('d', 'display', nil, 'Displays all the categories', method(:handle_categories))\n cmd_list << Command.new('n', 'now', nil, 'Displays the information for the current category', method(:display_current_category))\n cmd_list << Command.new('h', 'help', nil, 'Displays the help for a command', method(:handle_help))\n\n cmd_list << Command.new('-v', '--version', nil, 'Prints the application version.', method(:show_version))\n cmd_list << Command.new('-h', '--help', nil, 'You are looking at it.', method(:show_help))\n\n cmd_list\n end",
"def commands\n unless defined? @commands\n @commands = []\n end\n return @commands\n end",
"def shell(commands)\n commands.split(\"\\n\").each { |i| run(i) }\n end",
"def exec_commands(cmds)\n exec_tape(cmds)\n exec_out(cmds)\n exec_head(cmds)\n exec_trans(cmds)\n end",
"def init_commands\n @base_commands = []\n initial_commands = actor.initial_commands + self.class.initial_commands\n if initial_commands.empty?\n add_command_attack([])\n add_command_guard([])\n add_command_skill_list([])\n add_command_item([])\n else\n initial_commands.each {|cmd|\n method_name = \"add_command_#{cmd[0]}\"\n send(method_name, cmd[1..-1]) if respond_to?(method_name)\n }\n end\n end",
"def command_list\n\t\tcase @name\n\t\twhen :background then [\n\t\t\t\tCommand.new(:w, @assembly.method(:cursor_move_up)),\n\t\t\t\tCommand.new(:a, @assembly.method(:cursor_move_left)),\n\t\t\t\tCommand.new(:s, @assembly.method(:cursor_move_down)),\n\t\t\t\tCommand.new(:d, @assembly.method(:cursor_move_right)),\n\t\t\t\tCommand.new(:q, @assembly.method(:move_floor_down)),\n\t\t\t\tCommand.new(:e, @assembly.method(:move_floor_up)),\n\t\t\t\t\n\t\t\t\tCommand.new(:space, method(:on_background_select)),\n\t\t\t\tCommand.new(:back_slash, method(:on_background_back))\n\t\t\t]\n\t\twhen :assembly then [\n\t\t\t\tCommand.new(:compile, method(:on_compile_assembly), [String]),\n\t\t\t\tCommand.new(:exit, method(:on_assembly_exit)),\n\t\t\t\t\n\t\t\t\tCommand.new(:back_slash, method(:cancel))\n\t\t\t]\n\t\twhen :room then [\n\t\t\t\tCommand.new(:d, method(:on_room_select_doors)),\n\t\t\t\tCommand.new(:e, method(:on_room_select_events)),\n\t\t\t\tCommand.new(:s, method(:on_room_select_tags)),\n\t\t\t\tCommand.new(:x, method(:on_room_remove)),\n\t\t\t\t\n\t\t\t\tCommand.new(:back_slash, method(:on_room_back))\n\t\t\t]\n\t\twhen :room_tags then [\n\t\t\t\tCommand.new(:e, method(:on_tag_command), [], [:e]),\n\t\t\t\tCommand.new(:x, method(:on_tag_command), [], [:x]),\n\t\t\t\tCommand.new(:c, method(:on_tag_command), [], [:c]),\n\t\t\t\t\n\t\t\t\tCommand.new(:back_slash, method(:cancel))\n\t\t\t]\n\t\twhen :events then [\n\t\t\t\tCommand.new(:space, method(:on_events_select_place)),\n\t\t\t\t\n\t\t\t\tCommand.new(:back_slash, method(:cancel))\n\t\t\t]\n\t\twhen :event_place then [\n\t\t\t\tCommand.new(:i, method(:on_place_event)),\n\t\t\t\tCommand.new(:s, method(:on_place_event), [Integer]),\n\t\t\t\tCommand.new(:x, method(:on_delete_event)),\n\t\t\t\t\n\t\t\t\tCommand.new(:back_slash, method(:cancel))\n\t\t\t]\n\t\twhen :doors then [\n\t\t\t\tCommand.new(:space, method(:on_door_create)),\n\t\t\t\tCommand.new(:w, method(:on_modify_door), [], [SUB_UP]),\n\t\t\t\tCommand.new(:a, method(:on_modify_door), [], [SUB_LEFT]),\n\t\t\t\tCommand.new(:d, method(:on_modify_door), [], [SUB_RIGHT]),\n\t\t\t\tCommand.new(:s, method(:on_modify_door), [], [SUB_DOWN]),\n\t\t\t\tCommand.new(:q, method(:on_modify_door), [], [SUB_BELOW]),\n\t\t\t\tCommand.new(:e, method(:on_modify_door), [], [SUB_ABOVE]),\n\t\t\t\t\n\t\t\t\tCommand.new(:back_slash, method(:cancel))\n\t\t\t]\n\t\twhen :door_edit_dest then [\n\t\t\t\tCommand.new(:w, method(:door_edit), [], [:dest, SUB_UP]),\n\t\t\t\tCommand.new(:a, method(:door_edit), [], [:dest, SUB_LEFT]),\n\t\t\t\tCommand.new(:d, method(:door_edit), [], [:dest, SUB_RIGHT]),\n\t\t\t\tCommand.new(:s, method(:door_edit), [], [:dest, SUB_DOWN]),\n\t\t\t\tCommand.new(:q, method(:door_edit), [], [:dest, SUB_BELOW]),\n\t\t\t\tCommand.new(:e, method(:door_edit), [], [:dest, SUB_ABOVE]),\n\t\t\t\t\n\t\t\t\tCommand.new(:back_slash, method(:on_door_edit_cancel))\n\t\t\t]\n\t\twhen :door_edit_dir then [\n\t\t\t\tCommand.new(:s, method(:door_edit), [], [:dir, 0]),\n\t\t\t\tCommand.new(:d, method(:door_edit), [], [:dir, 1]),\n\t\t\t\tCommand.new(:f, method(:door_edit), [], [:dir, 2]),\n\t\t\t\t\n\t\t\t\tCommand.new(:back_slash, method(:on_door_edit_cancel))\n\t\t\t]\n\t\twhen :door_edit_ls then [\n\t\t\t\tCommand.new(:add, method(:door_edit), [Integer, Integer], [:ls, true]),\n\t\t\t\tCommand.new(:del, method(:door_edit), [Integer], [:ls, false]),\n\t\t\t\t\n\t\t\t\tCommand.new(:d, method(:on_door_complete)),\n\t\t\t\tCommand.new(:back_slash, method(:on_door_edit_cancel))\n\t\t\t]\n\t\twhen 
:door_edit_all then [\n\t\t\t\tCommand.new(:dir, method(:on_door_all_edit_dir)),\n\t\t\t\tCommand.new(:ls, method(:on_door_all_edit_ls)),\n\t\t\t\tCommand.new(:x, method(:on_door_delete)),\n\t\t\t\t\n\t\t\t\tCommand.new(:back_slash, method(:on_door_complete))\n\t\t\t]\n\t\telse []\n\t\tend\n\tend",
"def execute_commands\n if commands.length < 1\n return nil\n end\n\n case commands[0]\n when \"PLACE\"\n coords = commands[1].split(\",\")\n if valid_place?(coords)\n table.place_robot(coords[0].to_i,coords[1].to_i,robot.name)\n robot.current_direction = coords[2]\n self.commands = commands.drop(2)\n else\n self.commands = commands.drop(1)\n end\n when \"REPORT\"\n return report if robot.current_direction\n self.commands = commands.drop(1)\n when \"LEFT\"\n robot.left if robot.current_direction\n self.commands = commands.drop(1)\n when \"RIGHT\"\n robot.right if robot.current_direction\n self.commands = commands.drop(1)\n when \"MOVE\"\n table.move(robot.name,robot.current_direction) if table.robot_position[robot.name]\n self.commands = commands.drop(1)\n else\n self.commands = commands.drop(1)\n end\n\n execute_commands\n end",
"def commands\n self.class.command_classes\n end",
"def commands\n version = invoke_and_process(\"JSONRPC.Version\")\n if (version[\"version\"] == 2)\n @commands ||= invoke_and_process(\"JSONRPC.Introspect\", :getdescriptions => true)[:commands].map {|c| Xbmc::Command.new(c)}\n else\n @commands ||= invoke_and_process(\"JSONRPC.Introspect\", :getdescriptions => true)[:methods].map {|c| \n attrList = c.at(1)\n attrList[\"command\"] = c.at(0)\n Xbmc::Command.new(attrList)\n }\n end\n end",
"def execute_commands(*commands)\n begin\n transaction_provider.transactional do\n commands.each do |command|\n filters.each { |filter| filter.execute(command) }\n\n raise CommandNotValid.new(command) unless command.valid?\n parsed_command = command.parse_attrs_to_correct_types\n command_handlers.select { |h| h.class.handles_message?(parsed_command) }.each { |h| h.handle_message parsed_command }\n repository.commit(parsed_command)\n end\n end\n ensure\n repository.clear\n end\n\n end",
"def run(command)\n commands << command if command != ''\n end",
"def execute\n @subcommands.each do |k,v|\n v.execute\n end\n end",
"def commands_list\n out = \"\"\n # If there are no commands set\n if COMMANDS.empty?\n out << \" ooops! commands are not here!\"\n else\n # Get the longest command's name, so we can output it nice 'n' clean\n # This '+ int' at the end is a distance (in spaces) from the longest\n # command to descriptions\n longest = COMMANDS.max_by(&:size).size + 8\n COMMANDS.each do |cmd|\n # Calc, calc.\n spaces = longest - cmd.size\n # Check if there is a 'desc' method\n desc = if eval \"Command::#{cmd}.new.respond_to? 'desc'\"\n # If there is - execute it\n eval \"Command::#{cmd}.new.desc\"\n else\n # If there is not\n \"---\"\n end\n out << \" \" << cmd.downcase.to_s << \" \" * spaces << desc\n # If this command is the last one, don't make a new line\n unless cmd == COMMANDS.last\n out << \"\\n\"\n end\n end\n end\n out\n end",
"def execute_commands\n raise Command::NoValidCommandsFound if @commands.size.zero?\n raise Command::PlaceCommandNotFound if @commands.select { |c| c.type == 'PLACE' }.size.zero?\n\n @commands.each { |command| send_command_to_robot(command) }\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Converts a Cygwin Unix path to a Windows path, e.g.: /cygdrive/d/path/to/file ==> D:/path/to/file
|
def cygwin_path(path)
if RUBY_PLATFORM.include?('cygwin') && path.index('/') == 0
IO.popen("cygpath -m #{path.include?(':') ? '-p' : ''} #{path.shellescape}").readline.chomp.gsub(/;/, '\\;')
else
path
end
end
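A hedged usage sketch (the sample path is illustrative; on non-Cygwin platforms the input is returned unchanged):

require 'shellwords'   # provides String#shellescape used in cygwin_path

cygwin_path('/cygdrive/d/path/to/file')
# on Cygwin Ruby  => "D:/path/to/file"
# elsewhere       => "/cygdrive/d/path/to/file"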
|
[
"def fix_path(path)\n return path if not OS.windows?\n `cygpath -w #{path}`.strip.gsub(\"\\\\\", \"/\")\nend",
"def windows_path(path)\n path = cygwin_windows_path(path)\n path = wsl_to_windows_path(path)\n if windows? || wsl?\n path = windows_unc_path(path)\n end\n path\n end",
"def cygwin cygdrive_prefix = '/cygdrive/'\n fpath('/', cygdrive_prefix){|drive| \"#{@drive.downcase}\"}\n end",
"def windows_path(nix_path)\n nix_path.gsub('/', '\\\\')\nend",
"def cyg_path(path)\r\n\tif /^rsync:/ =~ path # If it's an rsync remote path, don't prepend /cygdrive to the front of it\r\n\t\tpath.gsub('\\\\','/')\r\n\telse\r\n\t\t\"/cygdrive/#{File.expand_path(path).gsub('\\\\','/').gsub(':','')}\"\r\n\tend\r\nend",
"def makeWindowsPathIntoMinGWPath(path)\n modifiedPath = path.gsub(/\\\\/, '/')\n modifiedPath.gsub(/^(\\w+):[\\\\\\/]/) { \"/#{$1.downcase}/\" }\nend",
"def makeWindowsPathIntoMinGWPath(path)\n modified_path = path.tr('\\\\', '/')\n modified_path.gsub(%r{^(\\w+):[\\\\/]}) { \"/#{Regexp.last_match(1).downcase}/\" }\nend",
"def windows_unc_path(path)\n path = path.gsub(\"/\", \"\\\\\")\n\n # Convert to UNC path\n if path =~ /^[a-zA-Z]:\\\\?$/\n # If the path is just a drive letter, then return that as-is\n path + \"\\\\\"\n elsif path.start_with?(\"\\\\\\\\\")\n # If the path already starts with `\\\\` assume UNC and return as-is\n path\n else\n \"\\\\\\\\?\\\\\" + path.gsub(\"/\", \"\\\\\")\n end\n end",
"def win_friendly_path(path)\n system_drive = ENV['SYSTEMDRIVE'] ? ENV['SYSTEMDRIVE'] : ''\n path = File.join(system_drive, path)\n path.gsub!(File::SEPARATOR, (File::ALT_SEPARATOR || '\\\\'))\n end",
"def format_windows_path(path, *args)\n path = cygwin_path(path) if cygwin?\n path = msys_path(path) if msys?\n path = wsl_to_windows_path(path) if wsl?\n if windows? || wsl?\n path = windows_unc_path(path) if !args.include?(:disable_unc)\n end\n\n path\n end",
"def fix_hg_path(path)\n return path if not OS.windows?\n return path.strip.gsub(\"\\\\\", \"/\")\nend",
"def wsl_to_windows_path(path)\n if wsl? && wsl_windows_access?\n if wsl_path?(path)\n parts = path.split(\"/\")\n parts.delete_if(&:empty?)\n [wsl_windows_appdata_local, \"lxss\", *parts].join(\"\\\\\")\n else\n path = path.to_s.sub(\"/mnt/\", \"\")\n parts = path.split(\"/\")\n parts.first << \":\"\n path = parts.join(\"\\\\\")\n path\n end\n else\n path\n end\n end",
"def rubywin \n fpath('/'){ |drive| \"#{@drive.upcase}:\"}\n end",
"def platform_specific_path(path)\n if RUBY_PLATFORM =~ /mswin|mingw|windows/\n system_drive = ENV['SYSTEMDRIVE'] ? ENV['SYSTEMDRIVE'] : \"\"\n path = win_slashify File.join(system_drive, path.split('/')[2..-1])\n end\n\n path\n end",
"def win_friendly_path(path)\n if path\n new_path = path.gsub('/', '\\\\')\n new_path = \"c:#{new_path}\" if new_path =~ /^\\\\/\n end\n new_path\n end",
"def absolute_path path\n # Look for drive letters and '//SERVER/' paths.\n patterns = [%r{^[A-Za-z]:(/|\\\\)}, %r{^(//|\\\\\\\\)}]\n patterns.each {|patttern| return path if path =~ patttern }\n absolute = Pathname.new(path).realpath\n absolute.to_s.sub(%r{^/cygdrive/(\\w)/}, '\\\\1:/')\n end",
"def dir(s)\n case JblasConfig::CONFIG['OS_NAME']\n when 'Windows'\n s = s.gsub(/\\\\/, '\\\\\\\\\\\\\\\\')\n %x(cygpath -u '#{s}').chomp\n else\n s # safe default...\n end\n end",
"def strip_windows_drive(path = T.unsafe(nil)); end",
"def escape_path(path)\n if RbConfig::CONFIG[\"host_os\"] =~ /mswin|mingw/\n # I know what you're thinking: \"just use Shellwords.escape\". That\n # method produces incorrect results on Windows with certain input\n # which would be a metacharacter in Sh but is not for one or more of\n # Windows command line parsing libraries. This covers the 99% case of\n # spaces in the path without breaking other stuff.\n if path =~ /[ \\t\\n\\v\"]/\n \"\\\"#{path.gsub(/[ \\t\\n\\v\\\"\\\\]/) { |m| '\\\\' + m[0] }}\\\"\"\n else\n path\n end\n else\n Shellwords.escape(path)\n end\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
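A quick aside on the cygwin_path record above: the sketch below is a hypothetical, pure-Ruby approximation of what cygpath -m returns for plain /cygdrive paths, useful for exercising the mapping on a machine without Cygwin. The real method shells out to cygpath, which also understands custom mount points that this approximation ignores; the helper name and sample paths are invented for illustration.

# Hypothetical helper: approximates cygpath -m for simple /cygdrive paths only.
def approximate_cygwin_path(path)
  if path =~ %r{\A/cygdrive/([a-z])(/.*)?\z}i
    "#{Regexp.last_match(1).upcase}:#{Regexp.last_match(2)}"
  else
    path
  end
end

puts approximate_cygwin_path('/cygdrive/d/path/to/file')  # => D:/path/to/file
puts approximate_cygwin_path('relative/path')             # => relative/path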
POST /system/platform_accounts POST /system/platform_accounts.json
|
def create
  @system_platform_account = System::PlatformAccount.new(system_platform_account_params)
  if @system_platform_account.save
    record_activities('Create platform account', @system_platform_account.account_name)
    flash[:success] = 'Platform account created successfully'
    redirect_to system_platform_accounts_path
  else
    flash[:error] = "Creation failed: #{@system_platform_account.errors.full_messages.join(', ')}"
    render :new
  end
end
|
[
"def create_platform_account(data)\n url_params = {\n format: JSON_RESPONSE_FORMAT\n }\n url = Utils.prepare_url('accounts/', @sandbox, url_params)\n data = Utils.prepare_json_data(data)\n response = @transport.make_http_request(url, 'POST', data)\n\n block_given? ? yield(response) : response\n end",
"def create\n @platform_account = PlatformAccount.new(platform_account_params)\n\n respond_to do |format|\n if @platform_account.save\n format.html { redirect_to @platform_account, notice: 'Platform account was successfully created.' }\n format.json { render :show, status: :created, location: @platform_account }\n else\n format.html { render :new }\n format.json { render json: @platform_account.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @platform_account_type = PlatformAccountType.new(platform_account_type_params)\n\n respond_to do |format|\n if @platform_account_type.save\n format.html { redirect_to @platform_account_type, notice: 'Platform account type was successfully created.' }\n format.json { render :show, status: :created, location: @platform_account_type }\n else\n format.html { render :new }\n format.json { render json: @platform_account_type.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @account = current_user.accounts.new(account_params)\n\n if @account.save\n render json: @account, status: :created, location: @account\n else\n render json: @account.errors, status: :unprocessable_entity\n end\n end",
"def create\n @account = AndroidAccount.new(params[:android_account])\n current_user.person.android_accounts << @account\n\n respond_to do |format|\n if @account.save\n format.html { redirect_to( accounts_path, :notice => 'Account was successfully created.') }\n format.json { render :json => @account, :status => :created, :location => @account }\n else\n format.html { render :action => \"new\" }\n format.json { render :json => @account.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def add_user_for_tenant(args = {}) \n post(\"/tenants.json/#{args[:tenantId]}/users\", args)\nend",
"def create\n @account = PhoneAccount.new(params[:phone_account])\n current_user.person.phone_accounts << @account\n\n respond_to do |format|\n if @account.save\n format.html { redirect_to( accounts_path, :notice => 'Account was successfully created.') }\n format.json { render :json => @account, :status => :created, :location => @account }\n else\n format.html { render :action => 'new' }\n format.json { render :json => @account.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def create\n @admins_account = Admins::Account.new(admins_account_params)\n\n if @admins_account.save\n render json: @admins_account, status: :created, location: @admins_account\n else\n render json: @admins_account.errors, status: :unprocessable_entity\n end\n end",
"def add_tenant_circle(args = {}) \n post(\"/tenantcircles.json/\", args)\nend",
"def add_accounttttttttttttttttttttttttttt\n\n res = {\n :success => false,\n :msg => '',\n :data => []\n }\n\n begin\n params[\"account\"][:company_id] = params[:id]\n account = Account.create(params[\"account\"])\n\n res[:msg] = 'Created new company account'\n res[:data] = {:account => {:id => account.id, :name => account.first.capitalize + ' ' + account.last.capitalize}}\n res[:success] = true\n rescue AccountCreateError => e\n res[:msg] = e.errors[0]\n res[:data][:errors] = e.errors[1]\n end\n\n render :json => res.to_json, :layout => false\n end",
"def create_gen1_account(name) \n\n account_create = {\n \"location\"=> \"centralus\",\n \"tags\"=> {\n \"test_key\"=> \"test_value\"\n },\n \"identity\"=> {\n \"type\"=> \"SystemAssigned\"\n },\n \"properties\"=> {\n \"encryptionState\"=> \"Enabled\",\n \"encryptionConfig\"=> {\n \"type\"=> \"ServiceManaged\",\n },\n \"firewallState\"=> \"Disabled\",\n \"firewallRules\"=> [\n \n ],\n \"trustedIdProviderState\"=> \"Disabled\",\n \"trustedIdProviders\"=> [\n \n ],\n \n \"newTier\"=> \"Consumption\",\n \"firewallAllowAzureIps\"=> \"Enabled\"\n }\n }\n\n response = HTTParty.put(\"https://management.azure.com/subscriptions/#{subscriptionId}/resourceGroups/#{resourceGroupName}/providers/Microsoft.DataLakeStore/accounts/#{name}?api-version=2016-11-01\", {\n\n body: account_create.to_json,\n\n headers: {\n \"Authorization\" => \"Bearer #{bearerToken}\",\n \"Content-Type\" => 'application/json', \n \"Accept\" => '*/*',\n \"Cache-Control\" => 'no-cache',\n \"Connection\" => 'keep-alive',\n \"cache-control\" => 'no-cache'\n },\n \n verify: true,\n })\n\n return JSON.parse response.read_body\n end",
"def create\n @account = BlackberryAccount.new(params[:blackberry_account])\n current_user.person.blackberry_accounts << @account\n\n respond_to do |format|\n if @account.save\n format.html { redirect_to( accounts_path, :notice => 'Account was successfully created.') }\n format.json { render :json => @account, :status => :created, :location => @account }\n else\n format.html { render :action => \"new\" }\n format.json { render :json => @account.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def create_account(email, password)\n @api.post(Services::ACCOUNTS,\"accounts/\",:body => { :email=> email, :password=> password } )\n end",
"def create\n @account = WinMobAccount.new(params[:win_mob_account])\n current_user.person.win_mob_accounts << @account\n\n respond_to do |format|\n if @account.save\n format.html { redirect_to( accounts_path, :notice => 'Account was successfully created.') }\n format.json { render :json => @account, :status => :created, :location => @account }\n else\n format.html { render :action => \"new\" }\n format.json { render :json => @account.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def test_post_accounts_admin\n response =megams.post_accounts(random_id, sandbox_email, sandbox_apikey, $admin)\n response.body.to_s\n assert_equal(201, response.status)\n end",
"def create_account(options)\n form_data = options.merge({ 'action' => 'createaccount' })\n res, dummy = make_api_request(form_data)\n res\n end",
"def create_account\n account = accounts.build\n create_account_user(account)\n end",
"def create\n @account = IphoneAccount.new(params[:iphone_account])\n current_user.person.iphone_accounts << @account\n\n respond_to do |format|\n if @account.save\n format.html { redirect_to( accounts_path, :notice => 'Account was successfully created.') }\n format.json { render :json => @account, :status => :created, :location => @account }\n else\n format.html { render :action => \"new\" }\n format.json { render :json => @account.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def create\n @api_v1_account = Api::V1::Account.new(api_v1_account_params)\n\n respond_to do |format|\n if @api_v1_account.save\n format.html { redirect_to @api_v1_account, notice: 'Account was successfully created.' }\n format.json { render :show, status: :created, location: @api_v1_account }\n else\n format.html { render :new }\n format.json { render json: @api_v1_account.errors, status: :unprocessable_entity }\n end\n end\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
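The create action above relies on a system_platform_account_params helper that is not part of this excerpt. A minimal sketch of what it might look like follows; only account_name is visible in the controller, so any additional permitted attributes would be assumptions.

# Hypothetical strong-parameters helper; only :account_name is grounded in the excerpt above.
def system_platform_account_params
  params.require(:system_platform_account).permit(:account_name)
end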
PATCH/PUT /system/platform_accounts/1 PATCH/PUT /system/platform_accounts/1.json
|
def update
  if @system_platform_account.update(system_platform_account_params)
    record_activities('Update platform account', @system_platform_account.account_name)
    flash[:success] = 'Platform account updated successfully'
    redirect_to system_platform_accounts_path
  else
    flash[:error] = "Update failed: #{@system_platform_account.errors.full_messages.join(', ')}"
    render :edit
  end
end
|
[
"def update\n respond_to do |format|\n if @platform_account.update(platform_account_params)\n format.html { redirect_to @platform_account, notice: 'Platform account was successfully updated.' }\n format.json { render :show, status: :ok, location: @platform_account }\n else\n format.html { render :edit }\n format.json { render json: @platform_account.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n account = current_organization.accounts.find(params[:id])\n return forbidden unless account && current_account.allowed_to_edit_account?(account, current_organization)\n return json(account) unless account.update_attributes pick(params, :first_name, :last_name, :email,:language, :document_language)\n\n role = pick(params, :role)\n #account.update_attributes(role) if !role.empty? && current_account.admin?\n membership = current_organization.role_of(account)\n membership.update_attributes(role) if !role.empty? && current_account.admin?\n password = pick(params, :password)[:password]\n if (current_account.id == account.id) && password\n account.update(password: password)\n end\n json account.canonical(membership: membership)\n end",
"def update\n respond_to do |format|\n if @platform_account_type.update(platform_account_type_params)\n format.html { redirect_to @platform_account_type, notice: 'Platform account type was successfully updated.' }\n format.json { render :show, status: :ok, location: @platform_account_type }\n else\n format.html { render :edit }\n format.json { render json: @platform_account_type.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update_tenant_circle(args = {}) \n put(\"/tenantcircles.json/#{args[:circleId]}\", args)\nend",
"def update \n begin\n @resource = Account.find(params[:id])\n @resource.update_attributes!(params[:account])\n render :response => :PUT\n rescue Exception => e\n @error = process_exception(e)\n render :response => :error\n end\n end",
"def update\n keystone.update_tenant({:id=>params[:id],:name=>params[:name],:description=>params[:description],:enabled=>params[:enabled]})\n respond_to do |format|\n format.html { redirect_to tenants_path, :notice => 'Tenant was successfully updated.' }\n format.json { head :ok }\n end\n end",
"def update\n respond_to do |format|\n if @api_v1_account.update(api_v1_account_params)\n format.html { redirect_to @api_v1_account, notice: 'Account was successfully updated.' }\n format.json { render :show, status: :ok, location: @api_v1_account }\n else\n format.html { render :edit }\n format.json { render json: @api_v1_account.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update_current_logged_in_users_password(args = {}) \n put(\"/users.json/current/password\", args)\nend",
"def update\n @account = current_user.accounts.find(params[:id])\n\n if @account.update(account_params)\n head :no_content\n else\n render json: @account.errors, status: :unprocessable_entity\n end\n end",
"def update(info)\n\t\tresponse = @client.put(\n\t\t\t@session[\"resources\"][\"account\"][\"url\"],\n\t\t\t:body => info.to_json,\n\t\t\t:headers => {\n\t\t\t\t\"Accept\" => mediaType(\"account\"),\"Content-Type\" => mediaType(\"account\"),\n\t\t\t\t\"Authorization\" => \"Capability #{@session[\"resources\"][\"account\"][\"capability\"]}\"\t\n\t\t\t})\n\t\traise \"Error attempting to update account: (#{response.status}) #{response.body}\" if response.status != 200\n\t\t@session[\"resources\"][\"account\"] = JSON.parse(response.body)\n\t\tself\n\tend",
"def update_account(id, model) path = \"/api/v2/accounts/#{id}\"\n put(path, model, {}, AvaTax::VERSION) end",
"def update\n @account = current_user.person.blackberry_accounts.find(params[:id])\n\n respond_to do |format|\n if @account.update_attributes(params[:blackberry_account])\n format.html { redirect_to( accounts_path, :notice => 'Account was successfully updated.') }\n format.json { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.json { render :json => @account.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def update_platform_account(account_data, account_id)\n url_params = {\n format: JSON_RESPONSE_FORMAT\n }\n url = Utils.prepare_url('accounts/%s/' % account_id, @sandbox, url_params)\n data = Utils.prepare_json_data(account_data)\n response = @transport.make_http_request(url, 'POST', data)\n\n block_given? ? yield(response) : response\n end",
"def update\n @platform = Platform.find(params[:id])\n\n if @platform.update(params[:platform])\n head :no_content\n else\n render json: @platform.errors, status: :unprocessable_entity\n end\n end",
"def update\n @account = current_user.person.android_accounts.find(params[:id])\n\n respond_to do |format|\n if @account.update_attributes(params[:android_account])\n format.html { redirect_to( accounts_path, :notice => 'Account was successfully updated.') }\n format.json { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.json { render :json => @account.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @core_account.update(core_account_params)\n format.html { redirect_to @core_account, notice: 'Account was successfully updated.' }\n format.json { render :show, status: :ok, location: @core_account }\n else\n format.html { render :edit }\n format.json { render json: @core_account.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n if @current_user.admin?\n @v1_admin_user = V1::Admin::User.find(params[:id])\n # TODO strip :company_id and maybe :encrypted_password out of params here\n if @v1_admin_user.update(user_params)\n render json: @v1_admin_user, status: :ok\n else\n render json: @v1_admin_user.errors, status: :unprocessable_entity\n end\n else\n render json: {error: 'forbidden'}, status: :forbidden\n end\n end",
"def update\n @account = current_user.person.iphone_accounts.find(params[:id])\n\n respond_to do |format|\n if @account.update_attributes(params[:iphone_account])\n format.html { redirect_to( accounts_path, :notice => 'Account was successfully updated.') }\n format.json { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.json { render :json => @account.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def update\n current_account.update(provider_params, without_protection: true)\n respond_with current_account\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
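The update action assumes @system_platform_account has already been loaded, presumably by a before_action callback that this excerpt does not show. A hypothetical version, with the callback name and action list assumed:

# Hypothetical callback; the real loader may differ.
before_action :set_system_platform_account, only: %i[edit update destroy]

def set_system_platform_account
  @system_platform_account = System::PlatformAccount.find(params[:id])
end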
Processes the given email, submits attachments to the Heathen server, delivers responses as configured
|
def process email, mail_to, is_rts=false
documents = []
unless email.has_attachments?
logger.info "From: #{email.from} Subject: (#{email.subject}) Files: no attachments"
return
end
logger.info "From: #{email.from} Subject: (#{email.subject}) Files: #{email.attachments.map(&:filename).join(',')}"
#
# Convert the attachments
#
email.attachments.each do |attachment|
begin
converter = AutoHeathen::Converter.new( { logger: logger } )
input_source = attachment.body.decoded
action = converter.get_action input_source.content_type
logger.info " convert #{attachment.filename} using action: #{action}"
converted_filename, data = converter.convert action, @cfg[:language], attachment.filename, input_source
documents << { orig_filename: attachment.filename, orig_content: input_source, filename: converted_filename, content: data, error: false }
rescue StandardError => e
documents << { orig_filename: attachment.filename, orig_content: input_source, filename: nil, content: nil, error: e.message }
end
end
#
# deliver the results
#
if is_rts
deliver_rts email, documents, mail_to
else
deliver_onward email, documents, mail_to
end
#
# Summarise the processing
#
logger.info "Results of conversion"
documents.each do |doc|
if doc[:content].nil?
logger.info " #{doc[:orig_filename]} was not converted (#{doc[:error]}) "
else
logger.info " #{doc[:orig_filename]} was converted successfully"
end
end
documents
end
|
[
"def process email\n documents = []\n\n unless email.has_attachments?\n logger.info \"From: #{email.from} Subject: (#{email.subject}) Files: no attachments\"\n return\n end\n\n logger.info \"From: #{email.from} Subject: (#{email.subject}) Files: #{email.attachments.map(&:filename).join(',')}\"\n\n #\n # Submit the attachments to heathen\n #\n email.attachments.each do |attachment|\n begin\n # icky - decode the whole body just to read the first few bytes\n # double-icky - use FileMagic's extension to String\n content_type = attachment.body.decoded.mime_type\n\n op = cfg[:operation] ? cfg[:operation] : get_operation(content_type)\n logger.debug \"Sending '#{op}' conversion request for #{attachment.filename} to Heathen, content-type: #{content_type}\"\n opts = {\n language: @cfg[:language],\n file: AttachmentIO.new(attachment),\n original_filename: attachment.filename,\n multipart: true,\n }\n heathen_client.convert(op,opts).get do |data|\n filename = attachment.filename\n filename = File.basename(filename,File.extname(filename)) + '.pdf'\n logger.debug \"Conversion received: #{filename}\"\n documents << { orig_filename: attachment.filename, filename: filename, content: data, error: false }\n end\n rescue StandardError => e\n documents << { orig_filename: attachment.filename, filename: nil, content: nil, error: e.message }\n end\n end\n\n #\n # Deliver the converted documents\n #\n case @cfg[:mode]\n when :directory\n deliver_directory email, documents\n when :email, :return_to_sender\n deliver_email email, documents\n end\n\n #\n # Summarise the processing\n #\n logger.info \"Results of conversion\"\n documents.each do |doc|\n if doc[:content].nil?\n logger.info \" #{doc[:orig_filename]} was not converted (#{doc[:error]}) \"\n else\n logger.info \" #{doc[:orig_filename]} was converted successfully\"\n end\n end\n\n nil\n end",
"def post_report(recipient_email)\n @logger.info(\"Getting work items for report.\")\n work_items = @db_wrapper.get_work_items\n @logger.debug(\"Prepared the following items for a report:\\n#{work_items}\")\n\n file_name = File.join(File.dirname(__FILE__), \"templates\", \"report.html.erb\")\n html_report = render_template(file_name, work_items)\n @logger.debug(\"HTML report: \\n#{html_report}\")\n\n text_report = \"\"\n work_items.each do |work_item|\n text_report += \"\\n#{work_item.to_json}\"\n end\n @logger.debug(\"Text report: \\n#{text_report}\")\n\n @logger.info(\"Successfully rendered work_items into HTML & text.\")\n\n csv_file = \"data.csv\"\n render_csv(work_items, csv_file)\n @logger.info(\"Successfully saved work items as CSV attachment: #{csv_file}\")\n\n @logger.info(\"Sending report of #{work_items.count} items to #{recipient_email}\")\n if work_items.count > 5\n mime_msg = format_mime_message(recipient_email, text_report, html_report, csv_file)\n @ses_client.send_raw_email({\n source: @email_sender,\n destinations: [recipient_email],\n raw_message: {\n data: mime_msg.to_s\n }\n })\n 204\n else\n @ses_client.send_email({\n source: @email_sender,\n destination: {\n to_addresses: [recipient_email]\n },\n message: {\n subject: {\n data: \"Work Items Report\"\n },\n body: {\n text: {\n data: text_report\n },\n html: {\n data: html_report\n }\n }\n }\n })\n 204\n end\n rescue StandardError => e\n @logger.error(\"Couldn't send email: #{e}\")\n 500\n end",
"def handle(email,cert=nil)\n if email.multipart?\n error=nil\n pgp=false\n possible_pgp=''\n h=nil\n email.parts.each do |e|\n if e.content_type == 'application/octet-stream'\n possible_pgp = e.body\n end\n if e.content_type == 'application/pgp-encrypted'\n pgp = true\n end\n h = handle(e,cert)\n if ! h[:error] # i.e. if we succeeded, return directly, otherwise, keep trying\n return h\n end\n end\n if pgp\n return handle_pgp(possible_pgp)\n else\n return h\n end\n else\n ct = MessageProcessor.detect_type(email.body,email.content_type,\"\")\n logger.info \"detected type %p\" % ct\n if ct=='application/edi-hl7'\n # we have HL7\n begin\n msg = HL7::Message.parse(email.body)\n begin\n mp = MessageProcessor.new(logger,user_from_cert(cert),msg,ct,nil)\n return {:hl7=>msg,:mp=>mp,:cert=>cert}\n rescue WedgieError\n return {:error=>$!.to_s,:hl7=>msg,:cert=>cert}\n end\n rescue WedgieError\n return {:error=>\"HL7 parse error: %s\" % $!.to_s,:cert=>cert}\n end\n elsif ct=='application/x-openpgp'\n logger.info 'Extracting from inline PGP'\n a = email.body =~ /-----BEGIN PGP MESSAGE-----/\n b = (email.body =~ /-----END PGP MESSAGE-----/)+25\n return handle_pgp(email.body[a..b])\n elsif ct=='application/x-pkcs7-mime'\n # it's encrypted X.509. Deep breath..\n result = ''\n err = ''\n m = nil\n cert = nil\n file = RAILS_ROOT+\"/certs/wedgie_decrypt.key\"\n cert = RAILS_ROOT+\"/certs/fac_encrypt.pem\"\n open(file).read\n cmd = \"openssl smime -decrypt -inkey %s -passin pass:Pass-123\" % file\n logger.debug \"CMD: %s\" % cmd\n popen3(cmd) do |stdin, stdout, stderr|\n begin\n stdin.write(email.port)\n stdin.close\n rescue Errno::EPIPE\n end\n result = stdout.read\n err = stderr.read\n end\n unless $?.success? and err.blank?\n logger.info \"decryption error: %s\" % err\n return {:error=>err}\n end\n logger.info \"OpenSSL decryption succeeded\"\n # result should be a signed block\n result2 = ''\n certsdir = RAILS_ROOT+\"/certs\"\n outcert = Tempdir.tmpdir+\"cert.pem\"\n cmd = \"openssl smime -verify -signer %s -CApath %s\" % [outcert,certsdir]\n logger.debug \"CMD: %s\" % cmd\n popen3(cmd) do |stdin, stdout, stderr|\n stdin.write(result)\n stdin.close\n result2 = stdout.read\n err = stderr.read\n end\n if result2.length < 20\n logger.debug(\"OpenSSL verification give clearly invalid data: %p\" % result2)\n return {:error=>err}\n end\n logger.info \"OpenSSL verification succeeded\"\n logger.info \"*****Verified doc*****\"\n logger.info result2\n # result now *should* contain a MIME packet...\n begin\n m = TMail::Mail.parse(result2)\n rescue TMail::SyntaxError\n # ...or not, use the actual text itself as the body of a fake MIME packet\n m = TMail::Mail.new\n m.body = result2\n m.content_type= 'application/octet-stream'\n end\n # examine the signer's certificate\n cmd = 'openssl x509 -in %s -text' % outcert\n logger.debug \"CMD: %s\" % cmd\n cert = IO.popen(cmd) {|x| x.read}\n cert ||= \"\"\n # recur with decrypted text\n h = handle(m,cert)\n h[:error] = err unless err =~ /^Verification successful/\n h[:error]=\"Could not load OpenSSL certificate\" unless cert.length > 0 \n return h\n elsif ct\n begin \n mp = MessageProcessor.new(logger,user_from_cert(cert),email.body,ct,nil)\n return {:mp=>mp,:cert=>cert}\n rescue WedgieError\n return {:error=>$!.to_s,:cert=>cert}\n end\n else\n return {:error=>\"message did not have a valid MIME type\",:cert=>cert}\n end\n end\n end",
"def process\n return 'OK' if @email.to.first[:token] == 'example'\n return process_image if @email.to.first[:token] == 'flyers'\n\n token = ReplyToken.find(@email.to.first[:token])\n\n case token.reply_type\n when 'participation_request'\n process_participation_request(token)\n when 'conversation'\n process_conversation(token)\n when 'comment'\n process_comment(token)\n when 'community'\n process_community_reply(token)\n end\n\n track_reply(token)\n\n token.use!\n end",
"def send_email(subject_mail_part, attach_file, body_mail_part, email_type, delivery_call, test_subject = nil)\n last_failed_response = nil\n response = nil\n content = attach_file.read unless attach_file.nil?\n @candidates.each do |candidate|\n sg_mail =\n create_mail((test_subject.nil? ? subject_mail_part : MailPart.new_subject(test_subject.call(candidate))),\n email_type, candidate.account_name)\n\n add_attachment_file(attach_file, sg_mail, content) unless attach_file.nil?\n\n create_personalization(candidate, sg_mail, test_subject ? @admin : nil)\n\n expanded_text = expand_text(candidate, subject_mail_part, body_mail_part, delivery_call)\n\n sg_mail.add_content(SendGrid::Content.new(type: 'text/html', value: expanded_text))\n\n response = post_email(sg_mail)\n next if response.status_code[0].to_s == '2'\n\n last_failed_response = response\n account_name = candidate.account_name\n status_code = response.status_code\n log_msg = \"Bad response for #{email_type} message for #{account_name} because of a bad response: #{status_code}\"\n Rails.logger.info(log_msg)\n Rails.logger.info(\"Status=#{response.status_code} body=#{response.body}\")\n end\n last_failed_response || response\n end",
"def process_email\n\n #LOG.debug(\"#{@msg_text}\")\n\n message = RMail::Parser.read(@msg_text)\n\n header = message.header\n\n @from = RMail::Address.parse(header['from'])\n\n @subject = header['subject'].to_s\n\n @x_count = header['x-count'].to_s # used with Send_mail.rb script\n\n @subject = '(no subject)' if @subject.size == 0\n\n @message_id = (header['Message-ID'] != nil) ? header['Message-ID'] : Guid.new.to_s\n\n @recipients.concat(RMail::Address.parse(header['to']) + RMail::Address.parse(header['cc'])) #RMail::Address.parse(header.match(/^(to|cc)/, //))) Should work, but doesn't.\n\n #give email as input to the Postfix sendmail command\n if $options.sendmail\n sendmail_cmd = \"/usr/sbin/sendmail.postfix -G -i #{@from.addresses[0]} \"\n\n @recipients.each { |recipient|\n sendmail_cmd.concat(recipient.address)\n sendmail_cmd.concat(\" \")\n }\n\n sendmail_cmd.chomp(\" \")\n\n LOG.debug(\"sending msg #{@message_id} to sendmail...\")\n IO.popen(\"#{sendmail_cmd}\", \"w\") { |sendmail| sendmail << \"#{@msg_text}\" }\n\n end\n\n if (message.multipart?)\n message.each_part { |part|\n\n header = part.header\n\n doc = (header['Content-Transfer-Encoding'] == 'quoted-printable') ? part.body.unpack('M')[0] : part.body\n\n LOG.debug('====================')\n\n if ((header['Content-Type'].downcase.include? 'text/plain') && (!header.has_key?('Content-Disposition')))\n\n LOG.debug('handling plain text part...')\n\n get_links_with_uri(doc)\n\n elsif ((header['Content-Type'].downcase.include? 'text/html') && (!header.has_key?('Content-Disposition')))\n LOG.debug('handling html part...')\n\n get_links(doc)\n\n elsif ((header.has_key?('Content-Disposition')) && (header['Content-Disposition'].downcase.include? 'attachment') && (!$options.ignore_attachments))\n\n if (header['Content-Transfer-Encoding'].downcase.include? 'base64')\n\n LOG.debug('handling base64 attachment...')\n\n # create unique directory to hold the file for processing, and allow for easy cleanup\n folder_name = $options.tmp_folder_for_attachments + \"/\" + Guid.new.to_s\n FileUtils.mkdir_p(folder_name)\n\n file_name = File.join(folder_name, header['Content-Type'].chomp.split(/;\\s*/)[1].split(/\\s*=\\s*/)[1].gsub(/\\\"/, \"\"))\n\n file = File.new(file_name, 'w')\n file.syswrite(base_64_decode = doc.unpack('m')[0]) # base64 decode and write out\n file.close\n\n begin\n #Timeout::timeout($options.timeout) {\n\n checksum = Digest::MD5.hexdigest(base_64_decode)\n urls_to_db(checksum, process_file(file_name)) if urls_from_db(checksum).empty?\n #}\n #rescue Timeout::Error\n #\n # LOG.info(' => Processing of attachments has timed out.')\n rescue Exception => e\n\n record_error(e)\n ensure\n FileUtils.rm_rf(folder_name) #unless folder_name.nil?\n end\n else\n LOG.warn(\" => Unhandled content-transfer-encoding #{header['Content-Transfer-Encoding']}\")\n end\n\n elsif (header['Content-Type'].downcase.include? 'message/rfc822')\n\n LOG.debug('handling forwarded email...')\n\n process_email(doc)\n\n else # handle unknown content-type\n\n log(:warn,\"Unhandled content-type #{header['Content-Type']}\")\n\n end if ((doc.class != NilClass) && (doc.strip != ''))\n }\n else\n get_links_with_uri(message.body)\n end\n rescue Exception => e\n record_error(e)\n end",
"def convert_email filename, in_dir, out_dir, language\n email = Mail.read( File.extname(filename)=='.msg' ? convert_msg(filename, in_dir, out_dir) : filename )\n f_base = filename.gsub /#{File.extname(filename)}$/, ''\n logger.debug \" email file, has #{email.attachments.size} attachments\"\n new_attachments = []\n converter = AutoHeathen::Converter.new( { logger: logger } )\n email.attachments.each do |attachment|\n content = attachment.decoded\n action = converter.get_action content.content_type\n logger.debug \" convert: #{File.basename(filename)}, content_type: #{content.content_type}, action: #{action}\"\n start_time = Time.now\n outfile, data = converter.convert action, language, attachment.filename, content\n logger.debug \" conversion took %0.2f s\"%[Time.now-start_time]\n new_attachments << { filename: File.basename(outfile), data: data }\n end\n email.charset = 'UTF-8' unless email.charset # stop annoying warning message if charset not defined in email\n # Mail#without_attachments! does not work correctly\n #email.without_attachments!\n email.parts.delete_if { |p| p.attachment? }\n new_attachments.each do |na|\n email.attachments[na[:filename]] = na[:data]\n end\n e_filename = to_outfile in_dir, filename, out_dir, \"#{f_base}.eml\"\n logger.debug \" writing #{e_filename} with #{email.attachments.size} converted attachments\"\n File.open( e_filename, 'w' ) do |f|\n f.write email.to_s\n end\nend",
"def process_attachments(attachments)\n attachments.each do |attachment|\n tempfile = Down::NetHttp.download(attachment[:url_private], headers: { 'Authorization' => \"Bearer #{integration_hook.access_token}\" })\n\n attachment_params = {\n file_type: file_type(attachment),\n account_id: @message.account_id,\n external_url: attachment[:url_private],\n file: {\n io: tempfile,\n filename: tempfile.original_filename,\n content_type: tempfile.content_type\n }\n }\n\n attachment_obj = @message.attachments.new(attachment_params)\n attachment_obj.file.content_type = attachment[:mimetype]\n attachment_obj.save!\n end\n end",
"def process(message)\n # Process a plain text and MIME encoded e-mail message.\n processor.process message\n end",
"def process_alerts\n @email_alert = get_email_alert\n return unless email_alert\n\n initialize_mail_log\n\n @payload_collection = AlertMailPayloadCollection.new\n email_alert.mail_to.each do |alert_body|\n next unless alert_mail_recipient_types.include? alert_body.recipient\n alert_payload = prepare_alert_payload(alert_body)\n @payload_collection << alert_payload if alert_payload\n end\n end",
"def receive(email)\n post = Post.new\n\n # Will fail if no matches. Rely on validation\n list_post_header = email.header_string(\"List-Post\")\n matches = list_post_header.match(/<mailto:(\\S+)@/) if list_post_header\n if matches\n mailing_list_name = matches[1]\n else\n mailing_list_name = email.to.first.to_s\n end\n post.mailing_list = MailingList.find_by_name(mailing_list_name)\n\n post.subject = email.subject\n \n if email.multipart?\n plain_text_part = nil\n\n # Outlook\n related_part = email.parts.find { |part| \n part.content_type == \"multipart/related\"\n }\n if related_part\n alt_part = related_part.parts.find { |part| \n part.content_type == \"multipart/alternative\"\n }\n else\n alt_part = email.parts.find { |part| \n part.content_type == \"multipart/alternative\"\n }\n end\n \n # OS X rich text email\n if alt_part \n plain_text_part = alt_part.parts.find { |part| \n part.content_type == \"text/plain\"\n }\n end\n\n plain_text_part = email.parts.find { |part| \n part.content_type == \"text/plain\"\n } unless plain_text_part\n \n plain_text_part = email.parts.find { |part| \n part.content_type == \"text/html\"\n } unless plain_text_part\n \n post.body = plain_text_part.body\n end\n \n if post.body.blank?\n post.body = email.body\n end\n \n post.from_name = email.friendly_from\n post.from_email_address = email.from\n post.date = email.date\n begin\n post.save!\n rescue => save_error\n RACING_ON_RAILS_DEFAULT_LOGGER.error(\"Could not save post: #{save_error}\")\n if post and !post.errors.empty?\n RACING_ON_RAILS_DEFAULT_LOGGER.error(post.errors.full_messages)\n end\n raise\n end\n end",
"def process\n\n metrics = raw_summary['resources'] || {} rescue {}\n\n if metrics['out_of_sync'] == 0 && metrics['changed'] == 0\n Puppet.notice \"Not sending ehpas email report; no changes\"\n return\n end\n\n taglists = parse()\n\n # Now find any appropriately tagged messages.\n reports = match(taglists)\n\n send(reports) unless reports.empty?\n end",
"def process_attachments!\n refs = attachment_keys.map { |key| Array(open_struct_form.send(key)) }.flatten\n files = PersistentAttachment.where(guid: refs.map(&:confirmationCode))\n files.find_each { |f| f.update(saved_claim_id: id) }\n\n CentralMail::SubmitSavedClaimJob.perform_async(id)\n end",
"def process_mail(mail)\n scope = new(\"PATH_INFO\"=>'', 'SCRIPT_NAME'=>'', \"REQUEST_METHOD\"=>\"PROCESSMAIL\", 'rack.input'=>StringIO.new, 'roda.mail'=>mail)\n\n begin\n begin\n scope.process_mail\n rescue UnhandledMail\n scope.unhandled_mail_hook\n else\n scope.handled_mail_hook\n end\n ensure\n scope.after_mail_hook\n end\n end",
"def process\n mail = Mail.new\n mail.from = queue_item.from_email_address\n mail.to = queue_item.to_email_address\n mail.subject = queue_item.subject\n mail.body = queue_item.body\n begin\n mail.deliver!\n rescue => e\n @error_details = e.message\n end\n end",
"def handle\n attachments.each do |attachment|\n handle_attachment attachment\n end\n end",
"def receive(email)\n post = Post.new\n\n # Sometimes we get poorly-encoded data and New Relic chokes\n NewRelic::Agent.disable_all_tracing do\n # Will fail if no matches. Rely on validation\n list_post_header = email[\"List-Post\"]\n matches = list_post_header.to_s.match(/<mailto:(\\S+)@/) if list_post_header\n mailing_list_name = if matches\n matches[1]\n else\n email.to.first.to_s\n end\n\n mailing_list = MailingList.find_by(name: mailing_list_name.try(:strip))\n\n unless mailing_list\n email_to = begin\n email.to.first.to_s\n rescue StandardError\n nil\n end\n email_from = begin\n email[:from]\n rescue StandardError\n nil\n end\n mail_subject = begin\n mail.subject\n rescue StandardError\n nil\n end\n Rails.logger.warn \"No mailing list for '#{mailing_list_name}' header '#{list_post_header}' to '#{email_to}' from '#{email_from}' about '#{mail_subject}'\"\n return true\n end\n\n post.mailing_list = mailing_list\n\n post.subject = email.subject\n\n multipart_related = email.parts.detect { |part| part.mime_type == \"multipart/related\" }\n multipart_alternative = email.parts.detect { |part| part.mime_type == \"multipart/alternative\" }\n post.body = if multipart_related\n # Outlook\n multipart_related.text_part.try(:decoded)&.gsub(\"\\r\", \"\")\n elsif multipart_alternative\n # OS X\n multipart_alternative.text_part.try(:decoded)\n else\n (email.text_part || email.html_part || email.body).try(:decoded)\n end\n\n post.body = if post.body\n post.body.encode(\"UTF-8\", undef: :replace)\n else\n \"\"\n end\n\n post.from_name = from_name(email)\n post.from_email = (email[:reply_to] || email[:from]).addresses.first\n post.from_name = post.from_email_obscured if post.from_name.blank?\n\n post.date = email.date\n\n Rails.logger.error \"Could not save post: #{post.errors.full_messages.join('. ')}\" unless Post.save(post, mailing_list)\n\n ActiveSupport::Notifications.instrument \"receive.mailing_list_mailer.racing_on_rails\",\n mailing_list_id: mailing_list.id,\n mailing_list_name: mailing_list.name,\n subject: post.subject,\n from_email: post.from_email,\n from_name: post.from_name\n\n post\n rescue StandardError => e\n Rails.logger.error \"Could not save post: #{e}\"\n begin\n Rails.logger.error email\n rescue StandardError\n Rails.logger.error \"Could not save email contents\"\n end\n Rails.logger.error post.errors.full_messages if post&.errors.present?\n RacingOnRails::Application.exception_notifier.track_exception e\n raise\n end\n post\n end",
"def email_with_attachment(recipient_a, subject_a, user_a, org_fname, file_attach)\n @recipients = recipient_a\n @subject = subject_a\n @from = \"info@nrdpfc.ca\"\n @reply_to = \"info@nrdpfc.ca\"\n @body = subject_a \n part :content_type => \"multipart/mixed\" do |p|\n p.attachment :content_type => \"csv/text\", \n# :body => File.open(\"public/whale_batch.csv\", \"rb\") { |f| f.read },\n :body => file_attach,\n :filename => org_fname,\n :transfer_encoding => \"base64\",\n :charset => \"utf-8\"\n end \n end",
"def data_processing(message_data)\n EmailMailer.new.email(message_data)\n\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
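The process method above depends on a handful of Mail gem calls: has_attachments?, attachments, filename and body.decoded. The self-contained sketch below exercises just those calls against a throwaway message; the addresses and attachment contents are invented for illustration.

require 'mail'

email = Mail.new do
  from     'sender@example.com'
  subject  'Please convert'
  body     'See attached.'
  add_file filename: 'report.txt', content: 'hello world'
end

puts email.has_attachments?              # => true
email.attachments.each do |attachment|
  puts attachment.filename               # => report.txt
  puts attachment.body.decoded.bytesize  # size of the decoded attachment in bytes
end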
Forwards the email to the given recipient, with successfully converted documents replacing the original attachments (attachments that could not be converted are passed through unchanged)
|
def deliver_onward email, documents, mail_to
logger.info "Sending response mail to #{mail_to}"
email.cc [] # No CCing, just send to the recipient
email.to mail_to
email.subject "#{'Fwd: ' unless email.subject.to_s.start_with? 'Fwd:'}#{email.subject}"
email.return_path email.from unless email.return_path
# something weird goes on with Sharepoint, where the doc is dropped on the floor
# so, remove any offending headers
email.message_id = nil # make sure of message_id too
good_headers = ONWARD_HEADERS.map{ |h| h.downcase }
inspect_headers = email.header.map(&:name)
  inspect_headers.each do |name|
unless good_headers.include? name.downcase
email.header[name] = nil
end
end
email.received = nil # make sure of received
# replace attachments with converted files
email.parts.delete_if { |p| p.attachment? }
documents.each do |doc|
if doc[:content]
email.add_file filename: doc[:filename], content: doc[:content]
else # preserve non-converted attachments when forwarding
email.add_file filename: doc[:orig_filename], content: doc[:orig_content]
end
end
email.delivery_method :smtp, address: @cfg[:mail_host], port: @cfg[:mail_port]
deliver email
end
|
[
"def forward_email(params)\n get_request('configureEmailForward?'+get_url_parameters(params)).body\n end",
"def forward\n message = self.message.class.new(:subject => subject, :body => body)\n message.sender = receiver\n message\n end",
"def forward\n message = self.class.new(:subject => subject, :body => body)\n message.sender = sender\n message\n end",
"def forward_message(params={})\n response.from = request.from\n response.reply_to = settings.service.default_sender \n response.subject = request.subject\n\n params.each do |k,v|\n response.send(\"#{k}=\", v)\n end\n\n if request.multipart?\n response.text_part = request.text_part\n response.html_part = request.html_part\n else\n response.body = request.body.to_s\n end\n end",
"def process email\n documents = []\n\n unless email.has_attachments?\n logger.info \"From: #{email.from} Subject: (#{email.subject}) Files: no attachments\"\n return\n end\n\n logger.info \"From: #{email.from} Subject: (#{email.subject}) Files: #{email.attachments.map(&:filename).join(',')}\"\n\n #\n # Submit the attachments to heathen\n #\n email.attachments.each do |attachment|\n begin\n # icky - decode the whole body just to read the first few bytes\n # double-icky - use FileMagic's extension to String\n content_type = attachment.body.decoded.mime_type\n\n op = cfg[:operation] ? cfg[:operation] : get_operation(content_type)\n logger.debug \"Sending '#{op}' conversion request for #{attachment.filename} to Heathen, content-type: #{content_type}\"\n opts = {\n language: @cfg[:language],\n file: AttachmentIO.new(attachment),\n original_filename: attachment.filename,\n multipart: true,\n }\n heathen_client.convert(op,opts).get do |data|\n filename = attachment.filename\n filename = File.basename(filename,File.extname(filename)) + '.pdf'\n logger.debug \"Conversion received: #{filename}\"\n documents << { orig_filename: attachment.filename, filename: filename, content: data, error: false }\n end\n rescue StandardError => e\n documents << { orig_filename: attachment.filename, filename: nil, content: nil, error: e.message }\n end\n end\n\n #\n # Deliver the converted documents\n #\n case @cfg[:mode]\n when :directory\n deliver_directory email, documents\n when :email, :return_to_sender\n deliver_email email, documents\n end\n\n #\n # Summarise the processing\n #\n logger.info \"Results of conversion\"\n documents.each do |doc|\n if doc[:content].nil?\n logger.info \" #{doc[:orig_filename]} was not converted (#{doc[:error]}) \"\n else\n logger.info \" #{doc[:orig_filename]} was converted successfully\"\n end\n end\n\n nil\n end",
"def deliver params=[]\n self.from ||= params.assoc('email').last if params.assoc('email')\n self.from ||= to\n self.body = self.body.to_s + data(params)\n super()\n end",
"def convert_email filename, in_dir, out_dir, language\n email = Mail.read( File.extname(filename)=='.msg' ? convert_msg(filename, in_dir, out_dir) : filename )\n f_base = filename.gsub /#{File.extname(filename)}$/, ''\n logger.debug \" email file, has #{email.attachments.size} attachments\"\n new_attachments = []\n converter = AutoHeathen::Converter.new( { logger: logger } )\n email.attachments.each do |attachment|\n content = attachment.decoded\n action = converter.get_action content.content_type\n logger.debug \" convert: #{File.basename(filename)}, content_type: #{content.content_type}, action: #{action}\"\n start_time = Time.now\n outfile, data = converter.convert action, language, attachment.filename, content\n logger.debug \" conversion took %0.2f s\"%[Time.now-start_time]\n new_attachments << { filename: File.basename(outfile), data: data }\n end\n email.charset = 'UTF-8' unless email.charset # stop annoying warning message if charset not defined in email\n # Mail#without_attachments! does not work correctly\n #email.without_attachments!\n email.parts.delete_if { |p| p.attachment? }\n new_attachments.each do |na|\n email.attachments[na[:filename]] = na[:data]\n end\n e_filename = to_outfile in_dir, filename, out_dir, \"#{f_base}.eml\"\n logger.debug \" writing #{e_filename} with #{email.attachments.size} converted attachments\"\n File.open( e_filename, 'w' ) do |f|\n f.write email.to_s\n end\nend",
"def parse_fwd_mail(body)\n results = []\n \n from = extract_address(body, 'From:', \"Failed to parse from address in forwarded mail body\", true)\n to = extract_address(body, 'To:', \"Failed to parse to address in forwarded mail body\", true)\n cc = extract_address(body, 'CC:', \"Failed to parse CC address in forwarded mail body\", false)\n results << from \n results << to \n results << cc \n \n subject = extract_text(body, 'Subject:', \"Failed to parse subject in forwarded mail body\")\n date = extract_text(body, 'Date:', \"Failed to parse date in forwarded mail body\")\n results << subject\n results << date\n \n results\n end",
"def forward_as_attachment_to\n return @forward_as_attachment_to\n end",
"def process email, mail_to, is_rts=false\n documents = []\n\n unless email.has_attachments?\n logger.info \"From: #{email.from} Subject: (#{email.subject}) Files: no attachments\"\n return\n end\n\n logger.info \"From: #{email.from} Subject: (#{email.subject}) Files: #{email.attachments.map(&:filename).join(',')}\"\n\n #\n # Convert the attachments\n #\n email.attachments.each do |attachment|\n begin\n converter = AutoHeathen::Converter.new( { logger: logger } )\n input_source = attachment.body.decoded\n action = converter.get_action input_source.content_type\n logger.info \" convert #{attachment.filename} using action: #{action}\"\n converted_filename, data = converter.convert action, @cfg[:language], attachment.filename, input_source\n documents << { orig_filename: attachment.filename, orig_content: input_source, filename: converted_filename, content: data, error: false }\n rescue StandardError => e\n documents << { orig_filename: attachment.filename, orig_content: input_source, filename: nil, content: nil, error: e.message }\n end\n end\n\n #\n # deliver the results\n #\n if is_rts\n deliver_rts email, documents, mail_to\n else\n deliver_onward email, documents, mail_to\n end\n\n #\n # Summarise the processing\n #\n logger.info \"Results of conversion\"\n documents.each do |doc|\n if doc[:content].nil?\n logger.info \" #{doc[:orig_filename]} was not converted (#{doc[:error]}) \"\n else\n logger.info \" #{doc[:orig_filename]} was converted successfully\"\n end\n end\n\n documents\n end",
"def forward\n IncomeEmailJob.perform_later(from, to, subject, body, attachments) if mail\n end",
"def process\n # all of your application-specific code here - creating models,\n # processing reports, etc\n\n # Grab the 'to' emails @email.to -> array of hashes. array hash :email\n # grab the 'cc' emails @email.cc array hash :email\n \n\n # from - hash containing sender address information. .\n\n\n #raw_text v. #body?\n\n\n end",
"def fix_encoding(raw_eml)\n m = Mail.new(raw_eml)\n\n m.body = fix_body_encoding(m.body, m.charset)\n\n m.parts.each do |part|\n part.body = fix_body_encoding(part.body, part.charset)\n end\n\n m.encoded\n end",
"def receive(email)\n post = Post.new\n\n # Will fail if no matches. Rely on validation\n list_post_header = email.header_string(\"List-Post\")\n matches = list_post_header.match(/<mailto:(\\S+)@/) if list_post_header\n if matches\n mailing_list_name = matches[1]\n else\n mailing_list_name = email.to.first.to_s\n end\n post.mailing_list = MailingList.find_by_name(mailing_list_name)\n\n post.subject = email.subject\n \n if email.multipart?\n plain_text_part = nil\n\n # Outlook\n related_part = email.parts.find { |part| \n part.content_type == \"multipart/related\"\n }\n if related_part\n alt_part = related_part.parts.find { |part| \n part.content_type == \"multipart/alternative\"\n }\n else\n alt_part = email.parts.find { |part| \n part.content_type == \"multipart/alternative\"\n }\n end\n \n # OS X rich text email\n if alt_part \n plain_text_part = alt_part.parts.find { |part| \n part.content_type == \"text/plain\"\n }\n end\n\n plain_text_part = email.parts.find { |part| \n part.content_type == \"text/plain\"\n } unless plain_text_part\n \n plain_text_part = email.parts.find { |part| \n part.content_type == \"text/html\"\n } unless plain_text_part\n \n post.body = plain_text_part.body\n end\n \n if post.body.blank?\n post.body = email.body\n end\n \n post.from_name = email.friendly_from\n post.from_email_address = email.from\n post.date = email.date\n begin\n post.save!\n rescue => save_error\n RACING_ON_RAILS_DEFAULT_LOGGER.error(\"Could not save post: #{save_error}\")\n if post and !post.errors.empty?\n RACING_ON_RAILS_DEFAULT_LOGGER.error(post.errors.full_messages)\n end\n raise\n end\n end",
"def process_email\n\n #LOG.debug(\"#{@msg_text}\")\n\n message = RMail::Parser.read(@msg_text)\n\n header = message.header\n\n @from = RMail::Address.parse(header['from'])\n\n @subject = header['subject'].to_s\n\n @x_count = header['x-count'].to_s # used with Send_mail.rb script\n\n @subject = '(no subject)' if @subject.size == 0\n\n @message_id = (header['Message-ID'] != nil) ? header['Message-ID'] : Guid.new.to_s\n\n @recipients.concat(RMail::Address.parse(header['to']) + RMail::Address.parse(header['cc'])) #RMail::Address.parse(header.match(/^(to|cc)/, //))) Should work, but doesn't.\n\n #give email as input to the Postfix sendmail command\n if $options.sendmail\n sendmail_cmd = \"/usr/sbin/sendmail.postfix -G -i #{@from.addresses[0]} \"\n\n @recipients.each { |recipient|\n sendmail_cmd.concat(recipient.address)\n sendmail_cmd.concat(\" \")\n }\n\n sendmail_cmd.chomp(\" \")\n\n LOG.debug(\"sending msg #{@message_id} to sendmail...\")\n IO.popen(\"#{sendmail_cmd}\", \"w\") { |sendmail| sendmail << \"#{@msg_text}\" }\n\n end\n\n if (message.multipart?)\n message.each_part { |part|\n\n header = part.header\n\n doc = (header['Content-Transfer-Encoding'] == 'quoted-printable') ? part.body.unpack('M')[0] : part.body\n\n LOG.debug('====================')\n\n if ((header['Content-Type'].downcase.include? 'text/plain') && (!header.has_key?('Content-Disposition')))\n\n LOG.debug('handling plain text part...')\n\n get_links_with_uri(doc)\n\n elsif ((header['Content-Type'].downcase.include? 'text/html') && (!header.has_key?('Content-Disposition')))\n LOG.debug('handling html part...')\n\n get_links(doc)\n\n elsif ((header.has_key?('Content-Disposition')) && (header['Content-Disposition'].downcase.include? 'attachment') && (!$options.ignore_attachments))\n\n if (header['Content-Transfer-Encoding'].downcase.include? 'base64')\n\n LOG.debug('handling base64 attachment...')\n\n # create unique directory to hold the file for processing, and allow for easy cleanup\n folder_name = $options.tmp_folder_for_attachments + \"/\" + Guid.new.to_s\n FileUtils.mkdir_p(folder_name)\n\n file_name = File.join(folder_name, header['Content-Type'].chomp.split(/;\\s*/)[1].split(/\\s*=\\s*/)[1].gsub(/\\\"/, \"\"))\n\n file = File.new(file_name, 'w')\n file.syswrite(base_64_decode = doc.unpack('m')[0]) # base64 decode and write out\n file.close\n\n begin\n #Timeout::timeout($options.timeout) {\n\n checksum = Digest::MD5.hexdigest(base_64_decode)\n urls_to_db(checksum, process_file(file_name)) if urls_from_db(checksum).empty?\n #}\n #rescue Timeout::Error\n #\n # LOG.info(' => Processing of attachments has timed out.')\n rescue Exception => e\n\n record_error(e)\n ensure\n FileUtils.rm_rf(folder_name) #unless folder_name.nil?\n end\n else\n LOG.warn(\" => Unhandled content-transfer-encoding #{header['Content-Transfer-Encoding']}\")\n end\n\n elsif (header['Content-Type'].downcase.include? 'message/rfc822')\n\n LOG.debug('handling forwarded email...')\n\n process_email(doc)\n\n else # handle unknown content-type\n\n log(:warn,\"Unhandled content-type #{header['Content-Type']}\")\n\n end if ((doc.class != NilClass) && (doc.strip != ''))\n }\n else\n get_links_with_uri(message.body)\n end\n rescue Exception => e\n record_error(e)\n end",
"def receive(email)\n post = Post.new\n\n # Sometimes we get poorly-encoded data and New Relic chokes\n NewRelic::Agent.disable_all_tracing do\n # Will fail if no matches. Rely on validation\n list_post_header = email[\"List-Post\"]\n matches = list_post_header.to_s.match(/<mailto:(\\S+)@/) if list_post_header\n mailing_list_name = if matches\n matches[1]\n else\n email.to.first.to_s\n end\n\n mailing_list = MailingList.find_by(name: mailing_list_name.try(:strip))\n\n unless mailing_list\n email_to = begin\n email.to.first.to_s\n rescue StandardError\n nil\n end\n email_from = begin\n email[:from]\n rescue StandardError\n nil\n end\n mail_subject = begin\n mail.subject\n rescue StandardError\n nil\n end\n Rails.logger.warn \"No mailing list for '#{mailing_list_name}' header '#{list_post_header}' to '#{email_to}' from '#{email_from}' about '#{mail_subject}'\"\n return true\n end\n\n post.mailing_list = mailing_list\n\n post.subject = email.subject\n\n multipart_related = email.parts.detect { |part| part.mime_type == \"multipart/related\" }\n multipart_alternative = email.parts.detect { |part| part.mime_type == \"multipart/alternative\" }\n post.body = if multipart_related\n # Outlook\n multipart_related.text_part.try(:decoded)&.gsub(\"\\r\", \"\")\n elsif multipart_alternative\n # OS X\n multipart_alternative.text_part.try(:decoded)\n else\n (email.text_part || email.html_part || email.body).try(:decoded)\n end\n\n post.body = if post.body\n post.body.encode(\"UTF-8\", undef: :replace)\n else\n \"\"\n end\n\n post.from_name = from_name(email)\n post.from_email = (email[:reply_to] || email[:from]).addresses.first\n post.from_name = post.from_email_obscured if post.from_name.blank?\n\n post.date = email.date\n\n Rails.logger.error \"Could not save post: #{post.errors.full_messages.join('. ')}\" unless Post.save(post, mailing_list)\n\n ActiveSupport::Notifications.instrument \"receive.mailing_list_mailer.racing_on_rails\",\n mailing_list_id: mailing_list.id,\n mailing_list_name: mailing_list.name,\n subject: post.subject,\n from_email: post.from_email,\n from_name: post.from_name\n\n post\n rescue StandardError => e\n Rails.logger.error \"Could not save post: #{e}\"\n begin\n Rails.logger.error email\n rescue StandardError\n Rails.logger.error \"Could not save email contents\"\n end\n Rails.logger.error post.errors.full_messages if post&.errors.present?\n RacingOnRails::Application.exception_notifier.track_exception e\n raise\n end\n post\n end",
"def apply!(mail)\n mail.subject = redact(mail.subject)\n\n if mail.multipart?\n mail.parts.each do |part|\n part.body = redact(part.body)\n end\n else\n mail.body = redact(mail.body)\n end\n end",
"def create_forward\n setup_forward create_empty_mail()\n end",
"def forward_to_user\n jid = node.xpath('/message/ns:jid', 'ns' => NS).first\n return unless jid\n agent = node.from\n node.from = node.to\n node.to = jid.content\n jid.content = agent\n stream.write(node)\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
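deliver_onward strips every header that is not listed in ONWARD_HEADERS, a constant defined elsewhere in the source. The sketch below reproduces that whitelist pattern in isolation with an assumed header list, using the same header[name] = nil removal trick on a freshly built message.

require 'mail'

# The whitelist here is an assumption; the real ONWARD_HEADERS lives outside this excerpt.
keep_headers = %w[From To Subject Date Content-Type MIME-Version].map(&:downcase)

mail = Mail.new do
  from    'sender@example.com'
  to      'recipient@example.com'
  subject 'Fwd: converted documents'
  body    'See attachments.'
end
mail['X-Custom-Tracker'] = 'abc123'

mail.header.fields.map(&:name).each do |name|
  mail.header[name] = nil unless keep_headers.include?(name.downcase)
end

puts mail.header.fields.map(&:name).inspect  # only the whitelisted headers survive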
Opens and reads a file: tries the given filename first, then falls back to resolving it relative to the project base directory
|
def read_file filename
f = filename
unless File.exist? f
f = Pathname.new(__FILE__).realpath.parent.parent.parent + f
end
File.read f
end
|
[
"def readfile(filename)\n\tfilename = File.expand_path(filename, File.dirname(__FILE__))\n\tFile.read(filename)\nrescue\n\tnil\nend",
"def find filename\n return filename if File.exists? filename\n filename = \"./haml/\"+filename\n return filename if File.exists? filename\n filename = @src_folder+\"/\"+filename\n return filename if File.exists? filename\n throw \"Could not find file: #{filename}\"\nend",
"def read_file(filename)\n if !File.file?(filename); return nil end\n File.read(filename)\nend",
"def open_file file\n file = project_file(file)\n unless File.exists?(file)\n if File.exists? File.dirname file\n log \"There is no file, but the directory exists. Boldly making \" + File.basename(file)\n File.open(file, File::CREAT|File::TRUNC|File::WRONLY) do |f|\n f.puts \"# created by the associated file plugin :-)\"\n end\n end\n end\n \n return(Project::Manager.open_file(file)) \n log \"No such file: #{file}\"\n end",
"def load_from_file filename\n fnm = File.exist?(filename) ? filename : File.join(@wd,filename)\n load_configuration(fnm)\n end",
"def read_in_sandbox(filename)\n file = Dir[File.join(sandbox, filename)].first\n file ? File.read(file) : \"\"\nend",
"def get_local_file(fname)\n if File.exist?(@dir+'/'+fname) then\n fname = @dir+'/'+fname\n end\n return File.open(fname)\n end",
"def project_file(fname)\n \"#{@project_path}/#{fname}\"\nend",
"def find_file\n Dir['.james', File.expand_path('~/.james')].first\n end",
"def open_in_file(filepath)\n File.open(filepath, 'rb')\n end",
"def locate(name=nil)\n name ||= filename\n raise LoadError unless name\n Dir.ascend(Dir.pwd) do |dir|\n match = File.join(dir, name)\n files = Dir.glob(match, File::FNM_CASEFOLD)\n if file = files[0]\n return file\n end\n end\n return nil\n end",
"def fs_read(file)\n File.read(File.join(self.git_dir, file))\n end",
"def open_read(p)\n f = open_read_files[p]\n unless f\n f = File.open(File.join(basedir, p))\n open_read_files[p] = f\n end\n f\n end",
"def base_file\n file = File.join(Dir.getwd, 'api', 'base.rb')\n FileFoo.read_file(file)\n end",
"def load_file(file)\n puts \"# loading #{file} // #{$0}\" if $DEBUG\n\n unless file == $0 then\n begin\n require file\n rescue LoadError => err\n puts \"Could not load #{file}: #{err}\"\n end\n else\n puts \"# Skipping loading myself (#{file})\" if $DEBUG\n end\n end",
"def load_file_at_toplevel(file); end",
"def load(file, default = nil) \n load!(file)\n rescue Errno::ENOENT\n default\n end",
"def read_raw_file(filename)\n File.read(\"spec/fixtures/files/#{filename}\")\nend",
"def file_open(direct_name, file_name)\n\t\tif file_name == \"u.data\"\n\t\t\ttrain_name = \"./\" + direct_name + \"/\" + file_name\n\t\t\t@test_file = nil\n\t\telse\n\t\t\t#when file_name is not empty\n\t\t\ttrain_name = \"./\" + direct_name + \"/\" + file_name.to_s + \".base\"\n\t\t\ttest_name = \"./\" + direct_name + \"/\" + file_name.to_s + \".test\"\n\t\t\t@test_file = File.open(test_name)\n\t\tend\n\t\t@train_file = File.open(train_name)\n\tend"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
POST /germinations POST /germinations.json
|
def create
@germination = Germination.new(germination_params)
respond_to do |format|
if @germination.save
format.html { redirect_to @germination, notice: 'Germination was successfully created.' }
format.json { render :show, status: :created, location: @germination }
else
format.html { render :new }
format.json { render json: @germination.errors, status: :unprocessable_entity }
end
end
end
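A hedged sketch of the incoming parameters for this scaffolded create action; the attribute names are assumptions, since germination_params and its permitted keys are not shown here.

# Hypothetical POST /germinations payload as Rails would expose it in params;
# only keys permitted by germination_params reach Germination.new.
params = {
  germination: { species: "Basil", sown_on: "2024-03-01" }
}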
|
[
"def update\n respond_to do |format|\n if @germination.update(germination_params)\n format.html { redirect_to @germination, notice: 'Germination was successfully updated.' }\n format.json { render :show, status: :ok, location: @germination }\n else\n format.html { render :edit }\n format.json { render json: @germination.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @gunzerker = Gunzerker.new(gunzerker_params)\n\n respond_to do |format|\n if @gunzerker.save\n format.html { redirect_to @gunzerker, notice: 'Gunzerker was successfully created.' }\n format.json { render :show, status: :created, location: @gunzerker }\n else\n format.html { render :new }\n format.json { render json: @gunzerker.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @aggrupation = Aggrupation.new(aggrupation_params)\n\n respond_to do |format|\n if @aggrupation.save\n format.html { redirect_to @aggrupation, notice: 'Aggrupation was successfully created.' }\n format.json { render :show, status: :created, location: @aggrupation }\n else\n format.html { render :new }\n format.json { render json: @aggrupation.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @gerenciador = Gerenciador.new(params[:gerenciador])\n\n respond_to do |format|\n if @gerenciador.save\n format.html { redirect_to @gerenciador, notice: 'Gerenciador was successfully created.' }\n format.json { render json: @gerenciador, status: :created, location: @gerenciador }\n else\n format.html { render action: \"new\" }\n format.json { render json: @gerenciador.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @galletum = Galletum.new(galletum_params)\n\n respond_to do |format|\n if @galletum.save\n format.html { redirect_to @galletum, notice: 'Galletum was successfully created.' }\n format.json { render :show, status: :created, location: @galletum }\n else\n format.html { render :new }\n format.json { render json: @galletum.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @geup = Geup.new(geup_params)\n\n respond_to do |format|\n if @geup.save\n format.html { redirect_to @geup, notice: 'Geup was successfully created.' }\n format.json { render :show, status: :created, location: @geup }\n else\n format.html { render :new }\n format.json { render json: @geup.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @gelati = Gelati.new(gelati_params)\n\n respond_to do |format|\n if @gelati.save\n format.html { redirect_to @gelati, notice: \"Gelati was successfully created.\" }\n format.json { render :show, status: :created, location: @gelati }\n else\n format.html { render :new, status: :unprocessable_entity }\n format.json { render json: @gelati.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @alligator = Alligator.new(params[:alligator])\n\n respond_to do |format|\n if @alligator.save\n format.html { redirect_to @alligator, notice: 'Alligator was successfully created.' }\n format.json { render json: @alligator, status: :created, location: @alligator }\n else\n format.html { render action: \"new\" }\n format.json { render json: @alligator.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @lesuur = Lesuur.new(params[:lesuur])\n @dags = Dag.all\n respond_to do |format|\n if @lesuur.save\n format.html { redirect_to dags_path, notice: 'Lesuur werd succesvol aangemaakt.' }\n format.json { render json: @lesuur, status: :created, location: @lesuur }\n else\n format.html { render action: \"new\" }\n format.json { render json: @lesuur.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @relatorio_gerals = RelatorioGeral.all\n authorize @relatorio_gerals\n\n @relatorio_geral = RelatorioGeral.new(relatorio_geral_params)\n\n respond_to do |format|\n if @relatorio_geral.save\n format.html { redirect_to @relatorio_geral, notice: 'Relatório geral criado com sucesso!' }\n format.json { render :show, status: :created, location: @relatorio_geral }\n else\n format.html { render :new }\n format.json { render json: @relatorio_geral.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @aggressor = Aggressor.new(aggressor_params)\n\n respond_to do |format|\n if @aggressor.save\n format.html { redirect_to @aggressor, notice: 'Aggressor was successfully created.' }\n format.json { render :show, status: :created, location: @aggressor }\n else\n format.html { render :new }\n format.json { render json: @aggressor.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @rigging = Rigging.new(rigging_params)\n\n respond_to do |format|\n if @rigging.save\n format.html { redirect_to @rigging, notice: 'Rigging was successfully created.' }\n format.json { render :show, status: :created, location: @rigging }\n else\n format.html { render :new }\n format.json { render json: @rigging.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @garrison = Garrison.new(garrison_params)\n @garrison.kingdom_id = current_user.current_kingdom.id\n @garrison.recruted = true\n\n respond_to do |format|\n if @garrison.save\n format.html { redirect_to @garrison, notice: 'Garrison was successfully created.' }\n format.json { render action: 'show', status: :created, location: @garrison }\n else\n format.html { render action: 'new' }\n format.json { render json: @garrison.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @gig = Gig.new(params[:gig])\n\n if @gig.save\n render json: @gig, status: :created, location: @gig\n else\n render json: @gig.errors, status: :unprocessable_entity\n end\n end",
"def create\n @golfer = Golfer.new(params[:golfer])\n\n respond_to do |format|\n if @golfer.save\n format.html { redirect_to @golfer, notice: 'Golfer was successfully created.' }\n format.json { render json: @golfer, status: :created, location: @golfer }\n else\n format.html { render action: \"new\" }\n format.json { render json: @golfer.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @gangster = Gangster.new(gangster_params)\n\n respond_to do |format|\n if @gangster.save\n format.html { redirect_to @gangster, notice: 'Gangster was successfully created.' }\n format.json { render :show, status: :created, location: @gangster }\n else\n format.html { render :new }\n format.json { render json: @gangster.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @gentre = Gentre.new(gentre_params)\n\n respond_to do |format|\n if @gentre.save\n format.json { render :show, status: :created, location: @gentre }\n else\n format.json { render json: @gentre.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @gestor = Gestor.new(params[:gestor])\n @gestor.password = \"12345678\"\n @gestor.password_confirmation = \"12345678\"\n grupo_gestor = Grupo.where(internal_id: Grupo::GESTOR).first\n @gestor.grupo = grupo_gestor\n\n respond_to do |format|\n if @gestor.save\n format.html { redirect_success(\"Gestor adicionado com sucesso!\",:gestor, :index)}\n format.json { render json: @gestor, status: :created, location: @gestor }\n else\n format.html { redirect_error(\"Erro ao adicionar o gestor!\",:gestor, :index)}\n format.json { render json: @gestor.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @alligator = Alligator.new(alligator_params)\n\n respond_to do |format|\n if @alligator.save\n format.html { redirect_to @alligator, notice: 'Alligator was successfully created.' }\n format.json { render :show, status: :created, location: @alligator }\n else\n format.html { render :new }\n format.json { render json: @alligator.errors, status: :unprocessable_entity }\n end\n end\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
PATCH/PUT /germinations/1 PATCH/PUT /germinations/1.json
|
def update
respond_to do |format|
if @germination.update(germination_params)
format.html { redirect_to @germination, notice: 'Germination was successfully updated.' }
format.json { render :show, status: :ok, location: @germination }
else
format.html { render :edit }
format.json { render json: @germination.errors, status: :unprocessable_entity }
end
end
end
|
[
"def update\n @golfer = Golfer.find(params[:id])\n\n respond_to do |format|\n if @golfer.update_attributes(params[:golfer])\n format.html { redirect_to @golfer, notice: 'Golfer was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @golfer.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @allergy = Allergy.find(params[:id])\n respond_to do |format|\n if @allergy.update_attributes(params[:allergy])\n format.html { redirect_to @allergy, notice: 'Allergy was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @allergy.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @allergy.update(allergy_params)\n format.html { redirect_to @allergy, notice: t('allergies.update_success') }\n format.json { render :show, status: :ok, location: @allergy }\n else\n format.html { render :edit }\n format.json { render json: @allergy.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @grower.update(grower_params)\n format.html { redirect_to tenant_growers_path(@tenant,@grower), notice: 'Grower was successfully updated.' }\n format.json { render :show, status: :ok, location: @grower }\n else\n format.html { render :edit }\n format.json { render json: @grower.errors, status: :unprocessable_entity }\n end\n end\n end",
"def api_patch(path, data = {})\n api_request(:patch, path, :data => data)\n end",
"def update\n respond_to do |format|\n if @golfer.update(golfer_params)\n format.html { redirect_to @golfer, notice: 'Golfer was successfully updated.' }\n format.json { render :show, status: :ok, location: @golfer }\n else\n format.html { render :edit }\n format.json { render json: @golfer.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @gunzerker.update(gunzerker_params)\n format.html { redirect_to @gunzerker, notice: 'Gunzerker was successfully updated.' }\n format.json { render :show, status: :ok, location: @gunzerker }\n else\n format.html { render :edit }\n format.json { render json: @gunzerker.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @gen_hotel.update(gen_hotel_params)\n format.html { redirect_to :gen_hotels, notice: 'Gen hotel was successfully updated.' }\n format.json { respond_with_bip(@gen_hotel) }\n else\n format.html { render :edit }\n format.json { render json: @gen_hotel.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @legs1.update(legs1_params)\n format.html { redirect_to \"/legs1s\"}\n format.json { render :show, status: :ok, location: @legs1 }\n else\n format.html { render :edit }\n format.json { render json: @legs1.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @relatorio_gerals = RelatorioGeral.all\n authorize @relatorio_gerals\n\n respond_to do |format|\n if @relatorio_geral.update(relatorio_geral_params)\n format.html { redirect_to @relatorio_geral, notice: 'Relatório geral atualizado com sucesso!' }\n format.json { render :show, status: :ok, location: @relatorio_geral }\n else\n format.html { render :edit }\n format.json { render json: @relatorio_geral.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @student_allergy = StudentAllergy.find(params[:id])\n\n respond_to do |format|\n if @student_allergy.update_attributes(params[:student_allergy])\n format.html { redirect_to @student_allergy, notice: 'Student allergy was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @student_allergy.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n\n customer_allergy_params[:allergy_type] = customer_allergy_params[:allergy_type].to_i\n\n respond_to do |format|\n if @customer_allergy.update(customer_allergy_params)\n format.html { redirect_to @customer_allergy, notice: 'Customer allergy was successfully updated.' }\n format.json { render :show, status: :ok, location: @customer_allergy }\n else\n format.html { render :edit }\n format.json { render json: @customer_allergy.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @gibier.update(gibier_params)\n format.html { redirect_to @gibier, notice: 'Gibier was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @gibier.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @meritbadge = Meritbadge.find(params[:id])\n\n respond_to do |format|\n if @meritbadge.update_attributes(params[:meritbadge])\n format.html { redirect_to(@meritbadge, :notice => 'Meritbadge was successfully updated.') }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @meritbadge.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def update\n @gig_request = GigRequest.find(params[:id])\n\n respond_to do |format|\n if @gig_request.update_attributes(params[:gig_request])\n format.html { redirect_to @gig_request, notice: 'Gig request was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @gig_request.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @bracket_golfer.update(bracket_golfer_params)\n format.html { redirect_to @bracket_golfer, notice: 'Bracket golfer was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @bracket_golfer.errors, status: :unprocessable_entity }\n end\n end\n end",
"def patch *args\n make_request :patch, *args\n end",
"def update\n respond_to do |format|\n if @superset.update(superset_params)\n format.html { redirect_to @superset, notice: 'Superset was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @superset.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @api_v1_initiative.update(api_v1_initiative_params)\n format.html { redirect_to @api_v1_initiative, notice: 'Initiative was successfully updated.' }\n format.json { render :show, status: :ok, location: @api_v1_initiative }\n else\n format.html { render :edit }\n format.json { render json: @api_v1_initiative.errors, status: :unprocessable_entity }\n end\n end\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Method to update the quantity of an item. Input: updated_and_removed (hash), the food, and the integer quantity to set. Steps: look up the food key in the hash and assign it the new quantity value. Output: the hash with quantities changed
|
def update_quantity(updated_and_removed, food, quantity)
updated_and_removed[food] = quantity
return updated_and_removed
end
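A worked example of update_quantity; the grocery items and quantities are invented for illustration.

list = { "apples" => 3, "bread" => 1 }
update_quantity(list, "apples", 5) # => { "apples" => 5, "bread" => 1 }
update_quantity(list, "milk", 2)   # => { "apples" => 5, "bread" => 1, "milk" => 2 }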
|
[
"def update_item_quantity(hash_of_items, item, quantity)\n hash_of_items[item] = quantity\n hash_of_items\nend",
"def update_item_quantity(item, new_quantity)\n\titem_removed_hash = remove_item(\"cereal\")\n\titem_removed_hash[item] = new_quantity\n\tupdate_quantity_hash = item_removed_hash\nend",
"def update_item_quantity(item, quantity, grocery_list_hash)\n \n grocery_list_hash[item] = quantity\n return grocery_list_hash\nend",
"def update_item_quantity(item, quantity, grocery_list_hash)\n\n grocery_list_hash[item] = quantity\n return grocery_list_hash\nend",
"def update_quantity(list, item, quantity)\n if quantity == 0\n remove_item(list, item)\n else\n list[item] = quantity\n end\nend",
"def update_qty(shopping_list, item, quantity)\n\n\tadd_item(shopping_list, item, quantity)\n\nend",
"def update_qty(shopping_list, item, quantity)\r\n\r\n\tadd_item(shopping_list, item, quantity)\r\n\r\nend",
"def update_quantity(item, list, quantity)\n add_item(item, list, quantity)\nend",
"def update_quantity(list,food,quantity)\n\tlist[food] = quantity\n\tlist\nend",
"def update_quantity(list_name, item, quantity)\n add_item_and_quantity(list_name, item, quantity)\nend",
"def update_item_quantity(list_hash,item_name,quantity)\n if list_hash[item_name]\n puts \"Updated the quantity of #{item_name} to: #{quantity}.\"\n list_hash[item_name] = quantity\n else\n puts \"Item does not exist.\"\n end\n\n list_hash\nend",
"def update_item(item,quantity_changed,first_list)\n first_list[item] = quantity_changed\n\nend",
"def update_qty(list_items, item_name, new_qty)\n raise ArguementError.new(\"This item does not exist\") unless list_items.include?(item_name)\n list_items[item_name] = item_qty\nend",
"def update_quantity(list, item_name, quantity)\n\tlist[item_name] = quantity\n\tlist\nend",
"def updateItemNo(hash, item, new_number)\n\thash[item.to_sym] = new_number\n\tputs viewInventory(hash)\nend",
"def update_qty(hash)\r\n\tputs \"which item would you like to update the qty for?\"\r\n\titem = gets.chomp\r\n\tputs \"Enter the new qty for #{item}\"\r\n\tqty = gets.chomp.to_i\r\n\thash[item] = qty\r\nend",
"def update_quantity(item, quantity, groceries_list)\n groceries_list[item] = quantity\nend",
"def update(values)\n self.quantity = values[:quantity]\n end",
"def update(items)\n # clear!\n self.items.each do |i|\n number = items[i.id].blank? ? 1 : items[i.id].to_i <= 0 ? 1 : items[i.id]\n number.to_i < 99 ? i.quantity = number.to_i : i.quantity=99\n end\n # items.each { |id, quantity| add_items(id, quantity) }\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Top-to-bottom, per-column approach
|
def top_to_bottom_approach
largest_prod_of_all_cols = 0
for each_row in (0..15)
for each_col in (0..19)
curr_prod_in_col = $grid_of_numbers[each_row][each_col] * $grid_of_numbers[each_row + 1][each_col] * $grid_of_numbers[each_row + 2][each_col] * $grid_of_numbers[each_row + 3][each_col]
if curr_prod_in_col > largest_prod_of_all_cols
largest_prod_of_all_cols = curr_prod_in_col
end
end
end
return largest_prod_of_all_cols
end
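A self-contained sketch of the same column-wise scan, assuming a rectangular grid such as the 20x20 grid of Project Euler problem 11; it derives the loop bounds from the grid's dimensions instead of hardcoding 0..15 and 0..19.

def largest_column_product(grid, window = 4)
  best = 0
  (0..grid.length - window).each do |row|
    (0...grid.first.length).each do |col|
      # Product of `window` vertically adjacent cells starting at (row, col)
      product = (0...window).map { |offset| grid[row + offset][col] }.reduce(:*)
      best = product if product > best
    end
  end
  best
end

largest_column_product([[1, 2], [3, 4], [5, 6], [7, 8], [9, 10]]) # => 1920 (4 * 6 * 8 * 10)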
|
[
"def topToBottom(grid, width, height)\n\n strings = Array.new\n string = String.new\n\n for x in 0..width-1 do\n\n string = \"\"\n\n for y in 0..height-1 do\n letter = grid[[x, y]]\n string << letter\n end\n\n strings << string\n\n end\n\n return strings\nend",
"def arrange_in_columns(cols, widths, border)\n row = \"\"\n idxs = cols.collect{|c| 0 }\n\n while cols.zip(idxs).any?{|col| col[0].length > col[1] }\n cols.each.with_index do |col, idx|\n slice_width = widths[idx]\n\n slice = col.slice(idxs[idx], slice_width) || \"\" # sacamos el pedazo de la columna\n row << slice.ljust(slice_width) # concatenamos a la fila\n idxs[idx] += slice_width # recorremos el indice\n row << \" \" * border # agregamos el border de la derecha\n end\n\n row = row.strip << \"\\n\" # quitamos el ultimo border\n end\n\n return row.strip # quitamos el ultimo salto de linea\n end",
"def crossings\n idx = 0\n @cells[0...-1].map { |c| idx += c.colspan } \n end",
"def topToBottomWraparound(grid, width, height)\n\n strings = topToBottom(grid, width, height)\n\n for string in strings do\n\n string << string\n\n end\n\n return strings\n\nend",
"def endCol; @col + @width - 1; end",
"def bottom_most_blocks\n width.times.map do |column_index|\n row_blocks_with_row_index = @blocks.each_with_index.to_a.reverse.detect do |row_blocks, row_index|\n !row_blocks[column_index].clear?\n end\n bottom_most_block = row_blocks_with_row_index[0][column_index]\n bottom_most_block_row = row_blocks_with_row_index[1]\n {\n block: bottom_most_block,\n row_index: bottom_most_block_row,\n column_index: column_index\n }\n end\n end",
"def top_left_cell; end",
"def part_of_col_down(starting, ending)\n col(starting[0])[(ending[1] + 1)..(starting[1] - 1)]\n end",
"def inner_up\n # looking up at the bottom of the inner grid\n return 0 if inner.nil?\n # sum bottom edge\n (0..@size-1).map { |x| inner.get_loc(x, @size - 1) ? 1 : 0 }.sum\n end",
"def arrange_tiled\n # some simplifying assumptions for constants that may need revisiting for more flexibility\n margin_v = 5\n row_height = 30\n \n rows = self.rows_of_subviews\n row_v_position = 5\n rows.each { |row|\n total_element_width = row.inject(0) {|r, view| r += view.width}\n total_margin_width = self.width - total_element_width\n margin_h = total_margin_width / (row.count + 1) # e.g. if 3 views, there are 4 margins\n \n x_tally = 0\n row.each { |view|\n view.center = CGPointMake(x_tally + margin_h + (view.width / 2), row_v_position + (row_height / 2))\n x_tally += margin_h + view.width\n }\n \n row_v_position += row_height\n }\n end",
"def topLeftToBottomRight(grid, width, height)\n\n strings = Array.new\n string = String.new\n\n # Get the top-right half of the grid, including the longest diagonal\n for x in 1..width do\n\n string = \"\"\n\n for y in 0..x-1 do\n\n letter = grid[[width-x + y, y]]\n\n string << letter\n\n end\n\n strings << string\n\n end\n\n # Get the bottom-left half of the grid, not including the longest diagonal\n # TODO: find a better way of doing this, without needing to reverse the string\n for x in 0..width-2 do\n\n string = \"\"\n\n for y in 0..x do\n\n letter = grid[[x-y, height - y - 1]]\n\n string << letter\n\n end\n\n string.reverse!\n\n strings << string\n\n end\n\n return strings\n\nend",
"def column_depth(col)\n GridModelContracts.pre_column_depth(self, col-1)\n (0..@y).each { |row|\n if @grid[row][col-1] > 0\n GridModelContracts.post_column_depth(self, @grid[row][col-1])\n return row\n end\n }\n return @y+1\n end",
"def controller_for_top_column(column, row = -1)\n 0x68 + column\n end",
"def print_in_cols a, noc=nil\n unless noc\n noc = 3\n if a.size < 7\n noc = 1\n elsif a.size < 15\n noc = 2\n end\n end\n\n x = noc - 1\n cols = a.each_slice((a.size+x)/noc).to_a\n # todo width should be determined based on COLS of screen, and width of data\n cols.first.zip( *cols[1..-1] ).each{|row| puts row.map{|e| e ? '%-30s' % e : ' '}.join(\" \") }\nend",
"def topRightToBottomLeft(grid, width, height)\n\n strings = Array.new\n string = String.new\n\n # Get the top-left half of the grid, including the longest diagonal\n for x in 0..width-1 do\n\n string = \"\"\n\n for y in 0..x do\n\n letter = grid[[x - y, y]]\n\n string << letter\n\n end\n\n strings << string\n\n end\n\n # Get the bottom-right half of the grid, not including the longest diagonal\n # TODO: find a better way of doing this, without needing to reverse the string\n for x in 0..width-2 do\n\n string = \"\"\n\n for y in 0..x do\n\n letter = grid[[width - x + y - 1, height - y - 1]]\n\n string << letter\n\n end\n\n string.reverse!\n\n strings << string\n end\n\n return strings\nend",
"def get_columns headers, rows\n\t\trange = []\n\t\theaders.reverse_each.with_index do |header, col|\n\t\t\trows.each.with_index do |row, i|\n\t\t\t\trange = [header.outer_left,header.left,header.right,header.outer_right]\n\t\t\t\twhile not @page_content.search_results_left(range, row, header.results[i])\n\t\t\t\t\trange[3] -= 1\n\t\t\t\tend\n\t\t\t\tunless header.results[i].result == Result::NOT_FOUND\n\t\t\t\t\theader.recalculate_position(false, false, true, true) \n\t\t\t\tend\n\t\t\tend\n\t\tend\n\tend",
"def bottom_cell(cell)\n get_next_cell(cell) { | cell | Coordinates.new(cell.col, cell.row+1)}\n end",
"def col(p, r)\n x = p * (p - 1)\n r = p - r - 1 if p.odd?\n cell(x - r)\n end",
"def up_down_col_sort(matrix)\n matrix\n .flatten.sort\n .each_slice(matrix.size)\n .map.with_index { |col, idx| idx.even? ? col : col.reverse }\n .transpose\nend"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
destroys spelling analysis records for dirty attributes
|
def expire_spellcheck_analysis!
spellcheck_attributes.each do |attribute|
if send("#{attribute}_changed?")
spellcheck_analyses.for(
attribute
).destroy_all
end
end
end
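The per-attribute "#{attribute}_changed?" checks above come from ActiveModel::Dirty (spellcheck_analyses.for is assumed to be a scope defined elsewhere). A minimal standalone sketch of that dirty-tracking mechanism, using a made-up Draft class:

require "active_model"

class Draft
  include ActiveModel::Dirty

  define_attribute_methods :title

  def initialize
    @title = nil
  end

  def title
    @title
  end

  def title=(value)
    title_will_change! unless value == @title
    @title = value
  end

  def save
    changes_applied # clears the dirty flags, as ActiveRecord does after persisting
  end
end

draft = Draft.new
draft.title = "Helo wrld"
draft.title_changed? # => true, so a cached spellcheck analysis for :title would be expired
draft.save
draft.title_changed? # => false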
|
[
"def update_analysis\n model.wordcounts.destroy_all\n create_analysis\n delete_orphaned_keywords\n end",
"def clean_up\n @dirty = false\n end",
"def clean_attributes\n @attribute_changes = {}\n end",
"def clean_dirty_attributes!\n @dirty_attribute_keys = []\n end",
"def clear_attributes\n klass.instance_variable_set(:@mappings, nil)\n end",
"def clean\n @dirty = false\n end",
"def fix_up_controlled_vocabs\n sample_attributes.each do |attribute|\n unless attribute.sample_attribute_type.controlled_vocab?\n attribute.sample_controlled_vocab = nil\n end\n end\n end",
"def clear_misspelled(save = false)\n return unless misspelling || correct_spelling\n\n was = correct_spelling.display_name\n self.misspelling = false\n self.correct_spelling = nil\n save_with_log(:log_name_unmisspelled, other: was) if save\n end",
"def clean\n\n # TODO: These operational attributes are those commonly used by\n # OpenLDAP 2.2. Others should probably be supported.\n #\n %w[ creatorsname createtimestamp modifiersname modifytimestamp\n entrycsn entryuuid structuralobjectclass ].each do |attr|\n @attrs.delete( attr )\n end\n\n # Clean out duplicate attribute values.\n @attrs.each_key { |k| @attrs[k].uniq! }\n\n self\n end",
"def clean!\n @changes = []\n end",
"def clean\r\n self.tables.clean if self.tables\r\n end",
"def clean!\n @rules = {}\n reinit_errrors\n end",
"def remove_dataset_attributes\n call_on_each_qernel_object(:reset_dataset_attributes)\n reset_goals\n end",
"def clear_attributes\n @attributes = nil\n end",
"def clean_metadata\n\n remove_redundant_attributes\n humanise_metadata\n\n end",
"def clear_changed_attributes\n\t\t\t \t\t$TRACE.debug 5, \"clear_changed_attributes\"\n\t\t\t \tself.changed_attributes_aado = []\n\t\t\t \tend",
"def clean\n tables.clean if tables\n end",
"def clear_synonym\n return unless synonym\n\n names = synonyms\n\n # Get rid of the synonym if only one's going to be left in it.\n if names.count <= 2\n synonym&.destroy\n names.each do |n|\n n.synonym = nil\n n.save\n end\n\n # Otherwise, just detach this name.\n else\n self.synonym = nil\n save\n end\n\n # This has to apply to names that are misspellings of this name, too.\n Name.where(correct_spelling: self).find_each do |n|\n n.correct_spelling = nil\n n.save\n end\n end",
"def clean_up\n if self.deleted?\n # clean room type mapping\n RoomTypeChannelMapping.find_all_by_room_type_id(self.id).each do |rtcm|\n rtcm.update_attribute(:deleted, true)\n end\n # clean master rate mapping\n RoomTypeMasterRateMapping.find_all_by_room_type_id(self.id).each do |rtmr|\n RoomTypeMasterRateChannelMapping.find_all_by_room_type_master_rate_mapping_id(rtmr.id).each do |rtc|\n rtc.update_attribute(:deleted, true)\n end\n rtmr.update_attribute(:deleted, true)\n end\n # clean availability link from\n RoomTypeInventoryLink.find_all_by_room_type_from_id(self.id).each do |rml|\n rml.update_attribute(:deleted, true)\n end\n # clean availability link to\n RoomTypeInventoryLink.find_all_by_room_type_to_id(self.id).each do |rml|\n rml.update_attribute(:deleted, true)\n end\n end\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
schedules a spellcheck analysis if any attribute is missing an analysis
|
def schedule_spellcheck_analysis!
if missing_spellcheck_analysis?
self.class.delay.perform_spellcheck!(id)
end
end
|
[
"def analyse!\n @words = formatted_mispelled_words\n @success = true\n rescue *rescued_exceptions_keys => e\n @success = false\n if rescued_exceptions_messages.include?(e.message)\n # aspell is not present. track analysis as failure\n else\n raise e\n end\n end",
"def spellcheck\n load_meta_docs\n do_spellcheck\n end",
"def expire_spellcheck_analysis!\n spellcheck_attributes.each do |attribute|\n if send(\"#{attribute}_changed?\")\n spellcheck_analyses.for(\n attribute\n ).destroy_all\n end\n end\n end",
"def check!(names=[],&block)\n return enum_for(__method__) unless block\n\n # load the YARD cache\n YARD::Registry.load!\n\n # clear any statistics from last run\n @misspelled.clear\n\n FFI::Hunspell.dict(@lang) do |dict|\n # add user specified words\n @added.each { |word| dict.add(word.dup) }\n\n unless names.empty?\n names.each do |name|\n if (obj = YARD::Registry.at(name))\n spellcheck_object(obj,dict,&block)\n end\n end\n else\n YARD::Registry.each do |obj| \n spellcheck_object(obj,dict,&block)\n end\n end\n end\n end",
"def spellcheck\n lines = ::File.open(@file_path).readlines\n lines.each_with_index do |line, line_index|\n check_line(line, line_index + 1)\n end\n @incorrect_words\n end",
"def load_necessary_words\n necessary_words = []\n\n necessary_spellings.each do |spelling|\n if @table.key?(spelling)\n necessary_words << @table[spelling]\n else\n $stderr.puts \"Warning! #{spelling} is a necessary word but it is not in the corpus.\"\n end\n end\n\n necessary_words\n end",
"def test_hunspell_gem_dictionary\n test_file_path = \"#{RESOURCES_PATH}/pride_1.txt\"\n dictionary = AlfonsoX::SpellChecker::Dictionary::Hunspell.new('en_US')\n spellchecker = AlfonsoX::SpellChecker::Main.new(\n test_file_path,\n dictionary\n )\n incorrect_words_on_all_files = spellchecker.check\n incorrect_words_on_pride_file = spellchecker.check([test_file_path])\n assert_equal 1, incorrect_words_on_all_files.length\n assert_equal 1, incorrect_words_on_all_files[test_file_path].length\n assert_equal 1, incorrect_words_on_pride_file.length\n assert_equal 1, incorrect_words_on_pride_file[test_file_path].length\n\n assert_raises do\n spellchecker.check(['non_existant'])\n end\n end",
"def check_against_bad_word_list(attrs_to_check)\n attrs_to_check.each do |attribute|\n check = check_bad_word_list(attribute)\n model.errors.add(attribute, I18n.translate('activerecord.errors.messages')[check]) unless check.nil?\n end\n end",
"def run\n spellchecker = AlfonsoX::SpellChecker::Main.from_config(@config_file_path)\n spellchecker_errors_by_file = if ARGV&.length&.positive?\n spellchecker.check(ARGV)\n else\n spellchecker.check_all\n end\n\n exit_status = SUCCESS_EXIT_STATUS\n spellchecker_errors_by_file.each do |file_path, spellchecker_errors|\n spellchecker_errors.each do |spellchecker_error_i|\n print_spellcheck_error(file_path, spellchecker_error_i)\n exit_status = ERROR_EXIT_STATUS\n end\n end\n\n print_status(exit_status)\n exit(exit_status)\n end",
"def validate\n\t\t\t@words.keys.sort.each do |speech_part|\n\t\t\t\t@words[speech_part].sort.each do |word|\n\t\t\t\t\terr_msg = validate_word(word)\n\t\t\t\t\tif err_msg\n\t\t\t\t\t\tputs \"warn: #{speech_part} '#{word.text}' - #{err_msg}\"\n\t\t\t\t\tend\n\t\t\t\tend\n\t\t\tend\n\t\tend",
"def run\n\t\tauditable.each { |element| element.taint_analysis }\n\tend",
"def auto_analyze=(_arg0); end",
"def auto_analyze; end",
"def test_word_list_dictionary_success\n test_file_path = \"#{RESOURCES_PATH}/pride_1.txt\"\n test_file_words = ::File.read(test_file_path).split(/[^\\w]+/)\n dictionary = AlfonsoX::SpellChecker::Dictionary::WordList.new(test_file_words)\n spellchecker = AlfonsoX::SpellChecker::Main.new(\n test_file_path,\n dictionary\n )\n incorrect_words = spellchecker.check\n assert_equal 0, incorrect_words.length\n end",
"def solr_spellcheck(params)\n ActsAsSolr::Post.execute(Solr::Request::Spellcheck.new(params))\n end",
"def check(node)\n alignment_node = alignment_node(node)\n return if alignment_node.nil?\n\n alignment_loc = alignment_location(alignment_node)\n kw_loc = node.loc.keyword\n\n return if alignment_loc.column == kw_loc.column || alignment_loc.line == kw_loc.line\n\n add_offense(\n kw_loc, message: format_message(alignment_node, alignment_loc, kw_loc)\n ) do |corrector|\n autocorrect(corrector, node, alignment_loc)\n end\n end",
"def mispelled_words\n Spellchecker.check(@text, LANG).reject do |word|\n word[:correct]\n end\n end",
"def run_warned; end",
"def should_spell_check?\n # FORCE DISABLE Jun 15 2015\n return false\n\n should = \n params[:q].present? && params[:q].ascii_only? && (params[:page].nil? || params[:page] == \"1\") &&\n (! probably_bot?) && (params[:suppress_spellcheck] != \"1\") && params[:search_field] != \"cql\" && \n # Pretty hacky way to try and make sure we're going to render html,not atom or\n # something else. not entirely robust, but best rails gives us. \n (params[:format].blank? || params[:format] == \"html\")\n\n return should\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Epay::Subscription.create(:card_no => '...', :)
|
def create(params)
params.merge!(
:order_no => (Time.now.to_f * 10000).to_i.to_s, # Produce a unique order_no - it's not used anywhere, but ePay needs this
:amount => 0
)
post = Api.default_post_for_params(params).merge({
:subscription => 1,
:subscriptionname => params[:description]
})
query = Api.authorize(post)
if query['accept']
# Return the new subscriber
subscription = new(query["subscriptionid"].to_i).reload
# Set (obfuscated) card number:
subscription.card_no = query["tcardno"]
subscription
else
new(nil, 'error' => query["error"])
end
end
|
[
"def create_stripe_card\n customer = Stripe::Customer.retrieve(self.user.client_id)\n begin\n token = Stripe::Token.create(\n :card => {\n :name => self.name,\n :number => self.number,\n :exp_month => self.exp_month,\n :exp_year => self.exp_year,\n :cvc => self.cvc\n }\n )\n customer.sources.create(:card => token['id'])\n self.stripe_id = token['card']['id']\n self.last4 = self.number.split(//).last(4).join\n rescue\n self.errors.add(:base, 'Credit card error')\n return false\n end\n end",
"def create_subscription(payment_intent_id)\n result = ChargeBee::Subscription.create({\n plan_id: '<chargebee-plan-id>',\n auto_collection: 'on',\n payment_intent: {\n gateway_account_id: '<checkout.com-gateway-id>',\n gw_token: payment_intent_id\n }\n })\n subscription = result.subscription\n puts \"Chargebee subscription ID: #{subscription.id} created for Checkout.com payment ID: #{payment_intent_id}\"\n end",
"def save_with_subscription\n #if the fields pass the devise validations\n if valid?\n #call stripe and create a customer\n customer = Stripe::Customer.create(description: email, plan: plan_id, card: stripe_card_token)\n self.stripe_customer_token = customer.id\n save!\n end\n end",
"def create_buyer email_address, card_uri, name=nil, meta={}\n account = Account.new(\n :uri => self.accounts_uri,\n :email_address => email_address,\n :card_uri => card_uri,\n :name => name,\n :meta => meta,\n )\n account.save\n end",
"def save_with_subscription\n if valid?\n customer = Stripe::Customer.create(email: email, plan: ENV['stripe_pro_plan_id'], card: stripe_card_token)\n self.stripe_customer_token = customer.id\n save!\n end\n end",
"def create_subscription(data)\r\n Subscription.new.create(data)\r\n end",
"def create_stripe_account\n stripe_account_service = CreateStripeAccountService.new(\n is_managed: false,\n country_code: \"US\",\n email: vendor.email\n )\n stripe_account_service.perform\n if stripe_account_service.result.success?\n vendor.account = stripe_account_service.result.data.id\n vendor.save\n end\n end",
"def create_stripe_account\n @stripe_account = Stripe::Account.create(managed: true, country: \"us\")\n end",
"def create_stripe_subscription(account)\n customer = Stripe::Customer.retrieve(account.stripe_customer_id)\n plan = Plan.find(params[:account][:plan])\n subscription = customer.subscriptions.create(\n plan: plan.stripe_id,\n source: params[:token]\n )\n\n account.plan = plan\n account.stripe_subscription_id = subscription.id\n account.save\n end",
"def purchase_sub_existing_card\n @plan = params[:sub][:plan] #integer corresponding to my_plan_id\n @events_number = params[:sub][:events_number]\n @code = params[:sub][:code]\n @new_price = params[:sub][:new_price]\n\n # retrieve stripe customer object yet again\n if !current_user.customer_id.blank?\n c = Stripe::Customer.retrieve(current_user.customer_id)\n end \n \n if is_valid_sub_coupon(@code) \n\n #create subscription for this customer in stripe (note that update_subscription creates a new subscription for this customer in this case)\n if has_not_trialed?\n c.update_subscription(:plan => @plan, :coupon => @code)\n else\n c.update_subscription(:plan => @plan, :trial_end => (Date.today + 1.day).to_time.to_i, :coupon => @code) \n end \n #create new subscription object in my database\n @sub = Subscription.new(:user_id => current_user.id, :email => current_user.email, :customer_id => c.id, :my_plan_id => @plan, :plan_name => Plan.find_by_my_plan_id(@plan).name, :active => true)\n @sub.events_remaining = @events_number\n @sub.coupon = @code\n @sub.save \n\n #create receipt\n @r = Receipt.new(:user_id => current_user.id, :email => current_user.email, :customer_id => c.id,\n :subscription_id => @sub.id, :sub_my_plan_id => @sub.my_plan_id, :sub_plan_name => @sub.plan_name,\n :sub_events_number => @sub.events_remaining, :sub_reg_monthly_cost_in_cents => Plan.find_by_my_plan_id(@sub.my_plan_id).monthly_cost_cents,\n :sub_actual_monthly_cost_in_cents => @new_price, :sub_coupon_name => @sub.coupon) \n @r.save\n\n #mail receipt\n UserMailer.sub_receipt(current_user, @r).deliver\n\n else\n #create subscription for this customer in stripe (note that update_subscription creates a new subscription for this customer in this case)\n if has_not_trialed?\n c.update_subscription(:plan => @plan)\n else\n c.update_subscription(:plan => @plan, :trial_end => (Date.today + 1.day).to_time.to_i) \n end \n #create new subscription object in my database\n @sub = Subscription.new(:user_id => current_user.id, :email => current_user.email, :customer_id => c.id, :my_plan_id => @plan, :plan_name => Plan.find_by_my_plan_id(@plan).name, :active => true)\n @sub.events_remaining = @events_number\n @sub.save \n\n #create receipt\n @r = Receipt.new(:user_id => current_user.id, :email => current_user.email, :customer_id => c.id,\n :subscription_id => @sub.id, :sub_my_plan_id => @sub.my_plan_id, :sub_plan_name => @sub.plan_name,\n :sub_events_number => @sub.events_remaining, :sub_reg_monthly_cost_in_cents => Plan.find_by_my_plan_id(@sub.my_plan_id).monthly_cost_cents,\n :sub_actual_monthly_cost_in_cents => @new_price, :sub_coupon_name => @sub.coupon) \n @r.save\n\n #mail receipt\n UserMailer.sub_receipt(current_user, @r).deliver\n end \n\n\n flash[:success] = \"Thank you! You are now subscribed to the #{Plan.find_by_my_plan_id(@plan).name.titleize} plan!\"\n redirect_to current_user\n\n #rescue Stripe::StripeError => e # THIS CODE WORKS!!! NEEED TO FIGURE OUT HOW EXACTLY\n # logger.error \"Stripe error while creating subscription w/o active sub for existing user with card on file (purchase_sub_existing_card)\"\n # flash[:error] = \"Something went wrong. Please try again or contact us!\"\n # redirect_to current_user\n\nend",
"def create_payment_card(params)\n c_r Lokalise::Resources::PaymentCard, :create, nil, params\n end",
"def create\n @eno_card = EnoCard.new(eno_card_params)\n\n respond_to do |format|\n if @eno_card.save\n format.html { redirect_to @eno_card, notice: 'Eno card was successfully created.' }\n format.json { render :show, status: :created, location: @eno_card }\n else\n format.html { render :new }\n format.json { render json: @eno_card.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create_payment\n payment = ShopPayment.new({\n :order => @order,\n :gateway => gateway_name,\n :amount => @order.price,\n :card_type => card_type,\n :card_number=> card_number_secure\n })\n \n @order.update_attribute(:status, 'paid')\n \n @result[:payment] = payment.save\n end",
"def create_first_payment\n make_payment\n end",
"def create_account_and_subscribe_single_call account_name\n contact = {\n address1: '1051 E Hillsdale Blvd',\n city: 'Foster City',\n country: 'United States',\n firstName: 'John',\n lastName: 'Smith',\n zipCode: '94404',\n state: 'CA'\n }\n #get the rate plans for the product\n product_rate_plan = get_product_rate_plans_for_product 'Medium Monthly Plan'\n myDate = DateTime.now + 10.days;\n #create an account and subscribe to a rate plan at the same time\n subscribe(\n account_name,\n contact,\n DateTime.now.strftime(\"%Y-%m-%d\"),\n myDate.strftime(\"%Y-%m-%d\"),\n product_rate_plan['id']\n )\nend",
"def create_stripe_standalone_account\n Stripe::Account.create(\n :managed => false,\n :country => country_code,\n :email => email\n )\n end",
"def create_gocardless_subscription\n GO_CARDLESS_CLIENT.subscriptions.create(\n params: {\n amount: User::PLAN_AMOUNT.to_i,\n currency: User::GO_CARDLESS_CURRENCY,\n name: User::GO_CARDLESS_NAME,\n interval_unit: User::INTERVAL_UNIT,\n interval: 1,\n start_date: Date.current + 2.month,\n day_of_month: 1,\n links: {\n mandate: current_user.go_cardless_mandate\n }\n }\n )\n end",
"def register(card_number, certificate_number, pin_code, certificate_in_receipt, purchase_params)\n if !card_number && !certificate_number\n fail 'Не указан номер карты или сертификата'\n end\n\n if certificate_in_receipt && certificate_number.blank?\n fail 'Не указан номер сертификата'\n elsif certificate_in_receipt && pin_code.blank?\n fail 'Не указан пин-код'\n end\n\n if card_number\n card = Loyalty::Card.find_by(number: card_number)\n fail 'Карта не найдена' if card.blank?\n fail 'Карта не активирована' if card.inactive?\n fail 'Карта заблокирована' if card.blocked?\n end\n\n if certificate_number\n certificate = Certificate.find_by(number: certificate_number)\n fail 'Сертификат не найден' if certificate.blank?\n fail 'Сертификат не активирован' if certificate.inactive?\n fail 'Сертификат уже был использован' if certificate.used?\n fail 'В чеке отсутствует сертификат' if card_number.present? && !certificate_in_receipt\n certificate.check_pin_code!(pin_code) if certificate_in_receipt\n end\n\n if certificate_number && !certificate_in_receipt && !card_number\n purchase = certificate.build_purchase(purchase_params)\n else\n purchase = card.purchases.new(purchase_params)\n end\n\n\n transaction do\n purchase.save!\n purchase.commit!\n\n if card_number && certificate_number && certificate_in_receipt\n certificate.activate!(card_number)\n purchase.certificate_issued = true\n elsif certificate_number && !card_number && !certificate_in_receipt\n certificate.used!\n end\n end\n\n purchase.reload\n end",
"def stripe_account=(_arg0); end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Locate whitelisted attributes for the supplied association name
|
def whitelisted_attrs_for(assoc_name, attr_hash = whitelisted_attrs)
if assoc_name.to_s == attr_hash.keys.first.to_s
return attr_hash.values.first.reject { |v| v.is_a? Hash }
end
scoped_whitelisted_attrs = attr_hash.values.first
scoped_whitelisted_attrs.reject { |v|
!v.is_a? Hash
}.find { |v|
whitelisted_attrs_for(assoc_name, v)
}.try(:values).try(:first)
end
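A worked example of the recursive lookup above against a hypothetical nested whitelist; the association and attribute names are invented, and .try assumes ActiveSupport is loaded as in the original.

whitelist = {
  post: [:title, :body, { comments: [:author, :text] }]
}

whitelisted_attrs_for(:post, whitelist)     # => [:title, :body]  (nested hashes rejected)
whitelisted_attrs_for(:comments, whitelist) # => [:author, :text] (found one level down)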
|
[
"def find_attributes_starting_with(name)\n @attributes.select { |key| key.to_s.start_with?(name) }\n end",
"def attr_in(attr_name, *values)\n predicate! do |batch, candidate|\n candidate.respond_to?(attr_name) &&\n values.include?(candidate.send(attr_name))\n end\n end",
"def attributes_get\n @whitelist\n end",
"def _assignable_attributes(attrs)\n attrs.select { |k, _| self.class.registered_attributes.include? k.to_sym }\n end",
"def has_association?(attr_name)\n association_names.include? attr_name.to_sym\n end",
"def permitted?(attribute_name)\n resource.fields_array.include?(attribute_name.to_sym) ||\n permitted.include?(attribute_name.to_sym)\n end",
"def crosswalk_attr?(attr_name, access_by: nil)\n attr_name = attr_name.to_sym\n crosswalk_attrs(access_by: access_by).include?(attr_name)\n end",
"def find_attribute_named(name)\n @attributes.find {|m| m.name == name}\n end",
"def find_attributes(name)\n\t\t\tr=[]\n\t\t\tp=self\n\t\t\twhile(p)\n\t\t\t\tif(p.respond_to?(name))\n\t\t\t\t\tv=p.send(name)\n\t\t\t\t\tr << v if(v)\n\t\t\t\tend\n\t\t\t\tbreak if(!p.respond_to?(\"container\"))\n\t\t\t\tp=p.container\n\t\t\tend\n\t\t\tr\n\t\tend",
"def find_attribute_named(name)\n @attributes.find { |m| m.name == name }\n end",
"def collect_attribute(tag_name, attribute_name)\n (@attributes[tag_name.to_sym] ||= Set.new) << attribute_name.to_sym\n end",
"def attribute_with_param(attr_name, param_name)\n attributes = self.attributes[attr_name.downcase]\n found_attributes = attributes.select { |attribute| attribute.params[param_name.downcase] }\n found_attributes.first\n end",
"def is_association_attr?(a)\n # maybe the check is too simplistic, but will do for now\n !!a[:name].to_s.index(\"__\")\n end",
"def find_related_eav_attribute(model, attribute_name)\n name_field = eav_options[model.name][:name_field]\n base_name = base_attribute_name(attribute_name, model)\n eav_related(model).to_a.find do |relation| \n relation.send(name_field) == base_name\n end\n end",
"def _search_attributes\n _attrs.hmap do |name, settings|\n opts = settings[:options]\n next unless opts[:net_miner_search_path]\n [name, opts[:net_miner_search_path]]\n end\n end",
"def whitelist_attributes!(attributes)\n self.attributes_whitelist += attributes + (fixed_attributes + internal_attributes)\n end",
"def attribute_filtering_params\n params[:attr] ? params[:attr].map(&:to_sym) : nil\n end",
"def attribute_filtering_params\n params[:attr] ? Array(params[:attr]).map(&:to_sym) : nil\n end",
"def fetch_attr(name)\n key = name.is_a?(Array) ? (self.attr.keys & name).first : name\n self.attr[key]\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
days_of_week Helpers: Hash mapping day of week (Symbol) to valid (true) / invalid (false)
|
def days_of_week_hash
@days_of_week_hash ||= {
:sunday => (days_of_week & SUNDAY ) > 0,
:monday => (days_of_week & MONDAY ) > 0,
:tuesday => (days_of_week & TUESDAY ) > 0,
:wednesday => (days_of_week & WEDNESDAY ) > 0,
:thursday => (days_of_week & THURSDAY ) > 0,
:friday => (days_of_week & FRIDAY ) > 0,
:saturday => (days_of_week & SATURDAY ) > 0
}
end
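A worked example of the bitmask test used above; the day constants are assumed to be single-bit flags (SUNDAY = 1 through SATURDAY = 64), which the source does not show.

SUNDAY, MONDAY, TUESDAY, WEDNESDAY, THURSDAY, FRIDAY, SATURDAY = 1, 2, 4, 8, 16, 32, 64

days_of_week = MONDAY | WEDNESDAY | FRIDAY # => 42

(days_of_week & WEDNESDAY) > 0 # => true,  Wednesday is active
(days_of_week & SUNDAY) > 0    # => false, Sunday is inactive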
|
[
"def day_of_week(days)\n @validations[:day_of_week] ||= {}\n @validation_types[:day_of_week] ||= IceCube::DayOfWeekValidation.new(self)\n days.each do |day, occurrences|\n unless day.is_a?(Integer)\n raise ArgumentError.new('Argument must be a valid day of week') unless IceCube::DAYS.has_key?(day)\n day = IceCube::DAYS[day]\n end\n raise ArgumentError.new('Argument must be a valid day of week (0-6)') unless day >= 0 && day <= 6\n # add the day\n @validations[:day_of_week][day] ||= []\n @validations[:day_of_week][day].concat(occurrences)\n @validations[:day_of_week][day].uniq!\n end\n self\n end",
"def week_days\n {\n \"1\" => 'mon',\n \"2\" => 'tue',\n \"3\" => 'wed',\n \"4\" => 'thu',\n \"5\" => 'fri',\n \"6\" => 'sat',\n \"7\" => 'sun'\n }\n end",
"def days_of_week_are_valid\n errors.add(:days_of_week, \"The days of the week are invalid.\") unless days_of_week_are_valid?\n end",
"def days_of_week_are_valid?\n if (self.sunday == nil); return false; end\n if (self.monday == nil); return false; end\n if (self.tuesday == nil); return false; end\n if (self.wednesday == nil); return false; end\n if (self.thursday == nil); return false; end\n if (self.friday == nil); return false; end\n if (self.saturday == nil); return false; end\n\n if ((self.sunday != true) and (self.sunday != false)); return false; end\n if ((self.monday != true) and (self.monday != false)); return false; end\n if ((self.tuesday != true) and (self.tuesday != false)); return false; end\n if ((self.wednesday != true) and (self.wednesday != false)); return false; end\n if ((self.thursday != true) and (self.thursday != false)); return false; end\n if ((self.friday != true) and (self.friday != false)); return false; end\n if ((self.saturday != true) and (self.saturday != false)); return false; end\n return true\n end",
"def day_of_the_week_correctness\n\t\tif day_of_the_week.blank?\n\t\t\treturn\n\t\tend\n\t\tif day_of_the_week < 0 or day_of_the_week > 6\n\t\t\terrors.add(:day_of_the_week, 'must be before 0 and 6')\n\t\tend\n\tend",
"def on_day_of_week(*days)\n days = days.collect{|d| numberize_day_of_week(d) }\n set_of_days = days.to_set\n raise ArgumentError, 'invalid days of week (0..6 are valid)' unless set_of_days.subset?(ALL_DAYS_OF_WEEK)\n @day_of_week = set_of_days\n self\n end",
"def days_worked\n days = account.account_setting.working_days\n Hash[days.map { |k,v| [k, (v == \"1\")] }]\n end",
"def weekday?\n WEEK_DAYS.include? wday\n end",
"def days_of_week\n %w[Sunday Monday Tuesday Wednesday Thursday Friday Saturday]\n end",
"def week_to_date_map\n week_of_year = Time.new.strftime(\"%V\").to_i\n week_count = week_of_year.modulo(4) + 1\n\n mapping = {}\n for i in 0..3 do\n date = Date.today.beginning_of_week(:monday)\n date += 1 + ((3-date.wday) % 7)\n date += i.week\n mapping[week_count] = date\n week_count += 1\n week_count = 1 if week_count > 4\n end\n\n mapping[0] = 'On Hold'\n mapping[99] = 'Not Yet Set'\n mapping\n end",
"def days_of_week_string\n dow = days_of_week_hash\n\n @days_of_week_string ||=\n (dow[:sunday] ? \"Su\" : \"\") +\n (dow[:monday] ? \"M\" : \"\") +\n (dow[:tuesday] ? \"Tu\" : \"\") +\n (dow[:wednesday] ? \"W\" : \"\") +\n (dow[:thursday] ? \"Th\" : \"\") +\n (dow[:friday] ? \"F\" : \"\") +\n (dow[:saturday] ? \"Sa\" : \"\")\n end",
"def valid_week?(week)\n (1..21).include?(week.to_i)\n end",
"def weekdays\n @weekdays ||= days_of_the_week.gsub('_','').scan(/./).map(&:to_i)\n end",
"def of_week_as_sym() WEEKDAYS_AS_SYM[of_week_as_i] end",
"def validate_week_of\n input_week_of = self.week_of\n # correct day, assuming input week is a Time object\n # http://ruby-doc.org/core-2.2.0/Time.html#method-i-sunday-3F\n return input_week_of.monday?\n end",
"def build_chef_id_to_elimweek_map(chefstats)\n chef_id_to_elimweek_map = {}\n chefstats.each { |chefstat|\n if chefstat.stat.abbreviation == Stat::ELIMINATED_ABBR\n chef_id_to_elimweek_map[chefstat.chef_id] = chefstat.week\n elsif !chef_id_to_elimweek_map.has_key?(chefstat.chef_id)\n chef_id_to_elimweek_map[chefstat.chef_id] = 0\n end\n }\n return chef_id_to_elimweek_map\n end",
"def sunday_5_54_plus1\n make_hash(draw(49, 5), draw(11, 1))\n end",
"def get_days\n return 0b0 if @params[:type] != 'advanced'\n\n return get_weekday_bitmask(['weekday_sun', 'weekday_mon', 'weekday_tue', 'weekday_wed', 'weekday_thu', 'weekday_fri', 'weekday_sat']) if @params[:schedule] == 'weekly'\n\n return get_month_bitmask(@params[:dates_picked]) if @params[:schedule] == 'monthly' && @params[:days] == 'specific'\n\n return get_unspecific_days\n\n end",
"def bad_days_in_a_week(date)\n return @bad_days_in_a_week if @bad_days_in_a_week\n bad_days = 0\n day = Date.parse(date)\n for i in 0..6\n bad_days +=1 if bad_day?((day + i.days).to_s)\n end\n @bad_days_in_a_week = bad_days\n bad_days\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Array of valid (true) / inactive (false) values, beginning with Sunday
|
def days_of_week_array
dow = days_of_week_hash
@days_of_week_array ||= [
dow[:sunday],
dow[:monday],
dow[:tuesday],
dow[:wednesday],
dow[:thursday],
dow[:friday],
dow[:saturday]
]
end
|
[
"def get_days\n\t\tbool_array = []\n\n\t\tbool_array << self[:monday]\n\t\tbool_array << self[:tuesday]\n\t\tbool_array << self[:wednesday]\n\t\tbool_array << self[:thursday]\n\t\tbool_array << self[:friday]\n\t\tbool_array << self[:saturday]\n\t\tbool_array << self[:sunday]\n\n\t\treturn bool_array\n\tend",
"def weekdays\n weekdays = []\n weekdays << !!self.weekday_0 #bang bang, converts nil values into booleans\n weekdays << !!self.weekday_1\n weekdays << !!self.weekday_2\n weekdays << !!self.weekday_3\n weekdays << !!self.weekday_4\n weekdays << !!self.weekday_5\n weekdays << !!self.weekday_6\n weekdays\n end",
"def weekdays\n wdays = []\n wdays << 0 if sun\n wdays << 1 if mon\n wdays << 2 if tue\n wdays << 3 if wed\n wdays << 4 if thu\n wdays << 5 if fri\n wdays << 6 if sat\n\n wdays\n end",
"def days_array\n day = self.beginning_of_month.wday\n day = 7 if day == 0 #mimics cwday\n array = []\n array[day - 1] = 1\n (2..self.end_of_month.mday).each {|i| array << i }\n array\n end",
"def week_split\n days_array.each_slice(7).to_a\n end",
"def dates_array\n start_date = self.start_date.to_date\n end_date = self.end_date.to_date\n array = Array.new\n return array if end_date<start_date\n (start_date..end_date).each do |i|\n array << i if self.days_int.include? i.wday\n end\n array\n end",
"def non_working_week_days\n @non_working_week_days ||= begin\n days = [] # Setting.non_working_week_days\n if days.is_a?(Array) && days.size < 7\n days.map(&:to_i)\n else\n []\n end\n end\n end",
"def non_working_week_days\n @non_working_week_days ||= begin\n days = Setting.non_working_week_days\n if days.is_a?(Array) && days.size < 7\n days.map(&:to_i)\n else\n []\n end\n end\n end",
"def weekdays\n @weekdays ||= days_of_the_week.gsub('_','').scan(/./).map(&:to_i)\n end",
"def has_sunday_with_date_7(month, year)\n calender = [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]\n day_count = 0\n\n calender.each do |num|\n (1..num).each do |i|\n if i == 7 && day_count = 7\n return true\n end\n if day_count <= 7\n day_count += 1\n else\n day_count = 7\n end\n end\n end\nend",
"def business_days\n @business_days ||= days.select(&:business_day?)\n end",
"def to_a\n (first_calendar_day..last_calendar_day).to_a.in_groups_of(7)\n end",
"def every_weekday(i); end",
"def day_meetings\n days = []\n @course.weekdays.each_char.each_with_index do |w, i|\n days.push(DAYS_AS_SYM[i]) if w.to_i == 1\n end\n days\n end",
"def public_holidays_to_array\n holidays = []\n public_holidays&.each do |day, meta|\n next if !meta\n next if !meta['active']\n next if meta['removed']\n\n holidays.push Date.parse(day)\n end\n holidays\n end",
"def every_sunday d1, d2\n sundays = []\n sunday = d1 + ((7 - d1.wday) % 7)\n while sunday < d2\n sundays << sunday\n sunday += 7\n end\n sundays\n end",
"def ics_weekday_list\n __weekday_list(ics_start, ics_end){|dt|\n not(is_weekend?(dt)) and not(is_holiday?(dt))\n }\n end",
"def check_days(array)\n frm.checkbox(:id=>/SectionsForm:sectionTable:0:meetingsTable:0:monday/).set if array.include?(/mon/i)\n frm.checkbox(:id=>/SectionsForm:sectionTable:0:meetingsTable:0:tuesday/).set if array.include?(/tue/i)\n frm.checkbox(:id=>/SectionsForm:sectionTable:0:meetingsTable:0:wednesday/).set if array.include?(/wed/i)\n frm.checkbox(:id=>/SectionsForm:sectionTable:0:meetingsTable:0:thursday/).set if array.include?(/thu/i)\n frm.checkbox(:id=>/SectionsForm:sectionTable:0:meetingsTable:0:friday/).set if array.include?(/fri/i)\n frm.checkbox(:id=>/SectionsForm:sectionTable:0:meetingsTable:0:saturday/).set if array.include?(/sat/i)\n frm.checkbox(:id=>/SectionsForm:sectionTable:0:meetingsTable:0:sunday/).set if array.include?(/sun/i)\n end",
"def get_weekdays(from_date)\n\t\tweekdays = []\n\t\t7.times do |num|\n\t\t\tweekdays << (from_date + num.days)\n\t\tend\n\t\tweekdays\n\tend"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Human-readable string of applicable days of week
|
def days_of_week_string
dow = days_of_week_hash
@days_of_week_string ||=
(dow[:sunday] ? "Su" : "") +
(dow[:monday] ? "M" : "") +
(dow[:tuesday] ? "Tu" : "") +
(dow[:wednesday] ? "W" : "") +
(dow[:thursday] ? "Th" : "") +
(dow[:friday] ? "F" : "") +
(dow[:saturday] ? "Sa" : "")
end
|
[
"def of_week_as_s() WEEKDAYS_AS_STR[of_week_as_i] end",
"def day_name; Date::DAYNAMES[wday] end",
"def day_of_week\n all_dates = self.get_all_dates\n if ! all_dates.empty?\n return Date::DAYNAMES[self.get_all_dates.first.wday] + \"s\"\n else\n return \"No dates specified\"\n end\n end",
"def week_day_name\n I18n.l(self.when, format: :week_day_name)\n end",
"def week_days\n {\n \"1\" => 'mon',\n \"2\" => 'tue',\n \"3\" => 'wed',\n \"4\" => 'thu',\n \"5\" => 'fri',\n \"6\" => 'sat',\n \"7\" => 'sun'\n }\n end",
"def day_str(args = nil)\n ret = Datet.days(:trans => true)[self.day_in_week]\n if args and args[:short]\n ret = ret.slice(0, 3)\n end\n \n return ret\n end",
"def day_in_week_str\n\t\tdays = []\n\t\ttoday = Time.now\n\t\tdays.push(formatted(today))\n\t\ttoday_week_day = today.wday\n\t\tanothers = (0..6).select {|e| e != today_week_day}\n\t\tanothers.map do |e|\n\t\t\tdays.push(formatted(Time.at(today.to_i - e*day_second)))\n\t\tend\n\n\t\tdays.sort\n\tend",
"def days_of_week\n %w[Sunday Monday Tuesday Wednesday Thursday Friday Saturday]\n end",
"def stringify_dow\n @string_dow = \"\"\n case @params[:dow]\n when '0'\n @string_dow = \"Sun\"\n when '1'\n @string_dow = \"Mon\"\n when '2'\n @string_dow = \"Tue\"\n when '3'\n @string_dow = \"Wed\"\n when '4'\n @string_dow = \"Thu\"\n when '5'\n @string_dow = \"Fri\"\n when '6'\n @string_dow = \"Sat\"\n end\n end",
"def localized_weekday_name(d)\n d = Integer(d)\n (0..6).include? d or raise ArgumentError, \"#{d.inspect} is not valid\"\n t(\"date.day_names\")[d]\n end",
"def day_name\n return @@day_names[self.day_in_week]\n end",
"def day_str(args = nil)\n ret = Knj::Datet.days_arr[@time.strftime(\"%w\").to_i]\n if args.is_a?(Hash) and args[:short]\n ret = ret.slice(0, 3)\n end\n \n return ret\n end",
"def abbr_day_name; Date::ABBR_DAYNAMES[wday] end",
"def of_week_as_sym() WEEKDAYS_AS_SYM[of_week_as_i] end",
"def lectionary_week_string\n @lectionary_week_string = 'kk'\n\n if @lectionary_week\n @lectionary_week_string = @lectionary_week.year + @lectionary_week.week + ' ' + @lectionary_week.scripture\n end\n end",
"def days_as_string\n day_string = ''\n day_map = %w[Sun Mon Tue Wed Thu Fri Sat]\n days.each { |d| day_string << day_map[d] + ', ' }\n day_string[0..(day_string.length - 3)]\n end",
"def weekdays; end",
"def day_name(day)\n if self.replace_saturday && day == 7\n self.ashkenaz ? 'Shabbos' : 'Shabbat'\n else\n Date::DAYNAMES[day - 1]\n end\n end",
"def regurlar_day_name\n\t\tday_number = self.regular_day.to_s\n\t\tcase day_number\n\t\t\twhen 0\n\t\t\t\treturn \"Sunday\"\n\t\t\twhen 1\n\t\t\t\treturn \"Monday\"\n\t\t\tWhen 2\n\t\t\t\treturn \"Tuesday\"\n\t\t\tWhen 3\n\t\t\t\treturn \"Wednesday\"\n\t\t\tWhen 4\n\t\t\t\treturn \"Thursday\"\n\t\t\tWhen 5\n\t\t\t\treturn \"Friday\"\n\t\t\tWhen 6\n\t\t\t\treturn \"Saturday\"\n\t\t\telse\n\t\t\t\treturn self.regular_day\n\t\tend\n\tend"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
TODO: DEPRECATED METHODS. Returns a RelativeTime object representing this OperatingTime
|
def relativeTime
@relativeTime ||= RelativeTime.new(self, :opensAt, :length)
end
|
[
"def time\n Time.now.localtime + self.time_offset\n end",
"def relative_time(time)\n stamp = time.getutc.iso8601\n %(<time class=\"ltr\" dir=\"ltr\" title=\"#{stamp}\" datetime=\"#{stamp}\">#{time}</time>)\n end",
"def relative_time_element(time)\n time_element(time, 'js-relative-time')\n end",
"def get_rostime\n if @use_simtime\n @simtime\n else\n ROS.get_walltime()\n end\n end",
"def __evolve_time__\n utc\n end",
"def as_of_time\n @_as_of_time\n end",
"def proposed_new_time\n return @proposed_new_time\n end",
"def experienced_time\n Time.parse(rt_date + \"UTC\") rescue nil\n end",
"def time(reference)\n reference.utc + utc_offset(reference)\n end",
"def poa_time\n # Check if there's a recipient, and if it has a timezone, it it does use that to set tz\n representative_tz_from_recipient = @hearing.representative_recipient&.timezone\n return normalized_time(representative_tz_from_recipient) if representative_tz_from_recipient.present?\n # If there's a virtual hearing, use that tz even if it's empty\n return normalized_time(@hearing.virtual_hearing[:representative_tz]) if @hearing.virtual_hearing.present?\n\n # No recipient and no virtual hearing? Use the normalized_time fallback\n normalized_time(nil)\n end",
"def time_offset\n calc_time_offset unless @time_offset\n @time_offset\n end",
"def as_of_time\n Conversions.string_to_utc_time attributes_before_type_cast['as_of_time']\n end",
"def as_point_in_time\n if @time\n @time\n elsif @start_time\n @start_time\n else\n @end_time\n end\n end",
"def resolved_date_time\n return @resolved_date_time\n end",
"def calc_time_offset\n @last_time_offset = Time.now\n @time_offset = official_time - Time.now.localtime \n end",
"def round_trip_time\n return @round_trip_time\n end",
"def current_time\n @definition.current_time\n end",
"def time_based_attribute\n return @time_based_attribute\n end",
"def to_time\n preserve_timezone ? self : getlocal\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
FIXME Deprecated, use +override+ instead
|
def special
override
end
|
[
"def override\n use(:__override__)\n end",
"def override\n use(:__override__)\n end",
"def super_method; end",
"def superclass() end",
"def override()\n puts \"child override()\"\n end",
"def original_method; end",
"def base_class; end",
"def original_method=(_); end",
"def method_missing(method, *args, &block)\n super unless original_self\n original_self.send method, *args, &block\n end",
"def private_method\n end",
"def on_super(arguments); end",
"def orig_object; end",
"def allow_override\n @allow_override\n end",
"def fail_unless_overridden()\n context = self.is_a?(Class) ? self : self.class\n method = self.caller_method()\n \n fail(\"You must override: #{method.owner.inspect}.#{method.name} in #{context.name}\")\n end",
"def override\n attributes.override\n end",
"def super_decl; end",
"def main\n super\n return self\n end",
"def method_override(ver,name)\n @method_names[ver]=name\n end",
"def super_level; end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Input: isSpecial = true/false. FIXME Deprecated, use +override=+ instead
|
def special=(isSpecial)
if isSpecial == true or isSpecial == "true" or isSpecial == 1
self.override = true
elsif isSpecial == false or isSpecial == "false" or isSpecial == 0
self.override = false
end
end
|
[
"def special\n override\n end",
"def convert_special(special)\n handled = false\n\n RDoc::Markup::Attribute.each_name_of special.type do |name|\n method_name = \"handle_special_#{name}\"\n\n if respond_to? method_name then\n special.text = send method_name, special\n handled = true\n end\n end\n\n raise \"Unhandled special: #{special}\" unless handled\n\n special.text\n end",
"def is_special?\n\t\t['*embedded*', '*untracked*'].include?(self.name)\n\tend",
"def allow_override\n @allow_override\n end",
"def is_special?\n\n [ '*embedded*', '*untracked*' ].include?(self.name)\n end",
"def override\n use(:__override__)\n end",
"def override\n use(:__override__)\n end",
"def method_missing(method_name, *args, &block)\n case method_name\n when /locations.*=|processing.*=|if_.+=/\n method_name = method_name.to_s.gsub(\"=\", \"\").to_sym\n override_method(method_name, args.first)\n else\n super(method_name, *args, &block)\n end\n end",
"def override=(flag)\n @override = flag if enabled?\n end",
"def set_normal!\n @wildcard = false\n @absorbent = false\n end",
"def method_missing(method, *args, &block)\n return super(method, *args, &block) unless method.to_s =~ /^is_.+\\?$/\n\n self.class.class_eval do\n define_method(method) do |*args, &block|\n false\n end\n end\n\n self.send(method, *args, &block)\n end",
"def method_missing(method, *args, &block)\n not @base.send(method, *args, &block)\n end",
"def overridden_status\n if @override\n @override.downcase.to_sym\n end\n end",
"def apply_wild_ignore?\n has_custom_wild_ignore? || @base_wild_ignore\n end",
"def custom?\n @custom\n end",
"def specialist_setting\n\tend",
"def override\n attributes.override\n end",
"def method_missing(method_name, *args) #:nodoc:\n if @extra_options.has_key? method_name\n @extra_options[method_name]\n else\n super method_name, *args\n end\n end",
"def use_separate_default_options(true_or_false); end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Delete VPC. ec2.delete_vpc("vpc-890ce2e0") => true
|
def delete_vpc(vpc_id)
link = generate_request("DeleteVpc", 'VpcId' => vpc_id )
request_info(link, RightHttp2xxParser.new(:logger => @logger))
rescue Exception
on_exception
end
|
[
"def delete\n client.delete_vpc(:vpc_id => vpc_id)\n nil\n end",
"def destroy\n requires :id\n\n service.delete_vpc(id)\n true\n end",
"def destroy\n @aws_vpc.destroy\n respond_to do |format|\n format.html { redirect_to aws_vpcs_url, notice: 'Aws vpc was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @vpc.destroy\n respond_to do |format|\n format.html { redirect_to vpcs_url, notice: 'Vpc was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @aws_vpc_tag.destroy\n respond_to do |format|\n format.html { redirect_to aws_vpc_tags_url, notice: 'Aws vpc tag was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def delete_vpc_by_config_id(configuration_id)\n return if configuration_id.nil? || !configured?\n\n vpc = get_vpc_by_config_id(configuration_id)\n delete_vpc(vpc[:vpc_id]) unless vpc.nil?\n end",
"def delete\n delete_cloudformation\n end",
"def delete_pvm_instance(instance_id)\n delete(\"cloud-instances/#{guid}/pvm-instances/#{instance_id}\")\n end",
"def delete\n attrcheck = { 'vcn' => @options[:vcn] }\n @validate.validate(@options, attrcheck)\n subnet_attr = AttrFinder.new(@instanceparameters)\n subnet_attr.options = @options\n subnet_attr.validate = @validate\n opts = {}\n BmcAuthenticate.new(@options)\n request = OracleBMC::Core::VirtualNetworkClient.new\n request.delete_subnet(subnet_attr.subnet, opts)\n return 'subnet ' + @options[:inst] + ' deleted'\n end",
"def delete\n cloudformation(:delete)\n end",
"def delete\n dhcp_attr = AttrFinder.new(@instanceparameters)\n dhcp_attr.options = @options\n dhcp_attr.validate = @validate\n opts = {}\n BmcAuthenticate.new(@options)\n request = OracleBMC::Core::VirtualNetworkClient.new\n request.delete_dhcp_options(dhcp_attr.dhcp, opts)\n return 'dhcp deleted'\n end",
"def destroy\n requires :vpn_gateway_id\n \n service.delete_vpn_gateway(vpn_gateway_id)\n true\n end",
"def destroy\n @aws_dhcp_option.destroy\n respond_to do |format|\n format.html { redirect_to aws_dhcp_options_url, notice: 'Aws dhcp option was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def delete(resource_group_name, public_ip_name)\n OOLog.info(\"Deleting public IP '#{public_ip_name}' from '#{resource_group_name}' \")\n start_time = Time.now.to_i\n begin\n public_ip_exists = @network_client.public_ips.check_public_ip_exists(resource_group_name, public_ip_name)\n if !public_ip_exists\n OOLog.info(\"The Public IP #{public_ip_name} does not exist. Moving on...\")\n result = nil\n else\n public_ip = @network_client.public_ips.get(resource_group_name, public_ip_name)\n result = !public_ip.nil? ? public_ip.destroy : Chef::Log.info('AzureNetwork::PublicIp - 404 code, trying to delete something that is not there.')\n end\n rescue MsRestAzure::AzureOperationError => e\n OOLog.fatal(\"Error deleting PublicIP '#{public_ip_name}' in ResourceGroup '#{resource_group_name}'. Exception: #{e.body}\")\n rescue => e\n OOLog.fatal(\"Error deleting PublicIP '#{public_ip_name}' in ResourceGroup '#{resource_group_name}'. Exception: #{e.message}\")\n end\n end_time = Time.now.to_i\n duration = end_time - start_time\n OOLog.info(\"operation took #{duration} seconds\")\n result\n end",
"def delete(options = {})\n require_options(options, [:stack_name])\n return @cloud_formation.delete_stack(options[:stack_name])\n end",
"def destroy\n requires :vpn_connection_id\n\n service.delete_vpn_connection(vpn_connection_id)\n true\n end",
"def destroy\n @ec2_instance.destroy\n respond_to do |format|\n format.html { redirect_to ec2_instances_url, notice: 'Ec2 instance was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def delete(virtual_machine_id, ip_address_id)\n request(:delete, \"/virtual_machines/#{virtual_machine_id}/ip_addresses/#{ip_address_id}.json\")\n end",
"def removed_vpc_ids\n @aws.vpc - @local.vpc\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Delete DHCP Options. ec2.delete_dhcp_options("dopt-cb0de3a2") => true
|
def delete_dhcp_options(dhcp_options_id)
link = generate_request("DeleteDhcpOptions", 'DhcpOptionsId' => dhcp_options_id )
request_info(link, RightHttp2xxParser.new(:logger => @logger))
rescue Exception
on_exception
end
|
[
"def destroy\n requires :id\n service.delete_dhcp_options(id)\n true\n end",
"def delete\n dhcp_attr = AttrFinder.new(@instanceparameters)\n dhcp_attr.options = @options\n dhcp_attr.validate = @validate\n opts = {}\n BmcAuthenticate.new(@options)\n request = OracleBMC::Core::VirtualNetworkClient.new\n request.delete_dhcp_options(dhcp_attr.dhcp, opts)\n return 'dhcp deleted'\n end",
"def destroy\n @aws_dhcp_option.destroy\n respond_to do |format|\n format.html { redirect_to aws_dhcp_options_url, notice: 'Aws dhcp option was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def delDHCP\n status = log_transaction(\"Delete a DHCP reservation for #{name}/#{ip}\", @dhcp){|dhcp|\n dhcp.delete subnet.number, mac\n }\n return status unless sp_valid?\n log_transaction(\"Delete a DHCP reservation for #{sp_name}/#{sp_ip}\"){|dhcp|\n dhcp.delete subnet.number, mac\n }\n end",
"def destroy\n @aws_dhcp_option_attribute.destroy\n respond_to do |format|\n format.html { redirect_to aws_dhcp_option_attributes_url, notice: 'Aws dhcp option attribute was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def dhcp_options\n @dhcp_options ||= init_dhcp_options\n end",
"def update_dhcp_options(dhcp_options)\n # Verify unmodifiable attributes of existing dhcp_options\n config = dhcp_options.configuration\n if desired_options.any? { |name, value| config[name] != value }\n old_dhcp_options = dhcp_options\n converge_by \"update dhcp_options #{new_resource.name} to #{dhcp_options.id} (was #{old_dhcp_options.id}) and updated VPCs in #{region}\" do\n # create new dhcp_options\n dhcp_options = driver.ec2.dhcp_options.create(config.merge(desired_options))\n action_handler.report_progress \"create new dhcp_options #{dhcp_options.id} in #{region}\"\n # attach dhcp_options to existing vpcs\n old_dhcp_options.vpcs.each do |vpc|\n vpc.dhcp_options = dhcp_options\n action_handler.report_progress \"attach dhcp_options #{dhcp_options.id} to vpc #{vpc.id}\"\n end\n # delete old dhcp_options\n old_dhcp_options.delete\n action_handler.report_progress \"delete old dhcp_options #{old_dhcp_options.id}\"\n end\n new_resource.save_managed_entry(dhcp_options, action_handler)\n end\n end",
"def dhcp_options_id\n data[:dhcp_options_id]\n end",
"def dhcp_options_id\n @vpc.dhcp_options_id\n end",
"def clear_solaris_dhcpd()\n service_name = \"svc:/network/dhcp/server:ipv4\"\n clear_service(service_name)\n return\nend",
"def enable_dhcp\n @interface_config[\"BOOTPROTO\"] = \"dhcp\"\n @interface_config.delete(\"IPADDR\")\n @interface_config.delete(\"NETMASK\")\n @interface_config.delete(\"GATEWAY\")\n @interface_config.delete(\"PREFIX\")\n @interface_config.delete(\"DNS1\")\n @interface_config.delete(\"DNS2\")\n @interface_config.delete(\"DOMAIN\")\n end",
"def kill_dhcp\n pid = dhcp_pid\n return unless pid\n system(\"kill #{pid}\")\n end",
"def destroy\n @dhcp_server.destroy\n respond_to do |format|\n format.html { redirect_to dhcp_servers_url, notice: \"Dhcp server was successfully destroyed.\" }\n format.json { head :no_content }\n end\n end",
"def check_osx_dhcpd(options)\n check_osx_dhcpd_installed()\n create_osx_dhcpd_plist()\n service = \"dhcp\"\n check_osx_service_is_enabled(service)\n return\nend",
"def dhcp\n @attributes.fetch('dhcp', false)\n end",
"def dhcpd_options(options, indent=0)\n out = []\n options.each do |key, value|\n value = value.join(', ') if value.class == Array\n out << \"#{' '*indent}option #{key} #{value.chomp};\"\n end if options\n out << ''\n end",
"def delete_a_dhcp_lease(server_id, ip, mac, opts = {})\n delete_a_dhcp_lease_with_http_info(server_id, ip, mac, opts)\n nil\n end",
"def check_apt_dhcpd(options)\n check_apt_package(options,\"isc-dhcp-server\")\n check_apt_firewall(options,\"dhcp\",\"69/udp\")\n check_apt_service(options,\"isc-dhcp-server\")\n return\nend",
"def dhcp?\n config[\"dhcp\"]\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Delete customer gateway. ec2.delete_customer_gateway("cgw-d5a643bc") => true
|
def delete_customer_gateway(customer_gateway_id)
link = generate_request("DeleteCustomerGateway", 'CustomerGatewayId' => customer_gateway_id )
request_info(link, RightHttp2xxParser.new(:logger => @logger))
rescue Exception
on_exception
end
|
[
"def destroy\n requires :customer_gateway_id\n \n service.delete_customer_gateway(customer_gateway_id)\n true\n end",
"def delete\n internet_gateway_attr = AttrFinder.new(@instanceparameters)\n internet_gateway_attr.options = @options\n internet_gateway_attr.validate = @validate\n internet_gateway_attr.function = 'internet_gateway'\n opts = {}\n BmcAuthenticate.new(@options)\n request = OracleBMC::Core::VirtualNetworkClient.new\n request.delete_internet_gateway(internet_gateway_attr.gateway, opts)\n return 'gateway deleted'\n end",
"def delete\n client_opts = {}\n client_opts[:vpn_gateway_id] = vpn_gateway_id\n client.delete_vpn_gateway(client_opts)\n nil\n end",
"def destroy\n requires :vpn_gateway_id\n \n service.delete_vpn_gateway(vpn_gateway_id)\n true\n end",
"def destroy\n requires :id\n \n service.delete_nat_gateway(id)\n true\n end",
"def delete_customer\n Stripe::Customer.delete(\n @user,\n )\n end",
"def destroy\n @payment_gateway = PaymentGateway.find(params[:id])\n @payment_gateway.destroy\n\n respond_to do |format|\n format.html { redirect_to(admin_payment_gateways_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @payment_gateway = PaymentGateway.find(params[:id])\n @payment_gateway.destroy\n\n respond_to do |format|\n format.html { redirect_to payment_gateways_url }\n format.json { head :ok }\n end\n end",
"def destroy\n @gateway = Gateway.find(params[:id])\n @gateway.destroy\n\n respond_to do |format|\n format.html { redirect_to gateways_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n requires :id\n\n service.delete_internet_gateway(id)\n true\n end",
"def delete_customer_profile(options)\n response = gateway.delete_customer_profile(options)\n [response.success?, response]\n end",
"def delete_a_customer\n @customer.delete\n end",
"def destroy\n @aws_internet_gateway.destroy\n respond_to do |format|\n format.html { redirect_to aws_internet_gateways_url, notice: 'Aws internet gateway was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def delete_internet_gateway(internet_gateway_id)\n link = generate_request(\"DeleteInternetGateway\", 'InternetGatewayId' => internet_gateway_id )\n request_info(link, RightHttp2xxParser.new(:logger => @logger))\n rescue Exception\n on_exception\n end",
"def destroy\n response = client.execute_request('customer.delete', {\n data: {\n customer_id: self.customer_id\n }\n })\n\n if response.success? && response.fetch('status') == 'success'\n @errors = []\n return true\n end\n\n @errors = response.errors\n return false\n end",
"def delete_tokenized_billing_info(options)\n get_gateway options[:system]\n @gateway.delay.delete_customer_info options\n end",
"def delete_vpn_gateway(vpn_gateway_id)\n link = generate_request(\"DeleteVpnGateway\", 'VpnGatewayId' => vpn_gateway_id )\n request_info(link, RightHttp2xxParser.new(:logger => @logger))\n rescue Exception\n on_exception\n end",
"def delete(customer)\n xml = customer.to_delete_xml\n response = do_http_post(url_for_resource(\"customer\"), xml)\n if response.code.to_i == 200\n path_to_node = \"//xmlns:RestResponse/xmlns:Success\"\n Quickeebooks::Windows::Model::Customer.from_xml_ns(response.body, path_to_node)\n else\n nil\n end \n end",
"def remove_customer(customer)\n respond_with customer.to_vaulted_billing\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Detach VPN gateway. ec2.detach_vpn_gateway('vgw-dfa144b6', 'vpc-890ce2e0') => true
|
def detach_vpn_gateway(vpn_gateway_id, vpc_id)
link = generate_request("DetachVpnGateway", 'VpnGatewayId' => vpn_gateway_id,
'VpcId' => vpc_id )
request_info(link, RightHttp2xxParser.new(:logger => @logger))
rescue Exception
on_exception
end
|
[
"def destroy\n requires :vpn_gateway_id\n \n service.delete_vpn_gateway(vpn_gateway_id)\n true\n end",
"def delete\n client_opts = {}\n client_opts[:vpn_gateway_id] = vpn_gateway_id\n client.delete_vpn_gateway(client_opts)\n nil\n end",
"def delete\n internet_gateway_attr = AttrFinder.new(@instanceparameters)\n internet_gateway_attr.options = @options\n internet_gateway_attr.validate = @validate\n internet_gateway_attr.function = 'internet_gateway'\n opts = {}\n BmcAuthenticate.new(@options)\n request = OracleBMC::Core::VirtualNetworkClient.new\n request.delete_internet_gateway(internet_gateway_attr.gateway, opts)\n return 'gateway deleted'\n end",
"def detach_internet_gateway(internet_gateway_id, vpc_id)\n request_hash = { 'InternetGatewayId' => internet_gateway_id,\n 'VpcId' => vpc_id }\n link = generate_request(\"DetachInternetGateway\", request_hash)\n request_info(link, RightHttp2xxParser::new(:logger => @logger))\n rescue Exception\n on_exception\n end",
"def delete_vpn_gateway(vpn_gateway_id)\n link = generate_request(\"DeleteVpnGateway\", 'VpnGatewayId' => vpn_gateway_id )\n request_info(link, RightHttp2xxParser.new(:logger => @logger))\n rescue Exception\n on_exception\n end",
"def detach(vpc_id)\n requires :id\n service.detach_internet_gateway(id, vpc_id)\n reload\n end",
"def delete_pvm_link(params = {})\n dev_ns = params[:dev]\n dev_br = PVmhost::ns_to_br(dev_ns)\n\n exec_root(\"ovs-vsctl del-port #{params[:ovs]} #{dev_br}\")\n exec_root(\"ip netns exec #{params[:pvm_name]} ifconfig #{dev_ns} down\")\n exec_root(\"ip link del #{dev_br}\")\n end",
"def destroy\n requires :vpn_connection_id\n\n service.delete_vpn_connection(vpn_connection_id)\n true\n end",
"def del_vpn\n return unless @vpn\n @vpns.delete(@vpn)\n @vpnlist_box.remove(@vpn.btn)\n Config.del_vpn(@vpn.name)\n @vpn = nil\n self.select_default_or_first_vpn\n Config.save\n end",
"def vpn_down\n vpn_sw = params[:vpn_sw]\n vpn_status = VpnStatus.DOWN\n Node.all.each do |node|\n node.update_vpn_status vpn_status,\"0.0.0.0\",vpn_sw\n end\n render status: :created, :text => \"\"\n end",
"def deactivate\n lock\n\n @bridges = list_bridges\n\n attach_nic_id = @vm['TEMPLATE/NIC[ATTACH=\"YES\"]/NIC_ID']\n\n if @bridges\n process do |nic|\n next if attach_nic_id && attach_nic_id != nic[:nic_id]\n\n @nic = nic\n\n next if @bridges[@nic[:bridge]].nil?\n\n # Return if the bridge doesn't exist because it was already\n # deleted (handles last vm with multiple nics on the same\n # vlan)\n next unless @bridges.include? @nic[:bridge]\n\n # Return if we want to keep the empty bridge\n next if @nic[:conf][:keep_empty_bridge]\n\n # Return if the phydev device is not the only left device in\n # the bridge.A\n if @nic[:phydev].nil?\n keep = !@bridges[@nic[:bridge]].empty?\n else\n\n keep = @bridges[@nic[:bridge]].length > 1 ||\n !@bridges[@nic[:bridge]].include?(@nic[:phydev])\n end\n\n next if keep\n\n # Delete the bridge.\n OpenNebula.exec_and_log(\"#{command(:ip)} link delete\"\\\n \" #{@nic[:bridge]}\")\n @bridges.delete(@nic[:bridge])\n end\n end\n\n unlock\n\n 0\n end",
"def delete\n client_opts = {}\n client_opts[:vpn_connection_id] = vpn_connection_id\n client.delete_vpn_connection(client_opts)\n nil\n end",
"def destroy\n router_bgp('no')\n end",
"def deactivate\n lock\n\n @bridges = list_bridges\n\n attach_nic_id = @vm['TEMPLATE/NIC[ATTACH=\"YES\"]/NIC_ID']\n\n if @bridges\n process do |nic|\n next if attach_nic_id && attach_nic_id != nic[:nic_id]\n\n @nic = nic\n\n next if @nic[:phydev].nil?\n next if @bridges[@nic[:bridge]].nil?\n\n # Get the name of the vlan device.\n gen_vlan_dev_name\n\n # Return if the bridge doesn't exist because it was already\n # deleted (handles last vm with multiple nics on the same\n # vlan)\n next unless @bridges.include? @nic[:bridge]\n\n # Return if we want to keep the empty bridge\n next if @nic[:conf][:keep_empty_bridge]\n\n # Return if the vlan device is not the only left device in\n # the bridge.\n next if (@bridges[@nic[:bridge]].length > 1) || \\\n !@bridges[@nic[:bridge]].include?(@nic[:vlan_dev])\n\n # Delete the vlan device.\n delete_vlan_dev\n\n @bridges[@nic[:bridge]].delete(@nic[:vlan_dev])\n\n # Delete the bridge.\n OpenNebula.exec_and_log(\"#{command(:ip)} link delete\"\\\n \" #{@nic[:bridge]}\")\n @bridges.delete(@nic[:bridge])\n end\n end\n\n unlock\n\n 0\n end",
"def destroy\n requires :id\n \n service.delete_nat_gateway(id)\n true\n end",
"def destroy\n @payment_gateway = PaymentGateway.find(params[:id])\n @payment_gateway.destroy\n\n respond_to do |format|\n format.html { redirect_to(admin_payment_gateways_url) }\n format.xml { head :ok }\n end\n end",
"def delete\n dhcp_attr = AttrFinder.new(@instanceparameters)\n dhcp_attr.options = @options\n dhcp_attr.validate = @validate\n opts = {}\n BmcAuthenticate.new(@options)\n request = OracleBMC::Core::VirtualNetworkClient.new\n request.delete_dhcp_options(dhcp_attr.dhcp, opts)\n return 'dhcp deleted'\n end",
"def destroy\n @vpn = Vpn.find(params[:id])\n checkaccountobject(\"vpns\",@vpn)\n @vpn.send_delete\n\n respond_to do |format|\n format.html { redirect_to vpns_url }\n format.json { head :ok }\n end\n end",
"def del_vpn(name)\n Config['vpns'].delete_if{|x| x['name'] == name}\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Delete VPN gateway. ec2.delete_vpn_gateway("vgw-dfa144b6") => true
|
def delete_vpn_gateway(vpn_gateway_id)
link = generate_request("DeleteVpnGateway", 'VpnGatewayId' => vpn_gateway_id )
request_info(link, RightHttp2xxParser.new(:logger => @logger))
rescue Exception
on_exception
end
|
[
"def delete\n client_opts = {}\n client_opts[:vpn_gateway_id] = vpn_gateway_id\n client.delete_vpn_gateway(client_opts)\n nil\n end",
"def destroy\n requires :vpn_gateway_id\n \n service.delete_vpn_gateway(vpn_gateway_id)\n true\n end",
"def delete\n internet_gateway_attr = AttrFinder.new(@instanceparameters)\n internet_gateway_attr.options = @options\n internet_gateway_attr.validate = @validate\n internet_gateway_attr.function = 'internet_gateway'\n opts = {}\n BmcAuthenticate.new(@options)\n request = OracleBMC::Core::VirtualNetworkClient.new\n request.delete_internet_gateway(internet_gateway_attr.gateway, opts)\n return 'gateway deleted'\n end",
"def destroy\n requires :id\n \n service.delete_nat_gateway(id)\n true\n end",
"def destroy\n @payment_gateway = PaymentGateway.find(params[:id])\n @payment_gateway.destroy\n\n respond_to do |format|\n format.html { redirect_to(admin_payment_gateways_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n requires :customer_gateway_id\n \n service.delete_customer_gateway(customer_gateway_id)\n true\n end",
"def destroy\n @gateway = Gateway.find(params[:id])\n @gateway.destroy\n\n respond_to do |format|\n format.html { redirect_to gateways_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @payment_gateway = PaymentGateway.find(params[:id])\n @payment_gateway.destroy\n\n respond_to do |format|\n format.html { redirect_to payment_gateways_url }\n format.json { head :ok }\n end\n end",
"def detach_vpn_gateway(vpn_gateway_id, vpc_id)\n link = generate_request(\"DetachVpnGateway\", 'VpnGatewayId' => vpn_gateway_id,\n 'VpcId' => vpc_id )\n request_info(link, RightHttp2xxParser.new(:logger => @logger))\n rescue Exception\n on_exception\n end",
"def destroy\n requires :id\n\n service.delete_internet_gateway(id)\n true\n end",
"def delete\n client_opts = {}\n client_opts[:vpn_connection_id] = vpn_connection_id\n client.delete_vpn_connection(client_opts)\n nil\n end",
"def delete_internet_gateway(internet_gateway_id)\n link = generate_request(\"DeleteInternetGateway\", 'InternetGatewayId' => internet_gateway_id )\n request_info(link, RightHttp2xxParser.new(:logger => @logger))\n rescue Exception\n on_exception\n end",
"def destroy\n @aws_internet_gateway.destroy\n respond_to do |format|\n format.html { redirect_to aws_internet_gateways_url, notice: 'Aws internet gateway was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n requires :vpn_connection_id\n\n service.delete_vpn_connection(vpn_connection_id)\n true\n end",
"def del_vpn\n return unless @vpn\n @vpns.delete(@vpn)\n @vpnlist_box.remove(@vpn.btn)\n Config.del_vpn(@vpn.name)\n @vpn = nil\n self.select_default_or_first_vpn\n Config.save\n end",
"def delete_router_gateway(router)\n data = { 'router' => {'external_gateway_info' => nil}}\n return put_request(address('routers/' + router), data, @token)\n end",
"def destroy\n @vpn = Vpn.find(params[:id])\n checkaccountobject(\"vpns\",@vpn)\n @vpn.send_delete\n\n respond_to do |format|\n format.html { redirect_to vpns_url }\n format.json { head :ok }\n end\n end",
"def destroy\n @gateway_setup.destroy\n respond_to do |format|\n format.html { redirect_to new_gateway_setup_url, notice: 'Gateway setup was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def delete_customer_gateway(customer_gateway_id)\n link = generate_request(\"DeleteCustomerGateway\", 'CustomerGatewayId' => customer_gateway_id )\n request_info(link, RightHttp2xxParser.new(:logger => @logger))\n rescue Exception\n on_exception\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Delete VPN connection. ec2.delete_vpn_connection("vpn-a9a643c0") => true
|
def delete_vpn_connection(vpn_connection_id)
link = generate_request("DeleteVpnConnection", 'VpnConnectionId' => vpn_connection_id )
request_info(link, RightHttp2xxParser.new(:logger => @logger))
rescue Exception
on_exception
end
|
[
"def destroy\n requires :vpn_connection_id\n\n service.delete_vpn_connection(vpn_connection_id)\n true\n end",
"def delete\n client_opts = {}\n client_opts[:vpn_connection_id] = vpn_connection_id\n client.delete_vpn_connection(client_opts)\n nil\n end",
"def delete\n client_opts = {}\n client_opts[:vpn_gateway_id] = vpn_gateway_id\n client.delete_vpn_gateway(client_opts)\n nil\n end",
"def destroy\n requires :vpn_gateway_id\n \n service.delete_vpn_gateway(vpn_gateway_id)\n true\n end",
"def destroy(space_id_or_space_name, vpn_connection_id_or_vpn_connection_name)\n @client.vpn_connection.destroy(space_id_or_space_name, vpn_connection_id_or_vpn_connection_name)\n end",
"def del_vpn\n return unless @vpn\n @vpns.delete(@vpn)\n @vpnlist_box.remove(@vpn.btn)\n Config.del_vpn(@vpn.name)\n @vpn = nil\n self.select_default_or_first_vpn\n Config.save\n end",
"def destroy\n @vpn = Vpn.find(params[:id])\n checkaccountobject(\"vpns\",@vpn)\n @vpn.send_delete\n\n respond_to do |format|\n format.html { redirect_to vpns_url }\n format.json { head :ok }\n end\n end",
"def del_vpn(name)\n Config['vpns'].delete_if{|x| x['name'] == name}\n end",
"def delete_vpn_gateway(vpn_gateway_id)\n link = generate_request(\"DeleteVpnGateway\", 'VpnGatewayId' => vpn_gateway_id )\n request_info(link, RightHttp2xxParser.new(:logger => @logger))\n rescue Exception\n on_exception\n end",
"def delete\n internet_gateway_attr = AttrFinder.new(@instanceparameters)\n internet_gateway_attr.options = @options\n internet_gateway_attr.validate = @validate\n internet_gateway_attr.function = 'internet_gateway'\n opts = {}\n BmcAuthenticate.new(@options)\n request = OracleBMC::Core::VirtualNetworkClient.new\n request.delete_internet_gateway(internet_gateway_attr.gateway, opts)\n return 'gateway deleted'\n end",
"def destroy\n @network_connection = NetworkConnection.find(params[:id])\n @network_connection.destroy\n\n respond_to do |format|\n format.html { redirect_to network_connections_url }\n format.json { head :ok }\n end\n end",
"def destroy\n @ssh_connection.destroy\n respond_to do |format|\n format.html { redirect_to ssh_connections_url, notice: 'Ssh connection was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @connection = current_user.connections.find(params[:id])\n @connection.destroy\n\n respond_to do |format|\n format.html { redirect_to(connections_url) }\n format.json { head :ok }\n end\n end",
"def destroy\n @connection = Connection.find(params[:id])\n @connection.destroy\n\n respond_to do |format|\n format.html { redirect_to connections_url }\n format.json { head :ok }\n end\n end",
"def deregister_tunnel_connection(connection)\n @tunnel_connections.delete connection.connection_id\n control_connection = connection.control_connection\n @tunnel_connections_by_control[control_connection].delete connection\n end",
"def destroy\n @switchgearconnection = Switchgearconnection.find(params[:id])\n @switchgearconnection.destroy\n redirect_to switchgearconnections_path, :notice => 'Schaltgerätekombination wurde gelöscht.'\n end",
"def delete_connection(username, conn_id)\n\t\tpath = '/v1/users/%s/email_connections/%d' % [username, conn_id]\n\t\tparams = {}\n\n\t\tadd_common_params('DELETE', path, params)\n\n\t\trequest = Net::HTTP::Delete.new path\n\t\trequest.set_form_data params\n\n\t\texecute_request(request)\n\tend",
"def destroy\n @core_connection.destroy\n respond_to do |format|\n format.html { redirect_to core_connections_url, notice: 'Connection was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @vpn = current_user.vpn\n @vpn.destroy\n\n respond_to do |format|\n format.html { redirect_to dashboard_url }\n format.json { head :no_content }\n end\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Build score keeper if it is enabled via settings
|
def batali_build_score_keeper
if(batali_least_impact?)
Chef::Log.warn "Batali 'least impact resolution' is currently enabled!"
if(node[:batali] && node[:batali][:last_resolution])
Batali::ScoreKeeper.new(
:manifest => Batali::Manifest.new(
:cookbooks => node[:batali][:last_resolution].map{ |c_name, c_version|
Batali::Unit.new(
:name => c_name,
:version => c_version
)
}
)
)
end
end
end
|
[
"def set_gtm_score(opts)\n opts = check_params(opts,[:scores])\n super(opts)\n end",
"def buildScoringRoundToPointsMap\n scoringMap = {}\n allScores = NbaPlayoffScore.all\n allScores.each do |score|\n scoringMap[score.round] = score.points\n end\n return scoringMap\n end",
"def build_score(sub, mark, parent_subject_id = nil)\n subject_comp = sub_components.find_by(:type => sub.class.name, :obj_id => sub.id, :parent_subject_id => parent_subject_id)\n return unless subject_comp\n score = new_score(sub,mark,subject_comp)\n if sub.class.name == 'Subject'\n add_to_score_hash(sub, score.score.to_f)\n unless sub.exclude_for_final_score\n @total_score += score.score.to_f if sub.batch_subject_group_id.present? ? (sub.batch_subject_group.calculate_final ? false : true ) : true\n @total_max += score.max_score.to_f if (score.score.present? or score.is_absent) and (sub.batch_subject_group_id.present? ? (sub.batch_subject_group.calculate_final ? false : true ) : true)\n end\n elsif sub.class.name == 'BatchSubjectGroup' and sub.calculate_final\n @total_score += score.score.to_f \n @total_max += score.max_score.to_f if (score.score.present? or score.is_absent)\n end\n @scores.push score\n end",
"def calculate_and_display_score_options(dice_scores)\n\t\tputs \"Temp score board\"\n\t\tpossible_scores = analyze_dice_scores(dice_scores)\n\t\tderive_scoring_options(possible_scores)\n\tend",
"def setScore(topicID, level, score)\n\n # Load topic and level number, prepare score record object \n topic = Topic.find(topicID)\n scoreRecord = ScoreRecord.new(score, Time.now.strftime('%d/%m/%Y'))\n \n # Check for an existing score hash for this topic, which should exist. \n # Is the new score greater than existing? Overwrite if so.\n if topicScores = scoresDictionary[topic.shortName]\n\n if existing = topicScores[level]\n topicScores[level] = scoreRecord unless existing[:score] >= score\n # No existing score for this level, so append the new score.\n else\n raise \"Level should be length of existing score array\" unless topicScores.count == level\n topicScores[level] = scoreRecord\n end\n return true \n\n # If we did not find an existing hash for this topic then error. \n else \n puts 'User#updateLevelScore error: could not find a hash for this topic'\n return false\n end\n end",
"def score_mode(mode)\n base.boost_builder.score_mode = mode\n self\n end",
"def tag_best_score\n if self.user_id == nil\n self.update!({ :best_level_user_score => true })\n else\n l_u = LevelUserLink.where(:user_id => self.user_id, :level_id => self.level_id)\n .where('pushes < :p or (pushes = :p and moves < :m) or (pushes = :p and moves = :m and created_at > :c)',\n :p => self.pushes, :m => self.moves, :c => self.created_at)\n if l_u.empty?\n LevelUserLink.where('id != ?', self.id).where(:user_id => self.user_id, :level_id => self.level_id).each do |score|\n score.update!({ :best_level_user_score => false })\n end\n self.update!({ :best_level_user_score => true })\n else\n self.update!({ :best_level_user_score => false })\n end\n end\n end",
"def up_score\n\n # modify player score if a hint was used\n @active_player.decrease_points(1) if @hint_type_one\n @active_player.decrease_points(2) if @hint_type_two\n\n @active_player.increase_points\n puts \"\\nGreat Job!\"\n puts \"#{@active_player.name}: your score is now #{@active_player.score}, and there are #{@draw_pile.length} cards left in the draw pile.\"\n @active_player = @players.switch_players\n\n @hint_type_one = false\n @hint_type_two = false\n end",
"def render_score\n @args.outputs.labels << [ 90, 80, \"Level: #{@level}\", -3, 0, *Color::White ]\n @args.outputs.labels << [ 90, 65, \"Score: #{@score}\", -3, 0, *Color::White ]\n end",
"def default_level?\n experience_points_threshold == 0\n end",
"def relevancy_scores?\n SHOW_RELEVANCY_SCORE && search_debug?\n end",
"def public_scoreboard?\n @public_scoreboard ? true : false\n end",
"def fix_gameplay_config\n must_save = false\n @pokemon_max_level = (@pokemon_max_level || 100).to_i\n @player_always_centered = @player_always_centered == true\n @mouse_disabled = @mouse_disabled == true\n if @always_use_form0_for_evolution.nil?\n @always_use_form0_for_evolution = false\n must_save = true\n end\n if @use_form0_when_no_evolution_data.nil?\n @use_form0_when_no_evolution_data = true\n must_save = true\n end\n return must_save\n end",
"def update_score!\n scoring_svc = ScoringService.new(player)\n scoring_svc.score!\n end",
"def handle_option_start_tuning\n num_testboards = $config.static_params[:num_testboards]\n if !num_testboards\n puts \"Error: NUM_TESTBOARDS should be specified in tuning.conf file\"\n exit 1\n end\n\n boards = $config.static_params[:boards].uniq\n \n boards.each{ |board_id|\n if !File.exist?( \"#{$test_dir}/log/current/ref/#{board_id}-1.xml\") \n puts \"No reference run (#{board_id}-1.xml) for testboard #{board_id}. Please run 'tact reference-runs' first\"\n exit 1\n end\n }\n\n gen = Generation.new(0)\n gen.run\n\nend",
"def save_score\n if current_score > 0\n score = Score.new\n score.score = current_score\n score.category = current_category\n score.user = current_user\n\n score.save\n end\n end",
"def enabled?\n $game_switches[Yuki::Sw::Pokedex]\n end",
"def setup_expected_score\n @players.each do |player|\n @expected_scores += get_meta_from_config_list(@levels_config, player.nivel)\n end\n end",
"def levelUp\n @level += 1\n # every level requires a higher score threshhold score to advance\n @threshold += level * 150\n\n # after 9 planes, stop adding planes to make it possible to have\n # somewhere to go to dodge the planes if there is not laser\n game.add_plane if level < 9\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Read in the table JSON
|
def readTable(path)
return JSON.parse(File.read(path).rstrip)
end
|
[
"def readTableJson(path)\n return JSON.parse(File.read(path).gsub(/\\s+$/, ''))\nend",
"def load_table (filename)\n\t\t\tnew_table = File.open(filename)\n\t\t\t@file_table = JSON.load(new_table)\n\t\t\tnew_table.close\n\t\tend",
"def get_table_json\n url = @driver.current_url\n table_id = url.split('/').last\n response = @client.table(table_id)[\"table\"]\n end",
"def parse_json(json_)\n ::NTable.parse_json(json_)\n end",
"def from_json_object(json_)\n Table.new(Structure.from_json_array(json_['axes'] || []), :load => json_['values'] || [])\n end",
"def table_data(name, key = nil, options = {})\n key ||= properties.key1\n\n query = \"\"\n\n hash = {\n \"$filter=\" => options[:filter],\n \"$select=\" => options[:select],\n \"$top=\" => options[:top]\n }\n\n hash.each do |key, value|\n if query.include?(\"$\")\n query << \"&#{key}#{value}\" if value\n else\n query << \"#{key}#{value}\" if value\n end\n end\n\n response = table_response(key, query, name)\n JSON.parse(response.body)['value'].map{ |t| TableData.new(t) }\n end",
"def read_data(filename)\n filepath = DATA_PATH + filename\n file = File.read(filepath)\n db = JSON.parse(file)\n self.populate(db)\n end",
"def as_json(*args)\n super.as_json['table']\n end",
"def parse_table; end",
"def read\n JSON.parse(File.read(@json))\n end",
"def getrow\n if ActiveRecord::Base.connection.tables.include? params[:table]\n table = params[:table].classify.constantize\n row = table.find(params[:row])\n render json: row, status: :ok\n else\n error = {Response: \"Table does not exist\"}\n render json: error, status: :not_found\n end\n end",
"def read_table table\n table_offset = @archive_header.send \"#{table}_table_offset\"\n @io.seek @user_header.archive_header_offset + table_offset\n table_entries = @archive_header.send \"#{table}_table_entries\"\n data = @io.read table_entries * 16\n key = Hashing::hash_for :table, \"(#{table} table)\"\n data = Hashing::decrypt data, key\n klass = table == :hash ? HashTableEntry : BlockTableEntry\n (0...table_entries).map do |i|\n klass.read(data[i * 16, 16])\n end\n end",
"def get_table_definitions\n content = DiskCacher.new.fetch('table_metadata', Config[:meta_table_ttl])\n raise Error, 'Table metadata is empty' if content.empty?\n JSON.parse(content)\n end",
"def table(instance_name, table_name)\n get_json(table_endpoint(instance_name, table_name))\n end",
"def get_table(table_name)\r\n raise WAZ::Tables::InvalidTableName, table_name unless WAZ::Storage::ValidationRules.valid_table_name?(table_name) \r\n \r\n begin\r\n content = execute :get, \"Tables('#{table_name}')\", {}, default_headers\r\n doc = REXML::Document.new(content)\r\n item = REXML::XPath.first(doc, \"entry\")\r\n return { :name => REXML::XPath.first(item.elements['content'], \"m:properties/d:TableName\", {\"m\" => DATASERVICES_METADATA_NAMESPACE, \"d\" => DATASERVICES_NAMESPACE}).text,\r\n :url => REXML::XPath.first(item, \"id\").text }\r\n rescue RestClient::ResourceNotFound\r\n raise WAZ::Tables::TableDoesNotExist, table_name if $!.http_code == 404\r\n end \r\n end",
"def parse json; return JSON.parse File.read json end",
"def retrieve\n raise \"Can't retrieve from JSON file. Use text file.\"\n end",
"def load_rows_and_columns\n\t\tif @table\n\t\t\tbegin\n\t\t\t\tdb = SQLite3::Database.new(@file, :results_as_hash => false, :type_translation => false)\n\t\t\t\tdb.busy_timeout(100)\n\t\t\t\tsql = \"select * from #{ @table }\"\n\t\t\t\tsql += \" order by #{ @order }\" if @order and @order.strip != ''\n\t\t\t\n\t\t\t\tresult = db.execute2(sql)\n\t\t\t\t@columns = result.shift\n\t\t\t\t@rows = result\n\t\t\trescue SQLite3::SQLException => e\n\t\t\t\tputs e\n\t\t\tend\n\t\t\t\n\t\t\tdb.close\n\t\tend\n\tend",
"def table\n @nodes = current_user.nodes.order('updated_at DESC')\n respond_to do |format|\n format.html { render partial: \"table\"}\n format.json { render json: @nodes }\n end\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Set the "slimmer headers" to configure the page
|
def set_slimmer_headers(hash)
raise InvalidHeader if (hash.keys - SLIMMER_HEADER_MAPPING.keys).any?
SLIMMER_HEADER_MAPPING.each do |hash_key, header_suffix|
value = hash[hash_key]
headers["#{HEADER_PREFIX}-#{header_suffix}"] = value.to_s if value
end
end
|
[
"def set_headers! session = nil\n response.headers['sid'] = session.id\n response.headers['utoken'] = session.utoken\n end",
"def show_headers=(value)\n @show_headers = value\n end",
"def set_default_headers\n super\n (@content_security_policy || roda_class.opts[:content_security_policy]).set_header(headers)\n end",
"def kopal_layout_before_page_header\n\n end",
"def header\n end",
"def rack_headers; end",
"def setup_header_handler\n sessionm.headerhandler << HeaderHandler.new(:session => @session_id, :target => 'sessionm')\n envm.headerhandler << HeaderHandler.new(:session => @session_id, :target => 'vzaenvm')\n packagem.headerhandler << HeaderHandler.new(:session => @session_id, :target => 'vzapackagem')\n relocator.headerhandler << HeaderHandler.new(:session => @session_id, :target => 'vzarelocator')\n devm.headerhandler << HeaderHandler.new(:session => @session_id, :target => 'vzadevm')\n networkm.headerhandler << HeaderHandler.new(:session => @session_id, :target => 'vzanetworkm')\n proc_info.headerhandler << HeaderHandler.new(:session => @session_id, :target => 'vzaproc_info')\n processm.headerhandler << HeaderHandler.new(:session => @session_id, :target => 'vzaprocessm')\n up2date.headerhandler << HeaderHandler.new(:session => @session_id, :target => 'vzaup2date')\n support.headerhandler << HeaderHandler.new(:session => @session_id, :target => 'vzasupport')\n env_samplem.headerhandler << HeaderHandler.new(:session => @session_id, :target => 'env_samplem')\n alertm.headerhandler << HeaderHandler.new(:session => @session_id, :target => 'alertm')\n authm.headerhandler << HeaderHandler.new(:session => @session_id, :target => 'authm')\n backupm.headerhandler << HeaderHandler.new(:session => @session_id, :target => 'backupm')\n computerm.headerhandler << HeaderHandler.new(:session => @session_id, :target => 'computerm')\n event_log.headerhandler << HeaderHandler.new(:session => @session_id, :target => 'event_log')\n server_group.headerhandler << HeaderHandler.new(:session => @session_id, :target => 'server_group')\n filer.headerhandler << HeaderHandler.new(:session => @session_id, :target => 'filer')\n firewallm.headerhandler << HeaderHandler.new(:session => @session_id, :target => 'firewallm')\n licensem.headerhandler << HeaderHandler.new(:session => @session_id, :target => 'licensem')\n mailer.headerhandler << HeaderHandler.new(:session => @session_id, :target => 'mailer')\n op_log.headerhandler << HeaderHandler.new(:session => @session_id, :target => 'op_log')\n perf_mon.headerhandler << HeaderHandler.new(:session => @session_id, :target => 'perf_mon')\n res_log.headerhandler << HeaderHandler.new(:session => @session_id, :target => 'res_log')\n resourcem.headerhandler << HeaderHandler.new(:session => @session_id, :target => 'resourcem')\n scheduler.headerhandler << HeaderHandler.new(:session => @session_id, :target => 'scheduler')\n servicem.headerhandler << HeaderHandler.new(:session => @session_id, :target => 'servicem')\n userm.headerhandler << HeaderHandler.new(:session => @session_id, :target => 'userm')\n end",
"def header(routes); end",
"def add_security_headers\n response.headers[\"X-Frame-Options\"] = \"SAMEORIGIN\"\n response.headers[\"X-XSS-Protection\"] = \"1; mode=block\"\n response.headers[\"X-Content-Type-Options\"] = \"nosniff\"\n end",
"def set_common_headers\n @response.headers[:Server] = \"Yarn webserver v#{VERSION}\"\n\n # HTTP date format: Fri, 31 Dec 1999 23:59:59 GMT\n time ||= DateTime.now.new_offset(0)\n @response.headers[:Date] = time.strftime(\"%a, %d %b %Y %H:%M:%S GMT\")\n # Close connection header ( until support for persistent connections )\n @response.headers[:Connection] = \"Close\"\n end",
"def set_response_headers\r\n request.env[\"rack.session.options\"].try :clear\r\n\r\n unless defined? @content_type\r\n @content_type = \"application/vnd.#{Restful.application_name}\"\r\n @content_type << \"-v#@version\" if @version\r\n @content_type << \"+#{request.format.to_sym}\"\r\n @content_type << \"; charset=utf-8\"\r\n end\r\n\r\n response.headers[\"Content-Type\"] = @content_type if @content_type\r\n end",
"def set_header(options)\n return Processor.new(options.merge({:body => true})) {}\n end",
"def render_header\n return if @render_header\n @render_header = 1\n puts Cabar.yaml_header\n end",
"def header\n @header ||= HeaderController.new config\n end",
"def kopal_layout_after_page_header\n\n end",
"def set_sasc_request_headers(api_version = nil)\n sasc_request_headers(api_version).each { |header, value| request.headers[header] = value }\n end",
"def set_pagination_headers\n headers[\"Access-Control-Expose-Headers\"] = \"Link, Page, Per-Page\"\n headers[\"Page\"] = current_page\n headers[\"Per-Page\"] = current_per_page\n end",
"def get_header() \n erb :header\n end",
"def headers(headers)\n client.http.headers = headers\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Create or update L2 Bridge Endpoint Profile. API will create or update L2 bridge profile with ID profile-id. L2 Bridge profile is only allowed under an enforcement-point with path /infra/sites/default/enforcement-points/default.
|
def update_l2_bridge_profile_with_http_info(site_id, enforcement_point_id, profile_id, l2_bridge_endpoint_profile, opts = {})
if @api_client.config.debugging
@api_client.config.logger.debug 'Calling API: PolicyNetworkingConnectivitySegmentsEdgeBridgeProfilesApi.update_l2_bridge_profile ...'
end
# verify the required parameter 'site_id' is set
if @api_client.config.client_side_validation && site_id.nil?
fail ArgumentError, "Missing the required parameter 'site_id' when calling PolicyNetworkingConnectivitySegmentsEdgeBridgeProfilesApi.update_l2_bridge_profile"
end
# verify the required parameter 'enforcement_point_id' is set
if @api_client.config.client_side_validation && enforcement_point_id.nil?
fail ArgumentError, "Missing the required parameter 'enforcement_point_id' when calling PolicyNetworkingConnectivitySegmentsEdgeBridgeProfilesApi.update_l2_bridge_profile"
end
# verify the required parameter 'profile_id' is set
if @api_client.config.client_side_validation && profile_id.nil?
fail ArgumentError, "Missing the required parameter 'profile_id' when calling PolicyNetworkingConnectivitySegmentsEdgeBridgeProfilesApi.update_l2_bridge_profile"
end
# verify the required parameter 'l2_bridge_endpoint_profile' is set
if @api_client.config.client_side_validation && l2_bridge_endpoint_profile.nil?
fail ArgumentError, "Missing the required parameter 'l2_bridge_endpoint_profile' when calling PolicyNetworkingConnectivitySegmentsEdgeBridgeProfilesApi.update_l2_bridge_profile"
end
# resource path
local_var_path = '/global-infra/sites/{site-id}/enforcement-points/{enforcement-point-id}/edge-bridge-profiles/{profile-id}'.sub('{' + 'site-id' + '}', site_id.to_s).sub('{' + 'enforcement-point-id' + '}', enforcement_point_id.to_s).sub('{' + 'profile-id' + '}', profile_id.to_s)
# query parameters
query_params = {}
# header parameters
header_params = {}
# HTTP header 'Accept' (if needed)
header_params['Accept'] = @api_client.select_header_accept(['application/json'])
# HTTP header 'Content-Type'
header_params['Content-Type'] = @api_client.select_header_content_type(['application/json'])
# form parameters
form_params = {}
# http body (model)
post_body = @api_client.object_to_http_body(l2_bridge_endpoint_profile)
auth_names = ['BasicAuth']
data, status_code, headers = @api_client.call_api(:PUT, local_var_path,
:header_params => header_params,
:query_params => query_params,
:form_params => form_params,
:body => post_body,
:auth_names => auth_names,
:return_type => 'L2BridgeEndpointProfile')
if @api_client.config.debugging
@api_client.config.logger.debug "API called: PolicyNetworkingConnectivitySegmentsEdgeBridgeProfilesApi#update_l2_bridge_profile\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
end
return data, status_code, headers
end
|
[
"def update_l2_bridge_profile_0_with_http_info(site_id, enforcement_point_id, profile_id, l2_bridge_endpoint_profile, opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug 'Calling API: PolicyNetworkingConnectivitySegmentsEdgeBridgeProfilesApi.update_l2_bridge_profile_0 ...'\n end\n # verify the required parameter 'site_id' is set\n if @api_client.config.client_side_validation && site_id.nil?\n fail ArgumentError, \"Missing the required parameter 'site_id' when calling PolicyNetworkingConnectivitySegmentsEdgeBridgeProfilesApi.update_l2_bridge_profile_0\"\n end\n # verify the required parameter 'enforcement_point_id' is set\n if @api_client.config.client_side_validation && enforcement_point_id.nil?\n fail ArgumentError, \"Missing the required parameter 'enforcement_point_id' when calling PolicyNetworkingConnectivitySegmentsEdgeBridgeProfilesApi.update_l2_bridge_profile_0\"\n end\n # verify the required parameter 'profile_id' is set\n if @api_client.config.client_side_validation && profile_id.nil?\n fail ArgumentError, \"Missing the required parameter 'profile_id' when calling PolicyNetworkingConnectivitySegmentsEdgeBridgeProfilesApi.update_l2_bridge_profile_0\"\n end\n # verify the required parameter 'l2_bridge_endpoint_profile' is set\n if @api_client.config.client_side_validation && l2_bridge_endpoint_profile.nil?\n fail ArgumentError, \"Missing the required parameter 'l2_bridge_endpoint_profile' when calling PolicyNetworkingConnectivitySegmentsEdgeBridgeProfilesApi.update_l2_bridge_profile_0\"\n end\n # resource path\n local_var_path = '/infra/sites/{site-id}/enforcement-points/{enforcement-point-id}/edge-bridge-profiles/{profile-id}'.sub('{' + 'site-id' + '}', site_id.to_s).sub('{' + 'enforcement-point-id' + '}', enforcement_point_id.to_s).sub('{' + 'profile-id' + '}', profile_id.to_s)\n\n # query parameters\n query_params = {}\n\n # header parameters\n header_params = {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n # HTTP header 'Content-Type'\n header_params['Content-Type'] = @api_client.select_header_content_type(['application/json'])\n\n # form parameters\n form_params = {}\n\n # http body (model)\n post_body = @api_client.object_to_http_body(l2_bridge_endpoint_profile)\n auth_names = ['BasicAuth']\n data, status_code, headers = @api_client.call_api(:PUT, local_var_path,\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => 'L2BridgeEndpointProfile')\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: PolicyNetworkingConnectivitySegmentsEdgeBridgeProfilesApi#update_l2_bridge_profile_0\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end",
"def create_or_update_segment_security_profile_0_with_http_info(segment_security_profile_id, segment_security_profile, opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug 'Calling API: PolicyNetworkingConnectivitySegmentsSegmentProfilesSegmentSecurityProfilesApi.create_or_update_segment_security_profile_0 ...'\n end\n # verify the required parameter 'segment_security_profile_id' is set\n if @api_client.config.client_side_validation && segment_security_profile_id.nil?\n fail ArgumentError, \"Missing the required parameter 'segment_security_profile_id' when calling PolicyNetworkingConnectivitySegmentsSegmentProfilesSegmentSecurityProfilesApi.create_or_update_segment_security_profile_0\"\n end\n # verify the required parameter 'segment_security_profile' is set\n if @api_client.config.client_side_validation && segment_security_profile.nil?\n fail ArgumentError, \"Missing the required parameter 'segment_security_profile' when calling PolicyNetworkingConnectivitySegmentsSegmentProfilesSegmentSecurityProfilesApi.create_or_update_segment_security_profile_0\"\n end\n # resource path\n local_var_path = '/global-infra/segment-security-profiles/{segment-security-profile-id}'.sub('{' + 'segment-security-profile-id' + '}', segment_security_profile_id.to_s)\n\n # query parameters\n query_params = {}\n query_params[:'override'] = opts[:'override'] if !opts[:'override'].nil?\n\n # header parameters\n header_params = {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n # HTTP header 'Content-Type'\n header_params['Content-Type'] = @api_client.select_header_content_type(['application/json'])\n\n # form parameters\n form_params = {}\n\n # http body (model)\n post_body = @api_client.object_to_http_body(segment_security_profile)\n auth_names = ['BasicAuth']\n data, status_code, headers = @api_client.call_api(:PUT, local_var_path,\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => 'SegmentSecurityProfile')\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: PolicyNetworkingConnectivitySegmentsSegmentProfilesSegmentSecurityProfilesApi#create_or_update_segment_security_profile_0\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end",
"def create_or_patch_ip_sec_vpn_tunnel_profile_0(tunnel_profile_id, ip_sec_vpn_tunnel_profile, opts = {})\n create_or_patch_ip_sec_vpn_tunnel_profile_0_with_http_info(tunnel_profile_id, ip_sec_vpn_tunnel_profile, opts)\n nil\n end",
"def create_bridge_endpoint_profile(bridge_endpoint_profile, opts = {})\n data, _status_code, _headers = create_bridge_endpoint_profile_with_http_info(bridge_endpoint_profile, opts)\n data\n end",
"def update_profile(options = {}) \n # query profile info\n response = HTTP.auth('Bearer ' + Asca::Tools::Token.new_token).get(URI_PROFILES, :params => { 'filter[name]' => options[:name] })\n if response.status.success?\n responseObj = JSON.parse(response.body)\n queried_profile_list = responseObj[\"data\"]\n if queried_profile_list.length() > 0\n profile = queried_profile_list[0]\n end\n else\n Asca::Tools::Log.error(response.body)\n return\n end\n \n if !profile\n Asca::Tools::Log.error(\"No profile named #{options[:name]} found\")\n return\n end\n # create new profile\n profile_type = profile[\"attributes\"][\"profileType\"]\n \n # get bundle id\n response = HTTP.auth('Bearer ' + Asca::Tools::Token.new_token).get(profile[\"relationships\"][\"bundleId\"][\"links\"][\"self\"])\n bundle_id = JSON.parse(response.body)[\"data\"][\"id\"]\n response = HTTP.auth('Bearer ' + Asca::Tools::Token.new_token).get(profile[\"relationships\"][\"certificates\"][\"links\"][\"self\"])\n certificate_ids = JSON.parse(response.body)[\"data\"].map { |cer| cer[\"id\"] }\n \n # get all device ids\n device_ids = Asca::REST::Provisioning::Devices.list_devices.map { |device|\n device[\"id\"]\n }\n \n # delete old prifile\n delete_profile :name => options[:name]\n \n if profile_type.include? 'APP_STORE'\n create_new_profile :name => options[:name], :type => profile_type, :bundle_id => bundle_id, :certificate_ids => certificate_ids\n else\n create_new_profile :name => options[:name], :type => profile_type, :bundle_id => bundle_id, :device_ids => device_ids, :certificate_ids => certificate_ids\n end\n \n return true\n end",
"def update_profile_by_profile_id(profile_id, user_profile_data)\n # PUT /d2l/api/lp/(version)/profile/(profileId)\n # NOTE: Example of User.UserProfile JSON Data Block\n # { \"Nickname\": <string>,\n # \"Birthday\": {\n # \"Month\": <number>,\n # \"Day\": <number>\n # },\n # \"HomeTown\": <string>,\n # \"Email\": <string>,\n # \"HomePage\": <string>,\n # \"HomePhone\": <string>,\n # \"BusinessPhone\": <string>,\n # \"MobilePhone\": <string>,\n # \"FaxNumber\": <string>,\n # \"Address1\": <string>,\n # \"Address2\": <string>,\n # \"City\": <string>,\n # \"Province\": <string>,\n # \"PostalCode\": <string>,\n # \"Country\": <string>,\n # \"Company\": <string>,\n # \"JobTitle\": <string>,\n # \"HighSchool\": <string>,\n # \"University\": <string>,\n # \"Hobbies\": <string>,\n # \"FavMusic\": <string>,\n # \"FavTVShows\": <string>,\n # \"FavMovies\": <string>,\n # \"FavBooks\": <string>,\n # \"FavQuotations\": <string>,\n # \"FavWebSites\": <string>,\n # \"FutureGoals\": <string>,\n # \"FavMemory\": <string>,\n # \"SocialMediaUrls\": [ // Array of SocialMediaUrl blocks\n # {\n # \"Name\": <string>,\n # \"Url\": <string:URL>\n # },\n # { <composite:SocialMediaUrl> }, ...\n # ]\n # }\n # NOTE: The back-end service also expects a file names \"profileImage\"\n # RETURNS: a UserProfile JSON data block for the updated personal profile.\nend",
"def create_bridge_endpoint_profile_with_http_info(bridge_endpoint_profile, opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug \"Calling API: NetworkTransportApi.create_bridge_endpoint_profile ...\"\n end\n # verify the required parameter 'bridge_endpoint_profile' is set\n if @api_client.config.client_side_validation && bridge_endpoint_profile.nil?\n fail ArgumentError, \"Missing the required parameter 'bridge_endpoint_profile' when calling NetworkTransportApi.create_bridge_endpoint_profile\"\n end\n # resource path\n local_var_path = \"/bridge-endpoint-profiles\"\n\n # query parameters\n query_params = {}\n\n # header parameters\n header_params = {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n # HTTP header 'Content-Type'\n header_params['Content-Type'] = @api_client.select_header_content_type(['application/json'])\n\n # form parameters\n form_params = {}\n\n # http body (model)\n post_body = @api_client.object_to_http_body(bridge_endpoint_profile)\n auth_names = ['BasicAuth']\n data, status_code, headers = @api_client.call_api(:POST, local_var_path,\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => 'BridgeEndpointProfile')\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: NetworkTransportApi#create_bridge_endpoint_profile\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end",
"def create_or_update_segment_security_profile_with_http_info(segment_security_profile_id, segment_security_profile, opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug 'Calling API: PolicyNetworkingConnectivitySegmentsSegmentProfilesSegmentSecurityProfilesApi.create_or_update_segment_security_profile ...'\n end\n # verify the required parameter 'segment_security_profile_id' is set\n if @api_client.config.client_side_validation && segment_security_profile_id.nil?\n fail ArgumentError, \"Missing the required parameter 'segment_security_profile_id' when calling PolicyNetworkingConnectivitySegmentsSegmentProfilesSegmentSecurityProfilesApi.create_or_update_segment_security_profile\"\n end\n # verify the required parameter 'segment_security_profile' is set\n if @api_client.config.client_side_validation && segment_security_profile.nil?\n fail ArgumentError, \"Missing the required parameter 'segment_security_profile' when calling PolicyNetworkingConnectivitySegmentsSegmentProfilesSegmentSecurityProfilesApi.create_or_update_segment_security_profile\"\n end\n # resource path\n local_var_path = '/infra/segment-security-profiles/{segment-security-profile-id}'.sub('{' + 'segment-security-profile-id' + '}', segment_security_profile_id.to_s)\n\n # query parameters\n query_params = {}\n query_params[:'override'] = opts[:'override'] if !opts[:'override'].nil?\n\n # header parameters\n header_params = {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n # HTTP header 'Content-Type'\n header_params['Content-Type'] = @api_client.select_header_content_type(['application/json'])\n\n # form parameters\n form_params = {}\n\n # http body (model)\n post_body = @api_client.object_to_http_body(segment_security_profile)\n auth_names = ['BasicAuth']\n data, status_code, headers = @api_client.call_api(:PUT, local_var_path,\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => 'SegmentSecurityProfile')\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: PolicyNetworkingConnectivitySegmentsSegmentProfilesSegmentSecurityProfilesApi#create_or_update_segment_security_profile\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end",
"def update_profile\n @profile = @account.employee\n \n if params[:profile].present?\n @profile.assign_attributes(profile_params)\n @profile.save\n redirect_to gns_core.my_profile_backend_accounts_path, flash: {success: 'Profile has been updated successfully.'}\n else\n redirect_to gns_core.my_profile_backend_accounts_path, flash: {error: 'Profile update failed. Please try again!'}\n end\n end",
"def create_sslprofile\n converge_by(\"Create #{new_resource} ssl profile\") do\n Chef::Log.info \"Create #{new_resource} ssl profile\"\n\n\n load_balancer.client['LocalLB.ProfileClientSSL'].create_v2([new_resource.sslprofile_name], [{\"value\" => \"/Common/#{new_resource.keyid}\", \"default_flag\" => \"false\"}] , [{\"value\" => \"/Common/#{new_resource.certid}\", \"default_flag\" => \"false\"}])\n load_balancer.client['LocalLB.ProfileClientSSL'].set_passphrase([\"/Common/#{new_resource.sslprofile_name}\"], [{\"value\" => \"#{new_resource.passphrase}\", \"default_flag\" => \"false\" }]) if !new_resource.passphrase.nil?\n\n current_resource.keyid(new_resource.keyid)\n current_resource.certid(new_resource.certid)\n current_resource.cacertid(new_resource.cacertid)\n current_resource.passphrase(new_resource.passphrase)\n\n new_resource.updated_by_last_action(true)\n end\n end",
"def set_new_profile\n server_profile = OneviewSDK::ServerProfile\n # lets the SDK set a default name in case the user has not declared one\n if @data['serverProfileName']\n sp_name = @data.delete('serverProfileName')\n get_single_resource_instance.new_profile(sp_name).create unless server_profile.find_by(@client, name: sp_name).first\n else\n default = 'Server_Profile_created_from_' + @data['name']\n get_single_resource_instance.new_profile.create unless server_profile.find_by(@client, name: default).first\n end\n end",
"def update_current_user_profile_data(user_profile_data)\n # PUT /d2l/api/lp/(version)/profile/myProfile\nend",
"def create_profile!\n bundle_id = Sigh.config[:app_identifier]\n name = Sigh.config[:provisioning_name]\n if !name\n name = Sigh.config[:app_identifier].gsub '.' ,''\n end\n\n UI.important \"Creating new provisioning profile for '#{Sigh.config[:app_identifier]}' with name '#{name}'\"\n profile = profile_type.create!(name: name,\n bundle_id: bundle_id)\n profile\n end",
"def create_or_patch_ip_sec_vpn_tunnel_profile(tunnel_profile_id, ip_sec_vpn_tunnel_profile, opts = {})\n create_or_patch_ip_sec_vpn_tunnel_profile_with_http_info(tunnel_profile_id, ip_sec_vpn_tunnel_profile, opts)\n nil\n end",
"def assign_default_profile(args = {}) \n put(\"/profiles.json/#{args[:profileId]}/default\", args)\nend",
"def newprofile\n if params[\"auth_key\"] == nil or params[\"device_id\"] == nil or params[\"profile_url\"] == nil\n render :json => '{\"status\": \"failed\", \"reason\": \"incorrect parameters\"}'\n else\n device = Device.find(params[\"device_id\"])\n if device.auth_key == params[\"auth_key\"]\n device.profile_url = params[\"profile_url\"]\n if device.save\n render :json => '{\"status\": \"success\"}'\n else\n render :json => '{\"status\": \"failed\", \"reason\": \"save error\"}'\n end\n else\n render :json => '{\"status\": \"failed\", \"reason\": \"not authorized\"}'\n end\n end\n end",
"def create_DB_profile\n \n @profile = ProfileId.new\n @profile.user_name = params[:user_name]\n @profile.profile_id = @parsed[\"profileId\"]\n @profile.save\n end",
"def create_or_replace_gateway_qos_profile_0_with_http_info(qos_profile_id, gateway_qos_profile, opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug 'Calling API: PolicyNetworkingNetworkingProfilesGatewayQOSProfilesApi.create_or_replace_gateway_qos_profile_0 ...'\n end\n # verify the required parameter 'qos_profile_id' is set\n if @api_client.config.client_side_validation && qos_profile_id.nil?\n fail ArgumentError, \"Missing the required parameter 'qos_profile_id' when calling PolicyNetworkingNetworkingProfilesGatewayQOSProfilesApi.create_or_replace_gateway_qos_profile_0\"\n end\n # verify the required parameter 'gateway_qos_profile' is set\n if @api_client.config.client_side_validation && gateway_qos_profile.nil?\n fail ArgumentError, \"Missing the required parameter 'gateway_qos_profile' when calling PolicyNetworkingNetworkingProfilesGatewayQOSProfilesApi.create_or_replace_gateway_qos_profile_0\"\n end\n # resource path\n local_var_path = '/infra/gateway-qos-profiles/{qos-profile-id}'.sub('{' + 'qos-profile-id' + '}', qos_profile_id.to_s)\n\n # query parameters\n query_params = {}\n query_params[:'override'] = opts[:'override'] if !opts[:'override'].nil?\n\n # header parameters\n header_params = {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n # HTTP header 'Content-Type'\n header_params['Content-Type'] = @api_client.select_header_content_type(['application/json'])\n\n # form parameters\n form_params = {}\n\n # http body (model)\n post_body = @api_client.object_to_http_body(gateway_qos_profile)\n auth_names = ['BasicAuth']\n data, status_code, headers = @api_client.call_api(:PUT, local_var_path,\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => 'GatewayQosProfile')\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: PolicyNetworkingNetworkingProfilesGatewayQOSProfilesApi#create_or_replace_gateway_qos_profile_0\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end",
"def create_profile\n self.profile = Profile.create(user_id: id)\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Create or update L2 Bridge Endpoint Profile. This API will create or update the L2 bridge profile with ID profile-id. An L2 bridge profile is only allowed under an enforcement point with path /infra/sites/default/enforcement-points/default.
|
def update_l2_bridge_profile_0_with_http_info(site_id, enforcement_point_id, profile_id, l2_bridge_endpoint_profile, opts = {})
if @api_client.config.debugging
@api_client.config.logger.debug 'Calling API: PolicyNetworkingConnectivitySegmentsEdgeBridgeProfilesApi.update_l2_bridge_profile_0 ...'
end
# verify the required parameter 'site_id' is set
if @api_client.config.client_side_validation && site_id.nil?
fail ArgumentError, "Missing the required parameter 'site_id' when calling PolicyNetworkingConnectivitySegmentsEdgeBridgeProfilesApi.update_l2_bridge_profile_0"
end
# verify the required parameter 'enforcement_point_id' is set
if @api_client.config.client_side_validation && enforcement_point_id.nil?
fail ArgumentError, "Missing the required parameter 'enforcement_point_id' when calling PolicyNetworkingConnectivitySegmentsEdgeBridgeProfilesApi.update_l2_bridge_profile_0"
end
# verify the required parameter 'profile_id' is set
if @api_client.config.client_side_validation && profile_id.nil?
fail ArgumentError, "Missing the required parameter 'profile_id' when calling PolicyNetworkingConnectivitySegmentsEdgeBridgeProfilesApi.update_l2_bridge_profile_0"
end
# verify the required parameter 'l2_bridge_endpoint_profile' is set
if @api_client.config.client_side_validation && l2_bridge_endpoint_profile.nil?
fail ArgumentError, "Missing the required parameter 'l2_bridge_endpoint_profile' when calling PolicyNetworkingConnectivitySegmentsEdgeBridgeProfilesApi.update_l2_bridge_profile_0"
end
# resource path
local_var_path = '/infra/sites/{site-id}/enforcement-points/{enforcement-point-id}/edge-bridge-profiles/{profile-id}'.sub('{' + 'site-id' + '}', site_id.to_s).sub('{' + 'enforcement-point-id' + '}', enforcement_point_id.to_s).sub('{' + 'profile-id' + '}', profile_id.to_s)
# query parameters
query_params = {}
# header parameters
header_params = {}
# HTTP header 'Accept' (if needed)
header_params['Accept'] = @api_client.select_header_accept(['application/json'])
# HTTP header 'Content-Type'
header_params['Content-Type'] = @api_client.select_header_content_type(['application/json'])
# form parameters
form_params = {}
# http body (model)
post_body = @api_client.object_to_http_body(l2_bridge_endpoint_profile)
auth_names = ['BasicAuth']
data, status_code, headers = @api_client.call_api(:PUT, local_var_path,
:header_params => header_params,
:query_params => query_params,
:form_params => form_params,
:body => post_body,
:auth_names => auth_names,
:return_type => 'L2BridgeEndpointProfile')
if @api_client.config.debugging
@api_client.config.logger.debug "API called: PolicyNetworkingConnectivitySegmentsEdgeBridgeProfilesApi#update_l2_bridge_profile_0\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
end
return data, status_code, headers
end
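A minimal usage sketch for the generated client method above. The SDK module name, configuration wiring, and the L2BridgeEndpointProfile attribute are assumptions for illustration; only the method name and its positional parameters (site-id, enforcement-point-id, profile-id, request body) come from the code itself.

# Hypothetical caller of the generated client; SwaggerClient is an assumed module
# name and display_name an assumed model attribute.
api_client = SwaggerClient::ApiClient.new                                            # assumed client class
api = SwaggerClient::PolicyNetworkingConnectivitySegmentsEdgeBridgeProfilesApi.new(api_client)

profile = SwaggerClient::L2BridgeEndpointProfile.new                                 # assumed model class
profile.display_name = 'edge-bridge-profile-1'                                       # assumed attribute

data, status, _headers = api.update_l2_bridge_profile_0_with_http_info(
  'default',               # site-id
  'default',               # enforcement-point-id
  'edge-bridge-profile-1', # profile-id
  profile                  # request body (L2BridgeEndpointProfile)
)
puts "PUT returned #{status}" if status == 200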
|
[
"def update_l2_bridge_profile_with_http_info(site_id, enforcement_point_id, profile_id, l2_bridge_endpoint_profile, opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug 'Calling API: PolicyNetworkingConnectivitySegmentsEdgeBridgeProfilesApi.update_l2_bridge_profile ...'\n end\n # verify the required parameter 'site_id' is set\n if @api_client.config.client_side_validation && site_id.nil?\n fail ArgumentError, \"Missing the required parameter 'site_id' when calling PolicyNetworkingConnectivitySegmentsEdgeBridgeProfilesApi.update_l2_bridge_profile\"\n end\n # verify the required parameter 'enforcement_point_id' is set\n if @api_client.config.client_side_validation && enforcement_point_id.nil?\n fail ArgumentError, \"Missing the required parameter 'enforcement_point_id' when calling PolicyNetworkingConnectivitySegmentsEdgeBridgeProfilesApi.update_l2_bridge_profile\"\n end\n # verify the required parameter 'profile_id' is set\n if @api_client.config.client_side_validation && profile_id.nil?\n fail ArgumentError, \"Missing the required parameter 'profile_id' when calling PolicyNetworkingConnectivitySegmentsEdgeBridgeProfilesApi.update_l2_bridge_profile\"\n end\n # verify the required parameter 'l2_bridge_endpoint_profile' is set\n if @api_client.config.client_side_validation && l2_bridge_endpoint_profile.nil?\n fail ArgumentError, \"Missing the required parameter 'l2_bridge_endpoint_profile' when calling PolicyNetworkingConnectivitySegmentsEdgeBridgeProfilesApi.update_l2_bridge_profile\"\n end\n # resource path\n local_var_path = '/global-infra/sites/{site-id}/enforcement-points/{enforcement-point-id}/edge-bridge-profiles/{profile-id}'.sub('{' + 'site-id' + '}', site_id.to_s).sub('{' + 'enforcement-point-id' + '}', enforcement_point_id.to_s).sub('{' + 'profile-id' + '}', profile_id.to_s)\n\n # query parameters\n query_params = {}\n\n # header parameters\n header_params = {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n # HTTP header 'Content-Type'\n header_params['Content-Type'] = @api_client.select_header_content_type(['application/json'])\n\n # form parameters\n form_params = {}\n\n # http body (model)\n post_body = @api_client.object_to_http_body(l2_bridge_endpoint_profile)\n auth_names = ['BasicAuth']\n data, status_code, headers = @api_client.call_api(:PUT, local_var_path,\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => 'L2BridgeEndpointProfile')\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: PolicyNetworkingConnectivitySegmentsEdgeBridgeProfilesApi#update_l2_bridge_profile\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end",
"def create_or_update_segment_security_profile_0_with_http_info(segment_security_profile_id, segment_security_profile, opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug 'Calling API: PolicyNetworkingConnectivitySegmentsSegmentProfilesSegmentSecurityProfilesApi.create_or_update_segment_security_profile_0 ...'\n end\n # verify the required parameter 'segment_security_profile_id' is set\n if @api_client.config.client_side_validation && segment_security_profile_id.nil?\n fail ArgumentError, \"Missing the required parameter 'segment_security_profile_id' when calling PolicyNetworkingConnectivitySegmentsSegmentProfilesSegmentSecurityProfilesApi.create_or_update_segment_security_profile_0\"\n end\n # verify the required parameter 'segment_security_profile' is set\n if @api_client.config.client_side_validation && segment_security_profile.nil?\n fail ArgumentError, \"Missing the required parameter 'segment_security_profile' when calling PolicyNetworkingConnectivitySegmentsSegmentProfilesSegmentSecurityProfilesApi.create_or_update_segment_security_profile_0\"\n end\n # resource path\n local_var_path = '/global-infra/segment-security-profiles/{segment-security-profile-id}'.sub('{' + 'segment-security-profile-id' + '}', segment_security_profile_id.to_s)\n\n # query parameters\n query_params = {}\n query_params[:'override'] = opts[:'override'] if !opts[:'override'].nil?\n\n # header parameters\n header_params = {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n # HTTP header 'Content-Type'\n header_params['Content-Type'] = @api_client.select_header_content_type(['application/json'])\n\n # form parameters\n form_params = {}\n\n # http body (model)\n post_body = @api_client.object_to_http_body(segment_security_profile)\n auth_names = ['BasicAuth']\n data, status_code, headers = @api_client.call_api(:PUT, local_var_path,\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => 'SegmentSecurityProfile')\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: PolicyNetworkingConnectivitySegmentsSegmentProfilesSegmentSecurityProfilesApi#create_or_update_segment_security_profile_0\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end",
"def create_or_patch_ip_sec_vpn_tunnel_profile_0(tunnel_profile_id, ip_sec_vpn_tunnel_profile, opts = {})\n create_or_patch_ip_sec_vpn_tunnel_profile_0_with_http_info(tunnel_profile_id, ip_sec_vpn_tunnel_profile, opts)\n nil\n end",
"def create_bridge_endpoint_profile(bridge_endpoint_profile, opts = {})\n data, _status_code, _headers = create_bridge_endpoint_profile_with_http_info(bridge_endpoint_profile, opts)\n data\n end",
"def update_profile(options = {}) \n # query profile info\n response = HTTP.auth('Bearer ' + Asca::Tools::Token.new_token).get(URI_PROFILES, :params => { 'filter[name]' => options[:name] })\n if response.status.success?\n responseObj = JSON.parse(response.body)\n queried_profile_list = responseObj[\"data\"]\n if queried_profile_list.length() > 0\n profile = queried_profile_list[0]\n end\n else\n Asca::Tools::Log.error(response.body)\n return\n end\n \n if !profile\n Asca::Tools::Log.error(\"No profile named #{options[:name]} found\")\n return\n end\n # create new profile\n profile_type = profile[\"attributes\"][\"profileType\"]\n \n # get bundle id\n response = HTTP.auth('Bearer ' + Asca::Tools::Token.new_token).get(profile[\"relationships\"][\"bundleId\"][\"links\"][\"self\"])\n bundle_id = JSON.parse(response.body)[\"data\"][\"id\"]\n response = HTTP.auth('Bearer ' + Asca::Tools::Token.new_token).get(profile[\"relationships\"][\"certificates\"][\"links\"][\"self\"])\n certificate_ids = JSON.parse(response.body)[\"data\"].map { |cer| cer[\"id\"] }\n \n # get all device ids\n device_ids = Asca::REST::Provisioning::Devices.list_devices.map { |device|\n device[\"id\"]\n }\n \n # delete old prifile\n delete_profile :name => options[:name]\n \n if profile_type.include? 'APP_STORE'\n create_new_profile :name => options[:name], :type => profile_type, :bundle_id => bundle_id, :certificate_ids => certificate_ids\n else\n create_new_profile :name => options[:name], :type => profile_type, :bundle_id => bundle_id, :device_ids => device_ids, :certificate_ids => certificate_ids\n end\n \n return true\n end",
"def update_profile_by_profile_id(profile_id, user_profile_data)\n # PUT /d2l/api/lp/(version)/profile/(profileId)\n # NOTE: Example of User.UserProfile JSON Data Block\n # { \"Nickname\": <string>,\n # \"Birthday\": {\n # \"Month\": <number>,\n # \"Day\": <number>\n # },\n # \"HomeTown\": <string>,\n # \"Email\": <string>,\n # \"HomePage\": <string>,\n # \"HomePhone\": <string>,\n # \"BusinessPhone\": <string>,\n # \"MobilePhone\": <string>,\n # \"FaxNumber\": <string>,\n # \"Address1\": <string>,\n # \"Address2\": <string>,\n # \"City\": <string>,\n # \"Province\": <string>,\n # \"PostalCode\": <string>,\n # \"Country\": <string>,\n # \"Company\": <string>,\n # \"JobTitle\": <string>,\n # \"HighSchool\": <string>,\n # \"University\": <string>,\n # \"Hobbies\": <string>,\n # \"FavMusic\": <string>,\n # \"FavTVShows\": <string>,\n # \"FavMovies\": <string>,\n # \"FavBooks\": <string>,\n # \"FavQuotations\": <string>,\n # \"FavWebSites\": <string>,\n # \"FutureGoals\": <string>,\n # \"FavMemory\": <string>,\n # \"SocialMediaUrls\": [ // Array of SocialMediaUrl blocks\n # {\n # \"Name\": <string>,\n # \"Url\": <string:URL>\n # },\n # { <composite:SocialMediaUrl> }, ...\n # ]\n # }\n # NOTE: The back-end service also expects a file names \"profileImage\"\n # RETURNS: a UserProfile JSON data block for the updated personal profile.\nend",
"def create_bridge_endpoint_profile_with_http_info(bridge_endpoint_profile, opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug \"Calling API: NetworkTransportApi.create_bridge_endpoint_profile ...\"\n end\n # verify the required parameter 'bridge_endpoint_profile' is set\n if @api_client.config.client_side_validation && bridge_endpoint_profile.nil?\n fail ArgumentError, \"Missing the required parameter 'bridge_endpoint_profile' when calling NetworkTransportApi.create_bridge_endpoint_profile\"\n end\n # resource path\n local_var_path = \"/bridge-endpoint-profiles\"\n\n # query parameters\n query_params = {}\n\n # header parameters\n header_params = {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n # HTTP header 'Content-Type'\n header_params['Content-Type'] = @api_client.select_header_content_type(['application/json'])\n\n # form parameters\n form_params = {}\n\n # http body (model)\n post_body = @api_client.object_to_http_body(bridge_endpoint_profile)\n auth_names = ['BasicAuth']\n data, status_code, headers = @api_client.call_api(:POST, local_var_path,\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => 'BridgeEndpointProfile')\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: NetworkTransportApi#create_bridge_endpoint_profile\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end",
"def create_or_update_segment_security_profile_with_http_info(segment_security_profile_id, segment_security_profile, opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug 'Calling API: PolicyNetworkingConnectivitySegmentsSegmentProfilesSegmentSecurityProfilesApi.create_or_update_segment_security_profile ...'\n end\n # verify the required parameter 'segment_security_profile_id' is set\n if @api_client.config.client_side_validation && segment_security_profile_id.nil?\n fail ArgumentError, \"Missing the required parameter 'segment_security_profile_id' when calling PolicyNetworkingConnectivitySegmentsSegmentProfilesSegmentSecurityProfilesApi.create_or_update_segment_security_profile\"\n end\n # verify the required parameter 'segment_security_profile' is set\n if @api_client.config.client_side_validation && segment_security_profile.nil?\n fail ArgumentError, \"Missing the required parameter 'segment_security_profile' when calling PolicyNetworkingConnectivitySegmentsSegmentProfilesSegmentSecurityProfilesApi.create_or_update_segment_security_profile\"\n end\n # resource path\n local_var_path = '/infra/segment-security-profiles/{segment-security-profile-id}'.sub('{' + 'segment-security-profile-id' + '}', segment_security_profile_id.to_s)\n\n # query parameters\n query_params = {}\n query_params[:'override'] = opts[:'override'] if !opts[:'override'].nil?\n\n # header parameters\n header_params = {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n # HTTP header 'Content-Type'\n header_params['Content-Type'] = @api_client.select_header_content_type(['application/json'])\n\n # form parameters\n form_params = {}\n\n # http body (model)\n post_body = @api_client.object_to_http_body(segment_security_profile)\n auth_names = ['BasicAuth']\n data, status_code, headers = @api_client.call_api(:PUT, local_var_path,\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => 'SegmentSecurityProfile')\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: PolicyNetworkingConnectivitySegmentsSegmentProfilesSegmentSecurityProfilesApi#create_or_update_segment_security_profile\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end",
"def update_profile\n @profile = @account.employee\n \n if params[:profile].present?\n @profile.assign_attributes(profile_params)\n @profile.save\n redirect_to gns_core.my_profile_backend_accounts_path, flash: {success: 'Profile has been updated successfully.'}\n else\n redirect_to gns_core.my_profile_backend_accounts_path, flash: {error: 'Profile update failed. Please try again!'}\n end\n end",
"def create_sslprofile\n converge_by(\"Create #{new_resource} ssl profile\") do\n Chef::Log.info \"Create #{new_resource} ssl profile\"\n\n\n load_balancer.client['LocalLB.ProfileClientSSL'].create_v2([new_resource.sslprofile_name], [{\"value\" => \"/Common/#{new_resource.keyid}\", \"default_flag\" => \"false\"}] , [{\"value\" => \"/Common/#{new_resource.certid}\", \"default_flag\" => \"false\"}])\n load_balancer.client['LocalLB.ProfileClientSSL'].set_passphrase([\"/Common/#{new_resource.sslprofile_name}\"], [{\"value\" => \"#{new_resource.passphrase}\", \"default_flag\" => \"false\" }]) if !new_resource.passphrase.nil?\n\n current_resource.keyid(new_resource.keyid)\n current_resource.certid(new_resource.certid)\n current_resource.cacertid(new_resource.cacertid)\n current_resource.passphrase(new_resource.passphrase)\n\n new_resource.updated_by_last_action(true)\n end\n end",
"def set_new_profile\n server_profile = OneviewSDK::ServerProfile\n # lets the SDK set a default name in case the user has not declared one\n if @data['serverProfileName']\n sp_name = @data.delete('serverProfileName')\n get_single_resource_instance.new_profile(sp_name).create unless server_profile.find_by(@client, name: sp_name).first\n else\n default = 'Server_Profile_created_from_' + @data['name']\n get_single_resource_instance.new_profile.create unless server_profile.find_by(@client, name: default).first\n end\n end",
"def update_current_user_profile_data(user_profile_data)\n # PUT /d2l/api/lp/(version)/profile/myProfile\nend",
"def create_profile!\n bundle_id = Sigh.config[:app_identifier]\n name = Sigh.config[:provisioning_name]\n if !name\n name = Sigh.config[:app_identifier].gsub '.' ,''\n end\n\n UI.important \"Creating new provisioning profile for '#{Sigh.config[:app_identifier]}' with name '#{name}'\"\n profile = profile_type.create!(name: name,\n bundle_id: bundle_id)\n profile\n end",
"def create_or_patch_ip_sec_vpn_tunnel_profile(tunnel_profile_id, ip_sec_vpn_tunnel_profile, opts = {})\n create_or_patch_ip_sec_vpn_tunnel_profile_with_http_info(tunnel_profile_id, ip_sec_vpn_tunnel_profile, opts)\n nil\n end",
"def assign_default_profile(args = {}) \n put(\"/profiles.json/#{args[:profileId]}/default\", args)\nend",
"def newprofile\n if params[\"auth_key\"] == nil or params[\"device_id\"] == nil or params[\"profile_url\"] == nil\n render :json => '{\"status\": \"failed\", \"reason\": \"incorrect parameters\"}'\n else\n device = Device.find(params[\"device_id\"])\n if device.auth_key == params[\"auth_key\"]\n device.profile_url = params[\"profile_url\"]\n if device.save\n render :json => '{\"status\": \"success\"}'\n else\n render :json => '{\"status\": \"failed\", \"reason\": \"save error\"}'\n end\n else\n render :json => '{\"status\": \"failed\", \"reason\": \"not authorized\"}'\n end\n end\n end",
"def create_DB_profile\n \n @profile = ProfileId.new\n @profile.user_name = params[:user_name]\n @profile.profile_id = @parsed[\"profileId\"]\n @profile.save\n end",
"def create_or_replace_gateway_qos_profile_0_with_http_info(qos_profile_id, gateway_qos_profile, opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug 'Calling API: PolicyNetworkingNetworkingProfilesGatewayQOSProfilesApi.create_or_replace_gateway_qos_profile_0 ...'\n end\n # verify the required parameter 'qos_profile_id' is set\n if @api_client.config.client_side_validation && qos_profile_id.nil?\n fail ArgumentError, \"Missing the required parameter 'qos_profile_id' when calling PolicyNetworkingNetworkingProfilesGatewayQOSProfilesApi.create_or_replace_gateway_qos_profile_0\"\n end\n # verify the required parameter 'gateway_qos_profile' is set\n if @api_client.config.client_side_validation && gateway_qos_profile.nil?\n fail ArgumentError, \"Missing the required parameter 'gateway_qos_profile' when calling PolicyNetworkingNetworkingProfilesGatewayQOSProfilesApi.create_or_replace_gateway_qos_profile_0\"\n end\n # resource path\n local_var_path = '/infra/gateway-qos-profiles/{qos-profile-id}'.sub('{' + 'qos-profile-id' + '}', qos_profile_id.to_s)\n\n # query parameters\n query_params = {}\n query_params[:'override'] = opts[:'override'] if !opts[:'override'].nil?\n\n # header parameters\n header_params = {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n # HTTP header 'Content-Type'\n header_params['Content-Type'] = @api_client.select_header_content_type(['application/json'])\n\n # form parameters\n form_params = {}\n\n # http body (model)\n post_body = @api_client.object_to_http_body(gateway_qos_profile)\n auth_names = ['BasicAuth']\n data, status_code, headers = @api_client.call_api(:PUT, local_var_path,\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => 'GatewayQosProfile')\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: PolicyNetworkingNetworkingProfilesGatewayQOSProfilesApi#create_or_replace_gateway_qos_profile_0\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end",
"def create_profile\n self.profile = Profile.create(user_id: id)\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Checks that the inbox and processed directories are on the same volume (since we want to be able to move these big files, not copy them). Potential BUG: what happens if I want to move both to a different volume? Maybe there should be a method for moving both at the same time..?
|
def register_defaults_for_processed(target_dir)
if Pathstring.new(inbox).volume == Pathstring.new(target_dir).volume then
@defaults.registerDefaults(:processed => Pathstring.new(target_dir) )
else
raise "Both inbox and processed directories must be on same volume."
end
end
|
[
"def receive_flip(from_filestore_name, to_area, paths, unique_names)\n# $stderr << \"receive_flip(#{from_filestore_name}, #{to_area}, #{paths.inspect}, #{unique_names})\\n\"\n tmp_area_path = area_path(:tmp)\n\n # tmp_uuid\n tmp_uuid = unique_name\n\n # first move all moveable paths to a unique named tmp area within the receive area\n tmp_path = File.join(tmp_area_path, tmp_uuid)\n if paths.length>0\n FileUtils.mkdir_p(tmp_path)\n paths.each do |path|\n FileUtils.mv(path, tmp_path)\n end\n end\n\n # move everything from the receive area... recovers interrupted receive_flips too\n to_path = area_path(to_area)\n Dir.foreach(tmp_area_path) do |path|\n path_1 = File.join(tmp_area_path, path)\n if unique_names\n\n if FileStore.ordinary_directory?(path_1)\n # names are unique, so don't move the uuid folders\n Dir.foreach(path_1) do |file_path|\n path_2 = File.join(path_1, file_path)\n FileUtils.mv(path_2, to_path, :force=>true) if File.file?(path_2) || FileStore.ordinary_directory?(path_2) \n end\n elsif File.file?(path_1) # names are unique, so ok to move plain files too\n FileUtils.mv(path_1, to_path, :force=>true)\n end\n\n else\n # move uuid named dirs\n FileUtils.mv(path_1, to_path, :force=>true) if File.file?(path_1) || FileStore.ordinary_directory?(path_1)\n end\n end\n\n # finally remove any empty tmp dirs\n scrub!(:tmp)\n end",
"def move_files\n source_dir = Item.new(Path.new(params[:source]))\n dest_dir = Item.new(Path.new(params[:dest]))\n type = params[:type]\n response = {}\n response[:source_dir] = source_dir\n response[:dest_dir] = dest_dir\n if source_dir.move_files_to(dest_dir, type)\n response[:msg] = \"Success\"\n render json: response, status: 200\n else\n response[:msg] = \"Fail\"\n render json: response, status: 402\n end\n end",
"def move_files_to_in_process\n # find and clear out old dir\n in_process_dir = student_work_dir(:in_process, false)\n\n return false if in_process_dir.nil?\n\n if Dir.exists? in_process_dir\n pwd = FileUtils.pwd\n Dir.chdir(in_process_dir)\n # move all files to the enq dir\n FileUtils.rm Dir.glob(\"*\")\n Dir.chdir(pwd)\n end\n\n from_dir = student_work_dir(:new, false)\n if Dir.exists?(from_dir)\n #save new files in done folder\n compress_new_to_done\n end\n\n zip_file = zip_file_path_for_done_task()\n if zip_file && File.exists?(zip_file)\n extract_file_from_done FileHelper.student_work_dir(:new), \"*\", lambda { | task, to_path, name | \"#{to_path}#{name}\" }\n return false if not Dir.exists?(from_dir)\n else\n return false\n end\n\n # Move files from new to in process\n FileHelper.move_files(from_dir, in_process_dir)\n return true\n end",
"def move_dirs\n \n end",
"def move_files\n\t\tsource_dir = Item.new(Path.new(params[:source]))\n\t\tdest_dir = Item.new(Path.new(params[:dest]))\n\t\ttype = params[:type]\n\t\tresponse = {}\n\t\tresponse[:source_dir] = source_dir\n\t\tresponse[:dest_dir] = dest_dir\n\t\tif source_dir.move_files_to(dest_dir, type)\n\t\t\tresponse[:msg] = \"Success\"\n render json: response, status: 200\n\t\telse\n\t\t\tresponse[:msg] = \"Fail\"\n render json: response, status: 402\n\t\tend\n\tend",
"def remote_directory(target_path, source_path)\n source_path = File.expand_path(source_path, files_dir)\n queue! %[sudo mkdir -p $(dirname #{target_path})]\n queue! %[sudo rsync -av --delete --exclude='.git' #{localhost}:#{source_path}/ #{target_path}/]\nend",
"def verify_shared_folders(folders)\n folders.each do |folder|\n # Warm up PoSH communicator for new instances - any stderr results\n # in failure: https://github.com/mefellows/vagrant-dsc/issues/21\n @machine.communicate.test(\"test -d #{folder}\", sudo: true)\n\n @logger.info(\"Checking for shared folder: #{folder}\")\n if !@machine.communicate.test(\"test -d #{folder}\", sudo: true)\n raise DSCError, :missing_shared_folders\n end\n end\n end",
"def inDestinationRootDir?(item)\n # the dirname-func returns \"/\" if \"/\" is given as dir,\n # therefore we need to check that item is not equal to @destination\n item != @destination && FiRe::filesys.dirname(item) == @destination\n end",
"def check_directories\n if File.exist?(source_dir)\n @source_dir = source_dir\n else\n raise DirectoryNotExist, \"Directory not exist\" \n end\n if File.exist?(destination_dir)\n @destination_dir = destination_dir\n else\n raise DirectoryNotExist, \"Directory not exist\" \n end \n end",
"def register_defaults_for_inbox(inboxpath)\n if processed then # If there is a default set for processed...\n if not Pathstring.new(inboxpath).volume == Pathstring.new(processed).volume then # ...and it's not on the same volume as inboxpath\n raise \"Both inbox and processed directories must be on same volume.\"\n end\n end\n @defaults.registerDefaults(:inbox => File.expand_path(inboxpath))\n end",
"def dirs_same?(dir1, dir2)\n dir1_entries = siba_file.dir_entries dir1\n dir2_entries = siba_file.dir_entries dir2\n diff1 = (dir1_entries - dir2_entries).map{|i| File.join(dir1,i)}\n diff2 = (dir2_entries - dir1_entries).map{|i| File.join(dir2,i)}\n diff = diff1 + diff2\n msg = \"The directories '#{File.basename(dir1)}' and '#{File.basename(dir2)}' are different: \"\n raise Siba::Error, \"#{msg}#{diff.take(10).join(', ')}\" unless diff.empty?\n\n # compare files and directories\n dir1_entries.each do |dir|\n next if dir == \".\" || dir == \"..\"\n sub_dir1_entry = File.join dir1, dir\n sub_dir2_entry = File.join dir2, dir\n\n # compare files\n if siba_file.file_file? sub_dir1_entry\n raise \"#{msg}'#{sub_dir2_entry}' is not a file\" unless siba_file.file_file? sub_dir2_entry\n unless siba_file.file_utils_compare_file sub_dir1_entry, sub_dir2_entry\n raise Siba::Error, \"#{msg}'#{sub_dir1_entry}' and '#{sub_dir2_entry}' files are different.\"\n end\n end\n\n # compare permissions\n if (siba_file.file_stat(sub_dir1_entry).mode % 01000) != (siba_file.file_stat(sub_dir2_entry).mode % 01000)\n raise Siba::Error, \"#{msg}'#{sub_dir1_entry}' and '#{sub_dir2_entry}' entries have different permissions.\"\n end\n\n # compare sub-dirs\n if siba_file.file_directory? sub_dir1_entry\n raise Siba::Error, \"#{msg}'#{sub_dir2_entry}' is not a directory\" unless siba_file.file_directory? sub_dir2_entry\n dirs_same? sub_dir1_entry, sub_dir2_entry\n end\n end\n end",
"def move_files(from_path, to_path)\n # move into the new dir - and mv files to the in_process_dir\n pwd = FileUtils.pwd\n begin\n FileUtils.mkdir_p(to_path) if not Dir.exists? to_path\n Dir.chdir(from_path)\n FileUtils.mv Dir.glob(\"*\"), to_path, :force => true\n Dir.chdir(to_path)\n begin\n #remove from_path as files are now \"in process\"\n FileUtils.rm_r(from_path)\n rescue\n logger.warn \"failed to rm #{from_path}\"\n end\n ensure\n if FileUtils.pwd() != pwd\n if Dir.exists? pwd\n FileUtils.chdir(pwd)\n else\n FileUtils.chdir( student_work_dir() )\n end\n end\n end\n end",
"def check_folders\n DB_LOGGER.info(\"Check folders...\")\n unless File.exist? NEW_FOLDER\n FileUtils.mkdir_p NEW_FOLDER\n end\n unless File.exist? CUR_FOLDER\n FileUtils.mkdir_p CUR_FOLDER\n end\n DB_LOGGER.info(\"Folders checked, Ok...\")\nend",
"def search_inbox inbox_dir\n inbox_path = Pathname.new(inbox_dir)\n unless inbox_path.directory? then\n return\n end\n inbox_path.each_entry do |path|\n ingest_path = inbox_path.join(path)\n\n if ingest_path.directory? && !/^\\.+$/.match(path.to_s) then\n ingest_path.each_entry do |ingest_file|\n if /^disk\\.[0-9]+/.match(ingest_file.to_s) then\n ingest_disk_image ingest_path, ingest_file\n end\n end\n end\n end\n end",
"def move_multimedia_files_to_trash\n if self.multimedia_path\n\n FileUtils.mkdir_p(dir_for_deleted)\n FileUtils.mv(Dir.glob(File.join(class_multimedia_path, self.multimedia_path, \"*.*\")), dir_for_deleted)\n\n logger.info \"Moviendo #{File.join(class_multimedia_path, self.multimedia_path, \"*.*\")} a #{dir_for_deleted}\"\n\n FileUtils.rm_rf(File.join(class_multimedia_path, self.multimedia_path))\n\n # Los videos de la webtv dejarán de funcionar porque están en el mismo directorio\n self.webtv_videos.update_all(\"published_at=NULL, document_id=NULL\") if self.respond_to?('webtv_videos')\n self.gallery_photos.update_all(\"document_id=NULL\") if self.respond_to?('gallery_photos')\n self.album.update_attributes(:document_id => nil, :draft => true) if self.respond_to?('album') && self.album\n end\n return true\n end",
"def compareDirs( relative = \"\" )\n # Combine the base path with the relative path\n original = File.expand_path( File.join( $original, relative ) )\n backup = File.expand_path( File.join( $backup, relative ) )\n\n # Return if this directory has been excluded\n if $options[:ignore].include?( original ) or $options[:ignore].include?( backup )\n $skippedCount += 1\n STDOUT.puts \"SKIP: Skipping comparison of [#{original}] and [#{backup}]\"\n return\n end\n\n # Make sure both directories exist\n unless File.directory?( original ) and File.directory?( backup )\n STDOUT.puts \"DIR: [#{original}] not found in [#{backup}]\"\n $diffCount += 1\n $diffCount += countItems( original ) if $options[:count]\n return\n end\n\n # If both directories exist, we check their contents\n begin\n Dir.foreach( original ) do |item|\n next if item == \".\" or item == \"..\"\n $itemCount += 1\n\n origPath = File.join( original, item )\n backupPath = File.join( backup, item )\n\n if File.directory? origPath\n # Skip symlinks if told to do so...\n if File.symlink?( origPath ) and not $options[:follow]\n $skippedCount += 1\n STDOUT.puts \"SYMLINK: [#{origPath}] skipped.\"\n next\n end\n # Stay on one filesystem if told to do so...\n outerDev = File::Stat.new( original ).dev\n innerDev = File::Stat.new( origPath ).dev\n if outerDev != innerDev and $options[:one_filesystem]\n $skippedCount += 1\n STDOUT.puts \"DIFFFS: [#{origPath}] is on a different file system. Skipped.\"\n next\n end\n compareDirs( File.join( relative, item ) )\n else # It's a file\n unless sameFile( origPath, backupPath )\n $diffCount += 1\n STDOUT.puts \"FILE: [#{origPath}] not found at, or doesn't match [#{backupPath}]\"\n end\n end\n end # Dir.foreach\n rescue Errno::EACCES\n STDOUT.puts \"ERROR: Can't read directory [#{original}]\"\n $errorCount += 1\n end\nend",
"def mirror_directory(src_dir, dest_dir)\n remote_list = capture(\"ls #{dest_dir}\").split\n local_list = Dir.entries( \"#{src_dir}\").select { |f| ! f.start_with?('.') }\n new_files = local_list - remote_list\n old_files = remote_list - local_list\n changed_files = (remote_list & local_list).select do |f| \n different_hash?( \"#{src_dir}/#{f}\", \"#{dest_dir}/#{f}\") \n end\n old_files.each { |file| run \"rm #{dest_dir}/#{file}\" }\n new_files.each { |file| upload( \"#{src_dir}/#{file}\", \"#{dest_dir}/#{file}\" ) }\n changed_files.each { |file| upload( \"#{src_dir}/#{file}\", \"#{dest_dir}/#{file}\" ) }\n end",
"def test_recursive_copy\n\t\t\tcmd = \"./bin/fcp -c test/fcpconfig -r\"\n\t\t\tdir = \"test/tmp_client/plop\"\n\t\t\tremote_dir = \"test/tmp_server/plop\"\n\t\t\tfile1 = dir + \"/foobar1\"\n\t\t\tfile2 = dir + \"/foobar2\"\n\t\t\tremote_file1 = remote_dir + \"/\" + File.basename(file1)\n\t\t\tremote_file2 = remote_dir + \"/\" + File.basename(file2)\n\n\t\t\tDir.mkdir(dir)\n\t\t\tcreate_file(file1)\n\t\t\tcreate_file(file2)\n\n\t\t\tio = IO::popen(\"#{cmd} #{dir} ftp://local:\")\n\n\t\t\tassert(io.readlines.join.empty?)\n\t\t\tassert(File.exists?(remote_dir))\n\t\t\tassert(File.directory?(remote_dir))\n\n\t\t\tassert(check_file(file1, remote_file1))\n\t\t\tassert(check_file(file2, remote_file2))\n\t\tend",
"def push_files(local_path,remote_path)\n debug_p(\"push_files from #{local_path} to #{remote_path}\")\n local_dir = Dir.new(local_path)\n local_dir.each{ |path|\n unless File::ftype(local_path + \"/\" + path) == \"directory\"\n @sftp_session.upload!(local_path + \"/\" + path,remote_path + \"/\" + path)\n end\n }\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Checks that inbox and processed directories are on the same volume
|
def register_defaults_for_inbox(inboxpath)
if processed then # If there is a default set for processed...
if not Pathstring.new(inboxpath).volume == Pathstring.new(processed).volume then # ...and it's not on the same volume as inboxpath
raise "Both inbox and processed directories must be on same volume."
end
end
@defaults.registerDefaults(:inbox => File.expand_path(inboxpath))
end
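A short usage sketch for this method and register_defaults_for_processed above. The owning class name InboxDefaults is hypothetical; only the same-volume constraint and the error message come from the code itself.

# Hypothetical caller; InboxDefaults is an assumed name for the class that holds
# @defaults, inbox and processed.
defaults = InboxDefaults.new                                            # assumed class

defaults.register_defaults_for_inbox('/Volumes/Media/inbox')            # registers the inbox default
defaults.register_defaults_for_processed('/Volumes/Media/processed')    # same volume: accepted

begin
  defaults.register_defaults_for_processed('/Volumes/Backup/processed') # different volume
rescue RuntimeError => e
  puts e.message  # => "Both inbox and processed directories must be on same volume."
end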
|
[
"def verify_shared_folders(folders)\n folders.each do |folder|\n # Warm up PoSH communicator for new instances - any stderr results\n # in failure: https://github.com/mefellows/vagrant-dsc/issues/21\n @machine.communicate.test(\"test -d #{folder}\", sudo: true)\n\n @logger.info(\"Checking for shared folder: #{folder}\")\n if !@machine.communicate.test(\"test -d #{folder}\", sudo: true)\n raise DSCError, :missing_shared_folders\n end\n end\n end",
"def register_defaults_for_processed(target_dir)\n if Pathstring.new(inbox).volume == Pathstring.new(target_dir).volume then\n @defaults.registerDefaults(:processed => Pathstring.new(target_dir) )\n else\n raise \"Both inbox and processed directories must be on same volume.\"\n end\n end",
"def fail_if_dir_locked\n if Pathname.glob(@in_dir + '*.lck').size > 0\n raise EdiProcessError, 'Input folder is locked. Unable to process. NB. More than one instance cannot process the same folder.'\n end\n end",
"def search_inbox inbox_dir\n inbox_path = Pathname.new(inbox_dir)\n unless inbox_path.directory? then\n return\n end\n inbox_path.each_entry do |path|\n ingest_path = inbox_path.join(path)\n\n if ingest_path.directory? && !/^\\.+$/.match(path.to_s) then\n ingest_path.each_entry do |ingest_file|\n if /^disk\\.[0-9]+/.match(ingest_file.to_s) then\n ingest_disk_image ingest_path, ingest_file\n end\n end\n end\n end\n end",
"def check_paths\n [@status_path, @pid_path].each do |path|\n unless Dir.exist?(File.dirname(path)) && (! File.exist?(path) || File.writable?(path))\n @logger.fatal <<MSG\nUnable to create the file: <#{path}>\n\nPlease ensure that the directory exists and that your filesystem permissions are set appropriately, or\nchange the locations with Mpg123Player::Configuration.\nMSG\n raise\n end\n end\n end",
"def check_folders\n DB_LOGGER.info(\"Check folders...\")\n unless File.exist? NEW_FOLDER\n FileUtils.mkdir_p NEW_FOLDER\n end\n unless File.exist? CUR_FOLDER\n FileUtils.mkdir_p CUR_FOLDER\n end\n DB_LOGGER.info(\"Folders checked, Ok...\")\nend",
"def check_directories\n if File.exist?(source_dir)\n @source_dir = source_dir\n else\n raise DirectoryNotExist, \"Directory not exist\" \n end\n if File.exist?(destination_dir)\n @destination_dir = destination_dir\n else\n raise DirectoryNotExist, \"Directory not exist\" \n end \n end",
"def check()\n # check if teh volume still exists\n begin\n volumes = $ec2.describe_volumes([self.id])\n rescue RightAws::AwsError\n if $!.errors[0][0] == \"InvalidVolume.NotFound\"\n puts \"WARN: Volume #{self.id} is not running\"\n delete()\n return\n else\n p $!.code\n end\n end\n\n # check that it is attached\n if volumes[0][:aws_attachment_status] == 'attached'\n if self.attached_instance != volumes[0][:aws_instance_id]\n self.attached_instance = volumes[0][:aws_instance_id]\n self.save()\n puts \"WARN: volume #{self.id} is now attached to #{self.attached_instance}\"\n end\n elsif self.attached_instance.nil?\n puts \"WARN: volume #{self.id} is no longer attached\"\n self.attached_instance = nil\n self.save()\n end\n end",
"def check_files\n return if busy # if working on something already, skip the iteration\n updated = []\n files.each do |filename, mtime| \n begin\n current_mtime = File.stat(filename).mtime\n rescue Errno::ENOENT\n # file was not found and was probably deleted\n # remove the file from the file list \n files.delete(filename)\n puts \"=> ERROR: #{filename} not found, ignoring\" if Bolt.verbose?\n next\n end\n if current_mtime != mtime \n updated << filename\n # update the mtime in file registry so we it's only send once\n files[filename] = current_mtime\n $stdout.puts \">> Spotted change in #{filename}\" if Bolt.verbose?\n end\n end\n parent.handle(updated) if updated != []\n false\n end",
"def root_lock_path_same?\n @mutex.synchronize do\n return false unless @parent_stat\n\n cur_stat = zk.stat(root_lock_path) \n cur_stat.exists? and (cur_stat.ctime == @parent_stat.ctime)\n end\n end",
"def same_volume?(path2)\n return volume == Pathstring.new(path2).volume ? true : false # (Yes, I know this is tautologic; but it makes the code easer to read, at least for me)\n end",
"def incoming_files\n dir '~/incoming'\n end",
"def shared?\n \t!self.shared_folders.empty?\n end",
"def reshare dir, host\n if File.exist? \"#{dir}/#{host}/.completed\"\n `share -F nfs -o ro=#{host},anon=0 #{dir}/#{host} > /dev/null 2>&1`\n end\nend",
"def shared?\n !self.shared_folders.empty?\n end",
"def dirs_same?(dir1, dir2)\n dir1_entries = siba_file.dir_entries dir1\n dir2_entries = siba_file.dir_entries dir2\n diff1 = (dir1_entries - dir2_entries).map{|i| File.join(dir1,i)}\n diff2 = (dir2_entries - dir1_entries).map{|i| File.join(dir2,i)}\n diff = diff1 + diff2\n msg = \"The directories '#{File.basename(dir1)}' and '#{File.basename(dir2)}' are different: \"\n raise Siba::Error, \"#{msg}#{diff.take(10).join(', ')}\" unless diff.empty?\n\n # compare files and directories\n dir1_entries.each do |dir|\n next if dir == \".\" || dir == \"..\"\n sub_dir1_entry = File.join dir1, dir\n sub_dir2_entry = File.join dir2, dir\n\n # compare files\n if siba_file.file_file? sub_dir1_entry\n raise \"#{msg}'#{sub_dir2_entry}' is not a file\" unless siba_file.file_file? sub_dir2_entry\n unless siba_file.file_utils_compare_file sub_dir1_entry, sub_dir2_entry\n raise Siba::Error, \"#{msg}'#{sub_dir1_entry}' and '#{sub_dir2_entry}' files are different.\"\n end\n end\n\n # compare permissions\n if (siba_file.file_stat(sub_dir1_entry).mode % 01000) != (siba_file.file_stat(sub_dir2_entry).mode % 01000)\n raise Siba::Error, \"#{msg}'#{sub_dir1_entry}' and '#{sub_dir2_entry}' entries have different permissions.\"\n end\n\n # compare sub-dirs\n if siba_file.file_directory? sub_dir1_entry\n raise Siba::Error, \"#{msg}'#{sub_dir2_entry}' is not a directory\" unless siba_file.file_directory? sub_dir2_entry\n dirs_same? sub_dir1_entry, sub_dir2_entry\n end\n end\n end",
"def check_folder_contents(query)\n if (system(\"[ -d #{query} ]\"))\n puts(\"Error in current directory: #{query} already exists.\")\n false\n else\n true\n end\n end",
"def folder_reachable?\n Dir.exists? folder_path\n end",
"def shared? \n \t!self.shared_folders.empty? \n\tend"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
GET /threats/1 GET /threats/1.xml
|
def show
@threat = Threat.find(params[:id])
respond_to do |format|
format.html # show.html.erb
format.xml { render :xml => @threat }
end
end
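A functional-test sketch exercising both response formats of the action above; the fixture name and test class wiring are assumptions, only the :show action and the html/xml formats come from the controller code.

# Hypothetical functional test (classic Rails style, matching the scaffold above).
require 'test_helper'

class ThreatsControllerTest < ActionController::TestCase
  test "show renders HTML by default" do
    get :show, :id => threats(:one).id            # threats(:one) is an assumed fixture
    assert_response :success
  end

  test "show renders XML when .xml is requested" do
    get :show, :id => threats(:one).id, :format => 'xml'
    assert_response :success
  end
end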
|
[
"def index\n @threats = Threat.all\n end",
"def index\n @hats = Hat.find(:all)\n\n respond_to do |format|\n format.html # index.html.erb\n# format.xml { render :xml => @hats }\n end\n end",
"def index\n @threds = Thred.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @threds }\n end\n end",
"def index\n #@seats = Seat.all\n @seats = Seat.paginate( :page => params[:page], :per_page => 15)\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @seats }\n end\n end",
"def show\n @hat = Hat.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @hat }\n end\n end",
"def show\n @happening = Happening.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @happening }\n end\n end",
"def index\n @happenings = Happening.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @happenings }\n end\n end",
"def show\n @chirp = Chirp.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @chirp }\n end\n end",
"def show\n @cheat = Cheat.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @cheat }\n end\n end",
"def show\n @chat = Chat.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @chat }\n end\n end",
"def index\n authenticate\n @user = current_user\n @users = User.find_everyone(@user)\n @chats = @user.chats\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @chats }\n end\n end",
"def index\n @traffics = Traffic.find(:all, :order => \"created_at\")\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @traffics }\n end\n end",
"def index\n @meats = Meat.all\n end",
"def show\n @witch_cat = WitchCat.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @witch_cat }\n end\n end",
"def rss\n @event = Event.find_by_key(params['id'])\n @histories = @event.histories(:order => 'created_at DESC')\n render :layout => false\n response.headers[\"Content-Type\"] = \"application/xml; charset=utf-8\"\n end",
"def show\n @tattoo = Tattoo.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @tattoo }\n end\n end",
"def tournaments\n get('sports/en/tournaments.xml')\n end",
"def index\n @coatings = do_index(Coating, params)\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @coatings }\n end\n end",
"def index\n #@feats = Feat.find(:all)\n @feats = @character.feats.find(:all)\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @feats }\n end\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
GET /threats/new GET /threats/new.xml
|
def new
@threat = Threat.new
respond_to do |format|
format.html # new.html.erb
format.xml { render :xml => @threat }
end
end
|
[
"def new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @coating }\n end\n end",
"def new\n @cheat = Cheat.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @cheat }\n end\n end",
"def new\n @thred = Thred.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @thred }\n end\n end",
"def new\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @talk }\n end\n end",
"def new\n @feat = @person.feats.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @feat }\n end\n end",
"def new\n @monkey = Monkey.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @monkey }\n end\n end",
"def new\n @click_to_talk = ClickToTalk.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @click_to_talk }\n end\n end",
"def new\n @saying = Saying.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @saying }\n end\n end",
"def new\n @newstuff = Newstuff.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @newstuff }\n end\n end",
"def new\n @happening = Happening.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @happening }\n end\n end",
"def new\n @seating = Seating.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @seating }\n end\n end",
"def new\n @chat = Chat.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @chat }\n end\n end",
"def new\n @trail = Trail.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @trail }\n end\n end",
"def new\n @patent = Patent.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @patent }\n end\n end",
"def new\n @seat = Seat.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @seat }\n end\n end",
"def new\n @old_twit = OldTwit.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @old_twit }\n end\n end",
"def new\n @thought = Thought.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @thought }\n end\n end",
"def new\n @witch_cat = WitchCat.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @witch_cat }\n end\n end",
"def new\n @cat = Cat.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @cat }\n end\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
POST /threats POST /threats.xml
|
def create
@threat = Threat.new(params[:threat])
respond_to do |format|
if @threat.save
format.html { redirect_to(@threat, :notice => 'Threat was successfully created.') }
format.xml { render :xml => @threat, :status => :created, :location => @threat }
else
format.html { render :action => "new" }
format.xml { render :xml => @threat.errors, :status => :unprocessable_entity }
end
end
end
|
[
"def index\n @threats = Threat.all\n end",
"def create\n @hat = Hat.new(params[:hat])\n\n respond_to do |format|\n if @hat.save\n flash[:notice] = 'Hat was successfully created.'\n format.html { redirect_to(@hat) }\n format.xml { render :xml => @hat, :status => :created, :location => @hat }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @hat.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def threats=(value)\n @threats = value\n end",
"def create\n @cheat = Cheat.new(params[:cheat])\n\n respond_to do |format|\n if @cheat.save\n format.html { redirect_to(@cheat, :notice => 'Cheat was successfully created.') }\n format.xml { render :xml => @cheat, :status => :created, :location => @cheat }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @cheat.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def create\n @thermostat = Thermostat.new(params[:thermostat])\n\n respond_to do |format|\n if @thermostat.save\n format.html { redirect_to @thermostat, notice: 'Thermostat was successfully created.' }\n format.json { render json: @thermostat, status: :created, location: @thermostat }\n else\n format.html { render action: \"new\" }\n format.json { render json: @thermostat.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @thermostat = Thermostat.new(params[:thermostat])\n if @thermostat.save\n redirect_to @thermostat, :notice => \"Successfully created thermostat.\"\n else\n render :action => 'new'\n end\n end",
"def create\n @threats_answer = ThreatsAnswer.new(threats_answer_params)\n @threat = Threat.find(@threats_answer.threat)\n @plano = current_user.plano\n @threats = @plano.threats\n respond_to do |format|\n if @threats_answer.save\n format.html { redirect_to @threats_answer, notice: 'Threats answer was successfully created.' }\n format.json { render :show, status: :created, location: @threats_answer }\n format.js\n else\n format.html { render :new }\n format.json { render json: @threats_answer.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @twat = Twat.new(twat_params)\n\n respond_to do |format|\n if @twat.save\n format.html { redirect_to @twat, notice: 'Twat was successfully created.' }\n format.json { render action: 'show', status: :created, location: @twat }\n else\n format.html { render action: 'new' }\n format.json { render json: @twat.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @twatt = Twatt.new(twatt_params)\n\n respond_to do |format|\n if @twatt.save\n format.html { redirect_to @twatt, notice: 'Twatt was successfully created.' }\n format.json { render :show, status: :created, location: @twatt }\n else\n format.html { render :new }\n format.json { render json: @twatt.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @user = current_user\n @turf = Turf.find_by_id(params[:chat][:turf_id_value])\n @chat = @turf.chats.build(:content => params[:chat][:content])\n @chat.user = current_user\n\n if @chat.save\n respond_with(@chat)\n flash[:success] = \"Chat created!\"\n else\n flash[:error] = \"Chat not created :(\"\n end\n end",
"def create\n @meat = Meat.new(meat_params)\n\n respond_to do |format|\n if @meat.save\n format.html { redirect_to @meat, notice: 'Meat was successfully created.' }\n format.json { render :show, status: :created, location: @meat }\n else\n format.html { render :new }\n format.json { render json: @meat.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @achat = Achat.new(achat_params)\n\n respond_to do |format|\n if @achat.save\n format.html { redirect_to @achat, notice: 'Achat was successfully created.' }\n format.json { render :show, status: :created, location: @achat }\n else\n format.html { render :new }\n format.json { render json: @achat.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @tattoo = Tattoo.new(params[:tattoo])\n\n respond_to do |format|\n if @tattoo.save\n flash[:notice] = 'Tattoo was successfully created.'\n format.html { redirect_to(@tattoo) }\n format.xml { render :xml => @tattoo, :status => :created, :location => @tattoo }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @tattoo.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def send_theses(disert_theme)\n print \"Sending DT id #{disert_theme.id} with title #{disert_theme.title}.... \"\n\n # prepare theses xml\n xml_to_send = prepare_xml(disert_theme)\n\n # tempfile gen\n tf = Tempfile.new(\"export\");\n tf.write(xml_to_send)\n tf.rewind\n\n\n #TODO all URL to CONSTS\n response = \"\"\n curl = Curl::Easy.new(\"https://theses.cz/auth/th_sprava/neosobni_import_dat.pl\")\n curl.multipart_form_post = true\n curl.http_auth_types = :basic\n curl.username = THESIS_USERNAME\n curl.password = THESIS_PASSWORD\n curl.on_body{ |data| response = data }\n curl.http_post(Curl::PostField.file('soubor', tf.path))\n\n disert_theme.update_attributes(:theses_request => xml_to_send,\n :theses_request_at => Time.now,\n :theses_request_response => response)\n\n if Nokogiri::XML(response).xpath('//commited').empty?\n puts \"Error, check the response\"\n else\n puts \"Sent\"\n disert_theme.update_attribute('theses_request_succesfull', true)\n end\n\n tf.close!\n return true\n end",
"def create\n @my_friend = MyFriend.new(params[:my_friend])\n\n respond_to do |format|\n if @my_friend.save\n format.html { redirect_to(@my_friend, :notice => 'MyFriend was successfully created.') }\n format.xml { render :xml => @my_friend, :status => :created, :location => @my_friend }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @my_friend.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def create\n @whatsapp_stat = WhatsappStat.new(whatsapp_stat_params)\n\n respond_to do |format|\n if @whatsapp_stat.save\n format.html { redirect_to @whatsapp_stat, notice: 'Whatsapp stat was successfully created.' }\n format.json { render :index, status: :created, location: @whatsapp_stat }\n else\n format.html { render :show }\n format.json { render json: @whatsapp_stat.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @whats_on = WhatsOn.new(whats_on_params)\n\n respond_to do |format|\n if @whats_on.save\n format.html { redirect_to @whats_on, notice: 'Whats on was successfully created.' }\n format.json { render action: 'show', status: :created, location: @whats_on }\n else\n format.html { render action: 'new' }\n format.json { render json: @whats_on.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @action = params[:action_military_retreat_army_action]\n \n army = Military::Army.find(@action[:army_id])\n \n raise NotFoundError.new('army not found') if army.nil?\n raise BadRequestError.new('not owner of army') unless army.owner == current_character\n\n army.battle_retreat = @action[:retreat]\n raise BadRequestError.new('not owner of army') unless army.save \n\n respond_to do |format|\n format.html { redirect_to action_path, notice: 'Retreat army action was successfully executed.' }\n format.json { render json: {}, status: :created }\n end\n end",
"def destroy\n @threat = Threat.find(params[:id])\n @threat.destroy\n\n respond_to do |format|\n format.html { redirect_to(threats_url) }\n format.xml { head :ok }\n end\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
DELETE /threats/1 DELETE /threats/1.xml
|
def destroy
@threat = Threat.find(params[:id])
@threat.destroy
respond_to do |format|
format.html { redirect_to(threats_url) }
format.xml { head :ok }
end
end
|
[
"def destroy\n @cheat = Cheat.find(params[:id])\n @cheat.destroy\n\n respond_to do |format|\n format.html { redirect_to(cheats_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @hat = Hat.find(params[:id])\n @hat.destroy\n\n respond_to do |format|\n format.html { redirect_to(hats_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @threat = Threat.find(params[:id])\n @threat.destroy\n\n respond_to do |format|\n format.html { redirect_to threats_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @cattle = Cattle.find(params[:id])\n @cattle.destroy\n\n respond_to do |format|\n format.html { redirect_to(cattles_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @boat = Boat.find(params[:id])\n @boat.destroy\n\n respond_to do |format|\n format.html { redirect_to(boats_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @rescat = Rescat.find(params[:id])\n @rescat.destroy\n\n respond_to do |format|\n format.html { redirect_to(rescats_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @t1 = T1.find(params[:id])\n @t1.destroy\n\n respond_to do |format|\n format.html { redirect_to(t1s_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @attcategory = Attcategory.find(params[:id])\n @attcategory.destroy\n\n respond_to do |format|\n format.html { redirect_to(attcategories_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\r\n @cat1 = Cat1.find(params[:id])\r\n @cat1.destroy\r\n\r\n respond_to do |format|\r\n format.html { redirect_to(cat1s_url) }\r\n format.xml { head :ok }\r\n end\r\n end",
"def destroy\n @beat = Beat.find(params[:id])\n @beat.destroy\n\n respond_to do |format|\n format.html { redirect_to(beats_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @tst1 = Tst1.find(params[:id])\n @tst1.destroy\n\n respond_to do |format|\n format.html { redirect_to(tst1s_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @panchayat = Panchayat.find(params[:id])\n @panchayat.destroy\n\n respond_to do |format|\n format.html { redirect_to(panchayats_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @line_eat = LineEat.find(params[:id])\n @line_eat.destroy\n\n respond_to do |format|\n format.html { redirect_to(line_eats_url) }\n format.xml { head :ok }\n end\n end",
"def del\n @status1 = Status1.find(params[:id])\n @status1.destroy\n\n respond_to do |format|\n format.html { redirect_to(status1s_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @seat = Seat.find(params[:id])\n @seat.destroy\n\n respond_to do |format|\n format.html { redirect_to(seats_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @witch_cat = WitchCat.find(params[:id])\n @witch_cat.destroy\n\n respond_to do |format|\n format.html { redirect_to(witch_cats_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @user_threat = UserThreat.find(params[:id])\n @user_threat.destroy\n\n respond_to do |format|\n format.html { redirect_to(user_threats_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @tconexion = Tconexion.find(params[:id])\n @tconexion.destroy\n\n respond_to do |format|\n format.html { redirect_to(tconexions_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @feat = @person.feats.find(params[:id])\n @feat.destroy\n\n respond_to do |format|\n format.html { redirect_to(person_feats_url(@person)) }\n format.xml { head :ok }\n end\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Takes a range and yields pairs of [value, valid?]
|
def test_range(r)
yield [r.min-1, false]
yield [r.min, true]
yield [r.max, true]
yield [r.max+1, false]
end
|
[
"def valid_ranges\n @ranges.select(&:valid?).map do |range|\n [@tiers, @revue, range.min_and_max, @start, @end].flatten.join(TAB)\n end\n end",
"def range(start_value, end_value, args, block)\n filter << (start_value.to_i..end_value.to_i).to_a\n end",
"def it_should_ensure_value_in_range(attribute, range, opts = {})\n low_message, high_message = get_options!([opts], :low_message, :high_message)\n low_message ||= default_error_message(:inclusion)\n high_message ||= default_error_message(:inclusion)\n\n klass = self.described_type\n min = range.first\n max = range.last\n\n describe \"does not allow #{attribute} to be less than #{min}\" do\n v = min - 1\n assert_bad_value(klass, attribute, v, low_message)\n end\n\n describe \"allows #{attribute} to be #{min}\" do\n v = min\n assert_good_value(klass, attribute, v, low_message)\n end\n\n describe \"does not allow #{attribute} to be more than #{max}\" do\n v = max + 1\n assert_bad_value(klass, attribute, v, high_message)\n end\n\n describe \"allows #{attribute} to be #{max}\" do\n v = max\n assert_good_value(klass, attribute, v, high_message)\n end\n end",
"def check_range(name, range, value, unit = '')\n grace = (-0.00001..0.00001)\n str = value.to_s\n value = value.value if value.is_a?(Sass::Script::Value::Number)\n return value if range.include?(value)\n return range.first if grace.include?(value - range.first)\n return range.last if grace.include?(value - range.last)\n raise ArgumentError.new(\n \"#{name} #{str} must be between #{range.first}#{unit} and #{range.last}#{unit}\")\n end",
"def validate_discount_range_values\n curr_val = curr_per = 0\n first = true\n discount_values.each do |val, per|\n if per > 100\n self.errors.add(:discount, I18n.t(\"activerecord.errors.messages.invalid_range_percentage\"))\n break\n end\n unless first\n if val <= curr_val or per <= curr_per\n self.errors.add(:discount, I18n.t(\"activerecord.errors.messages.invalid_range_secuence\"))\n break\n end\n end\n first = false\n curr_val, curr_per = val, per\n end\n end",
"def test_endlessRange_valid\n assert (0..).valid? # This raises the uncapturable SyntaxError for Ruby 2.5 and before anyway.\n assert (0...).valid?\n refute (true..).valid?\n assert (nil...).valid? # false in Ver.1 (Ruby 2.6)\n assert (nil..nil).valid? # false in Ver.1 (Ruby 2.6)\n assert (Float::INFINITY..Float::INFINITY).valid?\n assert (-Float::INFINITY..Float::INFINITY).valid?\n assert (-Float::INFINITY...Float::INFINITY).valid?\n end",
"def valid?\n (@ranges && !@ranges.empty?)\n end",
"def allowable_values(value_range)\n\t allowable_values = value_range.to_a\n\n\t self.constraints.each do |constraint|\n\t\tallowable_values = constraint.allowable_values(allowable_values)\n\t end\n\n\t return allowable_values\n\tend",
"def each \n @range.each {|val| yield val }\n end",
"def range_for(*args)\n range_pairs(p_max(*args), p_min(*args))\n end",
"def range\n unless value.is_a? Range\n errors.add(:value, :range)\n end\n end",
"def run(lower, upper, validator)\n count = 0\n current_value = lower - 1\n\n # Each iteration, we skip to the next valid number in range and check whether\n # it satisfies the validator.\n while (current_value = step(current_value)) <= upper\n count += 1 if validator.call(current_value)\n end\n\n count\nend",
"def visit_range(node); end",
"def between(min, max, &block)\n return false unless (1..min).all? { try(&block) }\n (min..max).all? { try(&block) } # only using all?'s shortcut property\n true\n end",
"def in_range(a, b)\n return ((a >= 20 && a <= 30) || (b >= 20 && b <= 30));\t\nend",
"def match_range(range, match); end",
"def test_range\n assert_equal(2, @closed_range_01.count)\n assert_equal(2, @open_range_01.count)\n refute((PowerOfThree.new(-10)...PowerOfThree.new(2)).include?(Rational(1,2)))\n refute((PowerOfThree.new(-10)...PowerOfThree.new(2)).include?(Rational(1,3)))\n assert_nothing_raised(){(PowerOfThree.new(-10)...PowerOfThree.new(2)).each()}\n assert_equal(2, Range.new(PowerOfThree.new(0),PowerOfThree.new(1)).count()) \n end",
"def in_range(lower, upper)\n Proc.new { |answer|\n answer.between?(lower, upper) ? range_answer = answer : range_answer = nil\n [range_answer, \"#{answer} must be between #{lower} and #{upper}.\"]\n }\nend",
"def should_ensure_value_in_range(attribute, range, opts = {})\n message, low_message, high_message = get_options!([opts],\n :message,\n :low_message,\n :high_message)\n matcher = ensure_inclusion_of(attribute).\n in_range(range).\n with_message(message).\n with_low_message(low_message).\n with_high_message(high_message)\n should matcher.description do\n assert_accepts matcher, subject\n end\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
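A minimal usage sketch for the test_range helper in the row above; the validator (plain range inclusion) is an assumption added for illustration and is not part of the original snippet.

def test_range(r)
  # Yield boundary probes as [value, expected_validity] pairs.
  yield [r.min - 1, false]
  yield [r.min, true]
  yield [r.max, true]
  yield [r.max + 1, false]
end

# Hypothetical validator: simple range inclusion.
allowed = 1..10

test_range(allowed) do |value, expected|
  actual = allowed.cover?(value)
  puts "#{value}: expected #{expected}, got #{actual}"
end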
Add an +identifier+ / +element+ pair which will be used for looking up unresolved identifiers
|
def add_identifier(ident, element)
map_entry = @identifier_map[ident]
if map_entry
if map_entry.is_a?(Array)
map_entry << element
else
@identifier_map[ident] = [map_entry, element]
end
else
@identifier_map[ident] = element
end
end
|
[
"def add_identifier(val)\n @identifiers.push( self.class.normalize_id(val) )\n end",
"def add_identifier(val)\n return if val.nil?\n @identifiers.push(self.class.normalize_id(val)) unless @identifiers.index(self.class.normalize_id(val))\n end",
"def identifier_map(id, name, id_map)\n if !id_map.include? id\n id_map[id] = name\n end \nend",
"def identifier_id_pair_from_element_uri(pid, eu)\n peu = eu.gsub(\"gdc/md/#{pid}/obj/\",\"\")\n peu = strip_enclosing_chars_from_element_uri(peu)\n id = peu.scan(/[0-9]+/)[1]\n uri = eu.gsub(/\\/elements/,\"\")\n uri = uri.gsub(/\\?id =[0-9]+/,\"\")\n uri = strip_enclosing_chars_from_element_uri(uri)\n identifier = identifier(uri)\n return [identifier, id]\n end",
"def insert_identifier(opts={})\n type = nil\n if !opts[:identifierType].nil?\n type = opts[:identifierType]\n end\n value = nil\n if !opts[:identifierValue].nil?\n value = opts[:identifierValue]\n end\n node = MedusaPremis::Datastream::RepresentationObjectDs.identifier_template(type, value)\n nodeset = self.find_by_terms(:identifier)\n\n unless nodeset.nil?\n if nodeset.empty?\n self.ng_xml.root.add_child(node)\n index = 0\n else\n nodeset.after(node)\n index = nodeset.length\n end\n # deprecated...\n # self.dirty = true\n end\n return node, index\n end",
"def add_element(key,val)\n de(key,'{'+val+'}')\n end",
"def add_to_element_hash(race, element)\n @@element_hash[race] = element\n end",
"def element_with_id!(tag, *args)\n attrs = { 'id' => :id }\n attrs = attrs.merge!(args.pop) if args.last.respond_to?(:fetch)\n args.push(Proc.new) if block_given?\n args.push attrs\n jig = element!(tag, *args)\n jig.eid = \"x#{newjig.object_id}\"\n jig.plug!(:id, jig.eid)\n end",
"def add_info_on_tag_ids\n desc_hash = Notion.where(locale: @lecture.locale || I18n.default_locale)\n .pluck(:title, :tag_id, :aliased_tag_id)\n .map { |x| [x.first.downcase, x.second || x.third] }\n .select { |x| x.first.in?(@content_descriptions.map(&:downcase)) }\n .to_h\n @content.each do |c|\n c['tag_id'] = desc_hash[c['description'].downcase]\n end\n end",
"def insert_identifier(opts={})\n type = nil\n if !opts[:identifierType].nil?\n type = opts[:identifierType]\n end\n value = nil\n if !opts[:identifierValue].nil?\n value = opts[:identifierValue]\n end\n node = MedusaPremis::Datastream::AgentDs.identifier_template(type, value)\n nodeset = self.find_by_terms(:identifier)\n\n unless nodeset.nil?\n if nodeset.empty?\n self.ng_xml.root.add_child(node)\n index = 0\n else\n nodeset.after(node)\n index = nodeset.length\n end\n # deprecated... \n # self.dirty = true\n end\n return node, index\n end",
"def associate_by_identity(ast, comments); end",
"def id_elements(*element_names)\n options = element_names.extract_options!\n\n patterned_elements \"\\#%{element_name}\", *element_names, options.slice(:element_array)\n end",
"def add_element_name element_name\n @element_names ||= []\n @element_names << element_name\n end",
"def insert_identifier(opts={})\n node = Medusa::Premis::Agent.identifier_template\n nodeset = self.find_by_terms(:agentIdentifier)\n \n unless nodeset.nil?\n if nodeset.empty?\n self.ng_xml.root.add_child(node)\n index = 0\n else\n nodeset.after(node)\n index = nodeset.length\n end\n self.dirty = true\n end\n \n return node, index\n end",
"def add_ids(array)\n array.each do |element|\n element[:id].nil? and element[:id] = get_element(element.href)\n end\n array\n end",
"def visit_ident(node); end",
"def attlistdecl(element, pairs, contents)\n end",
"def register_identifier(klass:, identifier:)\n raise Emittance::InvalidIdentifierError unless valid_identifier? identifier\n raise Emittance::IdentifierCollisionError if identifier_reserved? identifier, klass\n\n identifier_to_klass_mappings[identifier] = klass\n\n klass_to_identifier_mappings[klass] ||= empty_collection\n klass_to_identifier_mappings[klass] << identifier\n\n klass\n end",
"def ident_key\n :ident\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
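A self-contained sketch of the collision behavior implemented in the row above: the first element stored under an identifier stays a scalar, and later additions promote the entry to an array. The wrapper class and sample values are assumptions for illustration.

class IdentifierMap
  def initialize
    @identifier_map = {}
  end

  # Same logic as the method above: a scalar for a unique identifier,
  # an Array once the identifier is registered more than once.
  def add_identifier(ident, element)
    entry = @identifier_map[ident]
    if entry.nil?
      @identifier_map[ident] = element
    elsif entry.is_a?(Array)
      entry << element
    else
      @identifier_map[ident] = [entry, element]
    end
  end

  def lookup(ident)
    @identifier_map[ident]
  end
end

map = IdentifierMap.new
map.add_identifier("node1", :a)
map.lookup("node1")            # => :a
map.add_identifier("node1", :b)
map.lookup("node1")            # => [:a, :b]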
Tries to resolve the given +unresolved_refs+. If resolution is successful, the proxy object will be removed, otherwise there will be an error description in the problems array. In case the resolved target element's type is not valid for the given feature, the +target_type_error+ flag will be set on the unresolved reference. Returns an array of the references which are still unresolved. Options:
 :problems            an array to which problems will be appended
 :on_resolve          a proc which will be called for every successful resolution; receives the unresolved reference as well as the new target element
 :use_target_type     use the expected target type to narrow the set of possible targets (i.e. ignore targets with the wrong type)
 :failed_resolutions  a Hash which will receive an entry for each failed resolution for which at least one target element was found (wrong target type, or target not unique); the hash key is the uref, the hash value is the target element or the Array of target elements
|
def resolve(unresolved_refs, options={})
problems = options[:problems] || []
still_unresolved_refs = []
failed_resolutions = options[:failed_resolutions] || {}
unresolved_refs.each do |ur|
if @identifier_resolver
target = @identifier_resolver.call(ur.proxy.targetIdentifier)
else
target = @identifier_map[ur.proxy.targetIdentifier]
end
target = [target].compact unless target.is_a?(Array)
if options[:use_target_type]
feature = ur.element.class.ecore.eAllReferences.find{|r| r.name == ur.feature_name}
target = target.select{|e| e.is_a?(feature.eType.instanceClass)}
end
if target.size == 1
status = ResolutionHelper.set_uref_target(ur, target[0])
if status == :success
options[:on_resolve] && options[:on_resolve].call(ur, target[0])
elsif status == :type_error
ur.target_type_error = true
problems << type_error_message(target[0])
still_unresolved_refs << ur
failed_resolutions[ur] = target[0]
end
elsif target.size > 1
problems << "identifier #{ur.proxy.targetIdentifier} not uniq"
still_unresolved_refs << ur
failed_resolutions[ur] = target
else
problems << "identifier #{ur.proxy.targetIdentifier} not found"
still_unresolved_refs << ur
end
end
still_unresolved_refs
end
|
[
"def unresolve_refs(rrefs)\r\n # make sure any removed_urefs have been removed, \r\n # otherwise they will be removed later even if this method actually re-added them\r\n unresolved_refs\r\n rrefs.each do |rr|\r\n ur = rr.uref\r\n refs = ur.element.getGeneric(ur.feature_name)\r\n if refs.is_a?(Array)\r\n index = refs.index(rr.target)\r\n ur.element.removeGeneric(ur.feature_name, rr.target)\r\n ur.element.addGeneric(ur.feature_name, ur.proxy, index)\r\n else\r\n ur.element.setGeneric(ur.feature_name, ur.proxy)\r\n end\r\n @unresolved_refs << ur\r\n end\r\n end",
"def unresolved_refs\r\n @unresolved_refs ||= collect_unresolved_refs\r\n if @removed_urefs\r\n @unresolved_refs -= @removed_urefs\r\n @removed_urefs = nil\r\n end\r\n @unresolved_refs\r\n end",
"def unresolve_external\r\n return if @resolved_refs.nil?\r\n raise \"can not unresolve, missing fragment information\" if @resolved_refs == :dirty || @resolved_refs[:unknown]\r\n rrefs = @resolved_refs.values.flatten\r\n @resolved_refs = {}\r\n unresolve_refs(rrefs)\r\n end",
"def mark_resolved(uref, target_fragment, target)\r\n @resolved_refs = {} if @resolved_refs.nil? || @resolved_refs == :dirty\r\n target_fragment ||= :unknown\r\n if target_fragment != self\r\n @resolved_refs[target_fragment] ||= []\r\n @resolved_refs[target_fragment] << ResolvedReference.new(uref, target)\r\n end\r\n @removed_urefs ||= []\r\n @removed_urefs << uref\r\n end",
"def unresolved_refs\r\n @fragments.collect{|f| f.unresolved_refs}.flatten\r\n end",
"def resolve_local(options={})\r\n resolver = RGen::Instantiator::ReferenceResolver.new\r\n index.each do |i|\r\n resolver.add_identifier(i[0], i[1])\r\n end\r\n @unresolved_refs = resolver.resolve(unresolved_refs, :use_target_type => options[:use_target_type])\r\n end",
"def resolve\n debug do\n <<~MSG\n Filtering out #{conflicts.size} conflicts from #{base_local_diffs.size + base_remote_diffs.size} diffs\n Remaining diffs #{unconflicted_diffs.size}\n MSG\n end\n\n conflicts.each_with_object(unconflicted_diffs) do |conflict, diffs|\n # TODO: this will result in diffs being out of order from their\n # original order. diffs should be flagged as conflicted and\n # this method should instead remove the conflicted flag.\n diffs.concat(@conflict_resolver.resolve(conflict))\n # TODO: if the conflict is resolved, it should be removed from the\n # @conflicts array.\n end\n end",
"def replace_references(refs)\n set = doc.xpath('.//references')\n\n if set\n inner_html = create_references_list_items(refs)\n replace_references_node(set, inner_html)\n end\n\n nil\n end",
"def fetch_refs(fetch_options = {})\n before_fetch = Hash[@repository.refs.collect { |r| [r.name, r.target_id] }]\n\n # Prune is not supported by rugged! Deleting all remote refs and re-fetch\n delete_all_remote_branches\n delete_all_tags\n\n @repository.remotes['origin'].fetch(nil, fetch_options)\n\n delete_all_remote_branches @repository.config['daun.branch.blacklist'].split\n delete_all_tags @repository.config['daun.tag.blacklist'].split\n if @repository.config['daun.tag.limit'].to_i > -1\n keep_new_tags @repository.config['daun.tag.limit'].to_i\n end\n\n after_fetch = Hash[@repository.refs.collect { |r| [r.name, r.target_id] }]\n\n Daun::RefsDiff.new(before_fetch, after_fetch)\n end",
"def resolve_references(value)\n return value if @properties_to_resolve.empty?\n\n # Make sure we have an array, even if there's just one record to resolve\n was_wrapped = false\n if value.is_a?(Array)\n records = value\n else\n records = ASUtils.wrap(value)\n was_wrapped = true\n end\n\n # Any JSONModels can become vanilla hashes\n records = records.map {|value|\n if value.is_a?(JSONModelType)\n value.to_hash(:trusted)\n else\n value\n end\n }\n\n # We'll work through our records breadth-first, first resolving non-nested\n # properties, then those that are nested two-levels deep, then\n # three-levels deep, and so on.\n #\n # With each iteration, we try to group together resolve requests for\n # common record types to get as much bang for our SQL buck as possible.\n depth = 1\n while true\n properties_for_current_depth = @properties_to_resolve.select {|property| property.length == depth}\n\n break if properties_for_current_depth.empty?\n\n refs_to_resolve = find_matching_refs(records, properties_for_current_depth)\n\n resolved = fetch_records_by_uri(refs_to_resolve.map {|ref| ref['ref']})\n\n refs_to_resolve.each do |ref|\n uri = ref['ref']\n ref['_resolved'] = resolved.fetch(uri) if resolved.has_key?(uri)\n end\n\n depth += 1\n end\n\n # Return the same type we were given\n was_wrapped ? records[0] : records\n end",
"def parse_references(refs)\n\t\t\t\treferences = []\n\t\t\t\trefs.each do |r|\n\t\t\t\t\t# We do not want references that are URLs\n\t\t\t\t\tnext if r.ctx_id == \"URL\"\n\t\t\t\t\t# Format the reference as it is saved by Nessus\n\t\t\t\t\treferences << \"#{r.ctx_id}-#{r.ctx_val}\"\n\t\t\t\tend\n\t\t\t\treturn references\n\t\t\tend",
"def unresolved\n dependencies.inject([]){ |list,dep| \n (!dep.met && !dep.resolved) ? (list << dep) : (list)\n }\n end",
"def resolve_refs!\n name_of_last_ref = nil\n lookup_refs.each_with_index do |ref,z|\n @settings = two_layer_merge(@settings, ref.settings)\n @children.merge!(ref.children)\n name_of_last_ref = ref.name\n end\n if @settings[:path].nil? && !name_of_last_ref.nil?\n @settings[:path] = name_of_last_ref.to_s\n end\n @settings.delete :ref\n return self\n end",
"def resolve\n if !refs.nil? and refs.select { |ref| File.file? ref }.any?\n paths = refs.select { |ref| File.file? ref }.map { |path| File.expand_path(path) }\n elsif refs and refs.kind_of? Array\n paths, gems = GemsResolver.new(refs).call\n else\n paths = Dir.glob(File.join(\".\", \"lib/**/*.rb\")).map { |path| File.expand_path(path) }\n end\n\n { paths: paths, gems: gems || [] }\n end",
"def resolve_top_level_references(data)\n if data.is_a?(Array)\n data.flat_map { |elem| resolve_top_level_references(elem) }\n elsif reference?(data)\n partially_resolved = data.transform_values do |v|\n resolve_references(v)\n end\n fully_resolved = resolve_single_reference(partially_resolved)\n # The top-level reference may have returned more references, so repeat the process\n resolve_top_level_references(fully_resolved)\n else\n data\n end\n end",
"def resolve_notification_references\n run_context.immediate_notifications(self).each { |n| n.resolve_resource_reference(run_context.resource_collection) }\n run_context.delayed_notifications(self).each {|n| n.resolve_resource_reference(run_context.resource_collection) }\n end",
"def resolve\n @resolved = []\n @unresolved = []\n @circular = []\n @outputs.each do |onode|\n rdep onode\n end\n [@resolved, @circular.empty? ? nil : @circular]\n end",
"def resolve_notification_references\n run_context.before_notifications(self).each { |n|\n n.resolve_resource_reference(run_context.resource_collection)\n }\n run_context.immediate_notifications(self).each { |n|\n n.resolve_resource_reference(run_context.resource_collection)\n }\n run_context.delayed_notifications(self).each {|n|\n n.resolve_resource_reference(run_context.resource_collection)\n }\n end",
"def remove_refs(remote, refs: nil)\n dir = repo_base_dir_for(remote)\n return nil unless ::File.directory?(dir)\n results = []\n lock_repo(dir, remote) do |repo_lock|\n refs = repo_lock.refs if refs.nil? || refs == :all\n Array(refs).each do |ref|\n ref_data = repo_lock.delete_ref!(ref)\n results << RefInfo.new(ref, ref_data) if ref_data\n end\n end\n results.sort\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
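A hedged sketch of how the resolve call above might be driven. The resolver and unresolved_refs are assumed to come from an earlier instantiation step (as in the RGen-based snippet among the negatives), and only the options documented above are used.

problems = []
failed   = {}

still_unresolved = resolver.resolve(
  unresolved_refs,
  :problems           => problems,   # collects error descriptions
  :use_target_type    => true,       # ignore candidates with the wrong type
  :failed_resolutions => failed,     # uref => target element(s) that were rejected
  :on_resolve         => proc { |uref, target|
    puts "resolved #{uref.proxy.targetIdentifier}"
  }
)

puts "#{still_unresolved.size} reference(s) still unresolved"
problems.each { |p| puts "  #{p}" }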
Return all of the fences straight from mongo
|
def get_all_fences
@coll.find.to_a
end
|
[
"def collections\n records = {}\n itql(\"select $object $title from <#ri> where ($object <fedora-model:label> $title and $object <fedora-model:hasModel> <info:fedora/islandora:collectionCModel>)\").each do |rec|\n records[rec.object.sub('info:fedora/', '')] = rec.title\n end\n\n return records\n end",
"def return_full_documents\n d = Documents.new(@documents)\n i = 0\n while i < divided_documents_are_scored.length\n texts = divided_documents_are_scored[i]\n puts \"Word:\" + texts[0]\n puts \"TF socre:\" + texts[3]\n puts \"Document:\" + texts[1]\n #puts d.full_documents([texts[1]])\n # if texts[2][0] != nil\n puts \"Document:\"\n puts texts[2][0]\n # puts d.full_documents([texts[2][0]])\n i+=1\n end\n #end\n end",
"def fetchAllDocuments(collection)\n return collection.find({})\n end",
"def get_pdfs_in_mongo\n @mongo.get_all_docs_by :filename\n end",
"def familiar_documents\n self.known_documents.map {|doc_id| Document.find_by(id: doc_id)}\n end",
"def index\n @fine_collections = FineCollection.all\n end",
"def collections\n wayfinder.decorated_collections\n end",
"def documents\n authorizations.map(&:document)\n end",
"def everything\n list = ChillDB::List.load(JSON.parse(@@database.http('_all_docs?include_docs=true').get.body))\n list.database = @@database\n return list\n end",
"def find_entities\n []\n end",
"def fetch_scenarios\n collection = connect_to_mongo\n scenarios = {}\n collection.find.each { |doc| scenarios.merge!(doc) }\n scenarios.delete('_id')\n scenarios\nend",
"def documents\n results[\"results\"].map do |attributes|\n doc = Factory.from_db(criteria.klass, attributes[\"obj\"], criteria)\n doc.attributes[\"geo_near_distance\"] = attributes[\"dis\"]\n doc\n end\n end",
"def eds_documents(data)\n docs = data&.dig('response', 'docs')\n docs = Array.wrap(docs).compact\n factory = blacklight_config.document_factory\n model_opt = { lens: blacklight_config.lens_key }\n docs.map { |doc| factory.build(doc, data, model_opt) }\n end",
"def documents\n cursor_document[FIRST_BATCH]\n end",
"def object_doc_get_all(collection_name)\n if @collections[collection_name]\n @collections[collection_name].values\n else\n []\n end\n end",
"def index\n @influences = Influence.all\n end",
"def index\n @offences = Offence.all\n end",
"def all; @docs.values end",
"def index\n @scope_documents = ScopeDocument.all\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
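For context, a minimal sketch of the collection handle the one-liner above expects, using the Ruby mongo driver; the connection string, database name, and collection name are assumptions.

require 'mongo'

# Assumed local deployment; adjust host, database and collection as needed.
client = Mongo::Client.new('mongodb://127.0.0.1:27017/fences_db')
coll   = client[:fences]

# Equivalent of get_all_fences: materialise every document in the collection.
all_fences = coll.find.to_a
puts "loaded #{all_fences.size} fences"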
Create a new dataset edit
|
def create_edit(commit, key, value)
ActiveRecord::Base.transaction do
DatasetEdit.create!(
commit_id: commit.id,
key: key,
value: value
)
end
end
|
[
"def create\n\n ## Make sure we have the required fields.\n # if get_with_default(@data, :name, \"\").empty? or \n # get_with_default(@data, :summary, \"\").empty? or\n # get_with_default(@data, :description, \"\").empty?\n # respond_with_error \"You must provide a name, summary, and description.\",\n # new_dataset_path\n # return\n # end\n\n ## Create the new entry.\n @data[:creator] = current_user\n @dataset = Dataset.new(@data)\n begin\n ActiveRecord::Base.transaction do\n @dataset.save!\n respond_with_success get_redirect_path(dataset_path(@dataset))\n end\n rescue => e\n respond_with_error \"There was an error saving the dataset entry: #{e}.\",\n 'new', true, false\n end\n end",
"def create\n @dataset = Dataset.new(dataset_params)\n\n if @dataset.save\n redirect_to @dataset, notice: 'Dataset was successfully created.'\n else\n redirect_to datasets_path, notice: 'Dataset could not be created.'\n end\n end",
"def create\n @dataset = Dataset.new(params[:dataset])\n\n respond_to do |format|\n if @dataset.save\n format.html { redirect_to @dataset, notice: 'Dataset was successfully created.' }\n format.json { render json: @dataset, status: :created, location: @dataset }\n else\n format.html { render action: \"new\" }\n format.json { render json: @dataset.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @dataset = Dataset.new(params[:dataset])\n current_user.datasets << @dataset\n\n respond_to do |format|\n if @dataset.save\n format.html { redirect_to @dataset, notice: 'Dataset was successfully created.' }\n format.json { render json: @dataset, status: :created, location: @dataset }\n else\n format.html { render action: \"new\" }\n format.json { render json: @dataset.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @dataset = Dataset.new(params[:dataset])\n\n respond_to do |format|\n if @dataset.save\n format.html { redirect_to(@dataset, :notice => 'Dataset was successfully created.') }\n format.xml { render :xml => @dataset, :status => :created, :location => @dataset }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @dataset.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def create\n @dataset = Dataset.new(params[:dataset])\n\n respond_to do |format|\n if @dataset.save\n flash[:notice] = 'Dataset was successfully created.'\n format.html { redirect_to(@dataset) }\n format.xml { render :xml => @dataset, :status => :created, :location => @dataset }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @dataset.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def new\n @dataset = current_user.datasets.build\n render layout: false\n end",
"def create\n @dataset = Dataset.new(params[:dataset])\n\n respond_to do |format|\n if @dataset.save\n format.html { redirect_to(@dataset, flash: { success: 'Dataset was successfully created.' }) }\n format.xml { render xml: @dataset, status: :created, location: @dataset }\n else\n format.html { render action: 'new' }\n format.xml { render xml: @dataset.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @data_set = DataSet.new(data_set_params)\n respond_to do |format|\n if @data_set.save\n format.html { redirect_to @data_set, notice: 'Data set was successfully created.' }\n format.json { render :show, status: :created, location: @data_set }\n else\n format.html { render :new }\n format.json { render json: @data_set.errors, status: :unprocessable_entity }\n end\n end\n end",
"def new\n @dataset = @user.datasets.build\n render :layout => 'dialog'\n end",
"def create_new_object(data_set)\n data_input_errors = enter_object_data data_set\n click_save_button\n when_exists(delete_button, Config.short_wait)\n data_input_errors\n end",
"def create\n @data_set = DataSet.new(params[:data_set])\n\n respond_to do |format|\n if @data_set.save\n format.html { redirect_to @data_set, notice: 'Data set was successfully created.' }\n format.json { render json: @data_set, status: :created, location: @data_set }\n else\n format.html { render action: \"new\" }\n format.json { render json: @data_set.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @qdataset = Qdataset.new(qdataset_params)\n\n respond_to do |format|\n if @qdataset.save\n format.html { redirect_to @qdataset, notice: 'Qdataset was successfully created.' }\n format.json { render :show, status: :created, location: @qdataset }\n else\n format.html { render :new }\n format.json { render json: @qdataset.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create_new_object(data_set)\n data_input_errors = enter_object_data data_set\n click_top_save_button\n when_exists(delete_button, Config.short_wait)\n data_input_errors\n end",
"def create\n\t\t@data_set = DataSet.new(params[:data_set])\n\n\t\trespond_to do |format|\n\t\t\tif @data_set.save\n\t\t\t\tformat.html { redirect_to @data_set, notice: 'Data set was successfully created.' }\n\t\t\t\tformat.json { render json: @data_set, status: :created, location: @data_set }\n\t\t\telse\n\t\t\t\tformat.html { render action: \"new\" }\n\t\t\t\tformat.json { render json: @data_set.errors, status: :unprocessable_entity }\n\t\t\tend\n\t\tend\n\tend",
"def add_dataset(dataset) @datasets << dataset; end",
"def create_new_acquisition(data_set)\n enter_acquisition_info_data data_set\n click_save_button\n when_exists(delete_button, Config.short_wait)\n end",
"def create_ds_set_form_vars\n @edit = {}\n @edit[:new] = {}\n @edit[:key] = \"ontap_file_share_create_ds__#{@sfs.id}\"\n @edit[:new][:ds_name] = @sfs.default_datastore_name\n @edit[:hosts] = @sfs.applicable_hosts\n end",
"def create_project_data_set\n @data_set =\"\"\n parent.managed_repository do\n if params[:name]\n if Voeis::DataSet.first(:name => params[:name]).nil?\n if params[:type].nil? || params[:type].empty?\n params[:type] = \"default\"\n end\n if params[:description].nil?\n params[:description] = \"\"\n end\n debugger\n @data_set = Voeis::DataSet.create(:name=>params[:name], :type=>params[:type], :description=>params[:description])\n else\n @data_set = {\"error\" => \"The name: #{params[:name]} already exists as data set.\"}\n end\n else\n @data_set = {\"error\" => \"The name parameter is required to create a new data set.\"}\n end\n end\n respond_to do |format|\n format_response(@data_set, format)\n end\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Finds all commits belonging to a dataset with an edit to the given key.
|
def find_commits(dataset, edit_key)
dataset.commits
.joins(:dataset_edits)
.where(dataset_edits: { key: edit_key })
.order(updated_at: :desc)
end
|
[
"def find_commits(dataset, edit_key)\n dataset.commits\n .joins(:dataset_edits)\n .where(dataset_edits: { key: edit_key })\n .order(updated_at: :desc)\n end",
"def find_edit(dataset, edit_key)\n commits = find_commits(dataset, edit_key)\n\n return nil unless commits.any?\n\n DatasetEdit\n .where(commit_id: commits.pluck(:id), key: edit_key)\n .order(updated_at: :desc)\n .first\n end",
"def destroy_edits(dataset, edit_key)\n commits = find_commits(dataset, edit_key)\n\n return if commits.none?\n\n commits.each do |commit|\n if commit.dataset_edits.one?\n commit.destroy\n else\n commit.dataset_edits.find_by(key: edit_key).destroy\n end\n end\n end",
"def destroy_edits(dataset, edit_key)\n commits = find_commits(dataset, edit_key)\n\n return if commits.none?\n\n commits.each do |commit|\n if commit.dataset_edits.one?\n commit.destroy\n else\n commit.dataset_edits.find_by_key(edit_key).destroy\n end\n end\n end",
"def get_order_edit_by_key(project_key, key, opts = {})\n data, _status_code, _headers = get_order_edit_by_key_with_http_info(project_key, key, opts)\n data\n end",
"def comments(project_id, key_id, params = {})\n c_r Lokalise::Collections::KeyComment, :all, [project_id, key_id], params\n end",
"def get_commit(key, id)\n key = key_for(key)\n commit = git.commit(id)\n blob = commit.tree / key\n decode(blob.data) if blob\n end",
"def commits\n return @commits if @commits\n # perform search and memoize result\n commits_from_search = []\n commits_from_search = parent_report_group.commits if parent_report_group\n find_by_hash.each do |field, args|\n commits_from_search = self.send(\"find_by_#{field}\".to_sym, commits_from_search, args)\n end\n @commits = commits_from_search\n end",
"def changelogs\n changelog.entries(pattern: \"*#{key.digest}*\")\n end",
"def key_changes_by_key\n key_changes.group_by(&:first)\n end",
"def select_commits_matching_dataset_criteria(grit_commits, dataset_filter_options)\n return [] if grit_commits.empty?\n repo = GitRepo.first(:name => grit_commits.first.repo_name)\n raise \"This commit does not have a repo_name set on it: #{grit_commits.first.sha}\" unless repo\n commits_dataset = Commit.select(:sha).filter(:sha => grit_commits.map(&:sha), :git_repo_id => repo.id).\n filter(dataset_filter_options)\n commit_ids = Set.new(commits_dataset.all.map(&:sha))\n grit_commits.select { |grit_commit| commit_ids.include?(grit_commit.sha) }\n end",
"def find(key)\n entity = @entity_set.new_entity\n key_property = entity.get_property(entity.primary_key)\n key_property.value = key\n\n pathname = \"#{entity_set.name}(#{key_property.url_value})\"\n query = [pathname, assemble_criteria].compact.join('?')\n execute(query).first\n end",
"def commit_search(m, repo, id)\n uri = \"/repos/#{User}/#{repo}/commits/#{id}\"\n # Request the commit from GitHub and store the info\n res = request(uri, Net::HTTP::Get)\n m.reply \"Git commit query for commit #{id} on #{User}/#{repo}\"\n m.reply \"Commit author: #{res[\"commit\"][\"author\"][\"name\"]} <#{res[\"commit\"][\"author\"][\"email\"]}>\"\n m.reply \"Commit date: #{res[\"commit\"][\"author\"][\"date\"]}\"\n m.reply \"Commit message: #{res[\"commit\"][\"message\"]}\"\n m.reply \"Modified file listing:\"\n # Iterate through all file statistics\n res[\"files\"].each do |file|\n m.reply \"#{file[\"filename\"]} - #{file[\"changes\"]} changes (#{file[\"additions\"]}+, #{file[\"deletions\"]}-)\"\n end\n end",
"def filtered_by_key(key)\n where(key: key)\n end",
"def get_changes(dbh, table, cols, keys, id)\n id_parts = id.split(/:/)\n where = ''\n keys.split(/,/).each_with_index do |key, i|\n where << \" AND \" if where != ''\n where << \"#{key}=#{dbh.quote(id_parts[i])}\"\n end\n row = dbh.select_one(\"SELECT #{cols} FROM #{table} WHERE #{where}\")\n row_last = dbh.select_one(\"SELECT #{cols} FROM #{table}_last WHERE #{where}\")\n changes = []\n cols.split(/,/).each_with_index do |col, i|\n changes.push([col, row[i], row_last[i]]) if \"#{row[i]}\" != \"#{row_last[i]}\"\n end\n changes\nend",
"def comments(project_id, key_id, req_params = {})\n name = 'KeyComments'\n params = { query: [project_id, key_id], req: req_params }\n\n data = endpoint(name: name, params: params).do_get\n\n collection name, data\n end",
"def find_all_by_key(key)\n select { |r| r.key.to_s =~ /#{key}/}\n end",
"def fetch_issue_keys(project_key: nil, updated_since: nil)\n query_items = []\n query_items << \"project = \\\"#{project_key}\\\"\" unless project_key.nil?\n query_items << \"updatedDate > \\\"#{updated_since.strftime('%Y-%m-%d %H:%M')}\\\"\" unless updated_since.nil?\n query = query_items.join(\" AND \")\n client.issue_keys_for_query(query)\n end",
"def find_all(hash_key)\n query(hash_key)\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Finds the most recent edit of a key belonging to a dataset.
|
def find_edit(dataset, edit_key)
commits = find_commits(dataset, edit_key)
return nil unless commits.any?
DatasetEdit
.where(commit_id: commits.pluck(:id), key: edit_key)
.order(updated_at: :desc)
.first
end
|
[
"def find_commits(dataset, edit_key)\n dataset.commits\n .joins(:dataset_edits)\n .where(dataset_edits: { key: edit_key })\n .order(updated_at: :desc)\n end",
"def find_commits(dataset, edit_key)\n dataset.commits\n .joins(:dataset_edits)\n .where(dataset_edits: { key: edit_key })\n .order(updated_at: :desc)\n end",
"def get_latest(key)\n select(key) { { :limit => \"LIMIT 1\" } }.first[1]\n end",
"def last_focus\n @key_value.get('last_focus')\n end",
"def last\n last_key.get\n end",
"def last_id\n # hashs are ordered, so just return the latest entry\n @entries.keys.last\n end",
"def getChangeFromLatest( key, startDate )\n endDate = lastDate( key )\n return getChange( key, startDate, endDate )\n end",
"def stored_data_last_key\n self.stored_data.keys.sort.last || nil\n end",
"def lastDate( key = nil )\n last = @data.keys.max();\n if( key == nil ) then\n return last;\n end\n sortDates();\n @sortedDates.reverse_each(){\n |date|\n if( getByD( key, date ) != nil )then \n return date;\n end;\n }\n return nil;\n end",
"def last_edited\n @instance.client.getLastEdited(padID: @id)[:lastEdited]\n end",
"def last_edited_time\n begin\n $redis.hget 'last_edit:instrument', self.id\n rescue\n nil\n end\n end",
"def find(key)\n @data[key]\n end",
"def last_keystroke\n # Looks if we are still in the middle of our last keystroke, and returns it if necessary.\n @keystrokes.reverse_each do |keystroke|\n if(keystroke[:time_released] == nil)\n return keystroke\n end\n end\n # If all the logged keystrokes were released, return the last one in the array.\n @keystrokes.last\n end",
"def current_key(keyspace)\n keyspace.key(last_modified_time)\n end",
"def key_changes_by_key\n key_changes.group_by(&:first)\n end",
"def get_latest_for(changeable)\n my_id = changeable.id\n self.send(\"find_by_#{self.changeable_fk}\", my_id, {:order => 'updated_at DESC', :limit => 1})\n end",
"def last_keys\n\n filter = []\n unless @entity_config[:src][:condition].nil?\n filter << @entity_config[:src][:condition]\n end\n\n filter << %{ \"#{updated_at_field}\" = '#{last_item[updated_at_field]}' }\n\n # Return only private keys values\n query([pk_field], filter).map { |item| item[pk_field] }\n\n end",
"def last_key\n @chunk_reader.last_key\n end",
"def higher_entry(key)\n find(key, Relation::HIGHER)\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
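A brief usage sketch showing how find_edit composes with find_commits from the previous row; the dataset instance and the 'title' key are hypothetical.

# Hypothetical call site: latest edit of the "title" key for a dataset.
latest = find_edit(dataset, 'title')

if latest
  puts "key 'title' last set to #{latest.value.inspect} " \
       "by commit #{latest.commit_id} at #{latest.updated_at}"
else
  puts "no commit has edited 'title' yet"
end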
Removes all dataset edits matching the `edit_key`. If the matching edit is the only dataset edit belonging to a commit, the commit will also be removed.
|
def destroy_edits(dataset, edit_key)
commits = find_commits(dataset, edit_key)
return if commits.none?
commits.each do |commit|
if commit.dataset_edits.one?
commit.destroy
else
commit.dataset_edits.find_by_key(edit_key).destroy
end
end
end
|
[
"def destroy_edits(dataset, edit_key)\n commits = find_commits(dataset, edit_key)\n\n return if commits.none?\n\n commits.each do |commit|\n if commit.dataset_edits.one?\n commit.destroy\n else\n commit.dataset_edits.find_by(key: edit_key).destroy\n end\n end\n end",
"def delete_order_edit_by_key(project_key, key, version, opts = {})\n data, _status_code, _headers = delete_order_edit_by_key_with_http_info(project_key, key, version, opts)\n data\n end",
"def delete(key, opts = {})\n options = setup_commit_options({:message => \"deleted #{key}\"}.merge(opts))\n self[key].tap do\n save(options) {|index| index.delete(key_for(key)) }\n end\n end",
"def remove_edit_lines\n if lines = App.edit_lines\n unless lines.empty?\n puts \"Removing edit_lines\"\n lines.map &:remove\n end\n end\n end",
"def destroy_invalid_history(dataset)\n return if dataset.valid?\n\n dataset.commits.each do |commit|\n next if commit.valid?\n\n if commit.dataset_edits.one?\n commit.destroy\n else\n commit.dataset_edits.each do |edit|\n edit.destroy unless edit.valid?\n end\n end\n end\n end",
"def delete_key(key)\n self.reject! { |x,y| x == key }\n end",
"def delete(key)\n responsible_clients(key).each do |v|\n with_retries { v.logical.delete(wrap_key(key)) }\n end\n end",
"def find_commits(dataset, edit_key)\n dataset.commits\n .joins(:dataset_edits)\n .where(dataset_edits: { key: edit_key })\n .order(updated_at: :desc)\n end",
"def find_edit(dataset, edit_key)\n commits = find_commits(dataset, edit_key)\n\n return nil unless commits.any?\n\n DatasetEdit\n .where(commit_id: commits.pluck(:id), key: edit_key)\n .order(updated_at: :desc)\n .first\n end",
"def delete\n memo_key = key[\"_indices\"]\n commands = [[\"DEL\", key], [\"DEL\", memo_key], [\"DEL\", key[\"_counters\"]]]\n index_list = redis.call(\"SMEMBERS\", memo_key)\n index_list.each do |index_key|\n commands << [\"SREM\", index_key, id]\n end\n model.tracked.each do |tracked_key|\n commands << [\"DEL\", key[tracked_key]]\n end\n\n model.synchronize do\n commands.each do |command|\n redis.queue(*command)\n end\n redis.commit\n end\n\n return self\n end",
"def find_commits(dataset, edit_key)\n dataset.commits\n .joins(:dataset_edits)\n .where(dataset_edits: { key: edit_key })\n .order(updated_at: :desc)\n end",
"def unset_cmd\n sprintf 'sed -i /%s/d %s', key_set_string, @file\n end",
"def shell_registry_deletekey(key, view)\n key = normalize_key(key)\n # REG DELETE KeyName [/v ValueName | /ve | /va] [/f]\n shell_registry_cmd_result(\"delete \\\"#{key}\\\" /f\", view)\n end",
"def del(key, options = {})\n parameters = validate_key_and_options(key, options)\n return nil if parameters.nil?\n\n key = parameters[0][0]\n layer_name, section = parameters[1]\n\n section = Lorj.defaults.get_meta_section(key) if section.nil?\n section = :default if section.nil?\n section, key = _detect_section(key, section)\n\n return nil if readonly?(key, section)\n\n options = { :keys => [key], :section => section }\n\n options[:index] = index_to_update(layer_name, key, section)\n\n p_del(options)\n end",
"def delete_key(key)\n @bnf_records.delete(key)\n end",
"def abort_current_edit\n ensure_active_edit!\n\n result = api_client.execute(\n api_method: android_publisher.edits.delete,\n parameters: {\n 'editId' => current_edit.data.id,\n 'packageName' => current_package_name\n },\n authorization: auth_client\n )\n\n raise result.error_message.red if result.error?\n\n self.current_edit = nil\n self.current_package_name = nil\n end",
"def delete(key)\n (@kvs_instance.del(safe_key(key)) == 1)\n end",
"def drop_data_matrix(key)\n ids = data_matrix(key).flatten.compact.collect { |da| da.id }\n DataAssociation.where(id: ids).destroy_all\n self\n end",
"def delete_key(key)\n key_list_set = self.key_list_set.reload\n key_list_set.remove(key)\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Address: allows setting the use_another_address_for_shipping attribute, ensuring it is stored as a Boolean value and not a numeric string
|
def use_another_address_for_shipping=(val)
  value = val.is_a?(String) ? (val.to_i > 0) : val
  @use_another_address_for_shipping = value
end
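A short hedged illustration of the coercion, assuming a matching reader for the attribute exists (it is not shown here):

address.use_another_address_for_shipping = "1"  # checkbox value posted from a form
address.use_another_address_for_shipping        # => true
address.use_another_address_for_shipping = "0"
address.use_another_address_for_shipping        # => false
address.use_another_address_for_shipping = true # non-String values are stored as-is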
|
[
"def use_billing_address=(switch)\n self.use_shipping_address = !(switch.to_i == 1)\n end",
"def use_billing_address?\n !use_shipping_address\n end",
"def shipping_same_as_billing=(value)\n return :skipped unless !! value and billing_address\n fields = billing_address.attributes.to_options.slice Chequeout::Address::LOCATION_FIELDS\n self.build_shipping_address unless shipping_address\n self.shipping_address.attributes = fields\n end",
"def apply_use_same_address\n self.contact_address = self.billing_address if self.use_billing_as_contact_address?\n true\n end",
"def can_calculate_shipping?\n !self.shipping_address.blank? && self.shipping_address.valid?\n end",
"def clone_shipping_address\n if Spree::AddressBook::Config[:show_bill_address_on_cc_form] == false\n if self.ship_address\n self.bill_address = self.ship_address\n end\n else\n if self.bill_address_id == nil\n self.bill_address = self.ship_address\n end\n end\n true\n end",
"def set_checkbox_same_address\n return false if @profile.nil?\n @use_same_address = (@profile.shipping_address.attributes.except('id','created_at','updated_at') == @profile.billing_address.attributes.except('id','created_at','updated_at'))\n end",
"def validate_ship_to_address_is_acceptable\n if (!ship_to_billing? && ship_to_address && ship_to_address.country != 'US') || (ship_to_billing? && payment && payment.billing_address && payment.billing_address.country != 'US')\n self.errors.add_to_base('We currently only ship orders to the United States. Please select an alternative Shipping Address')\n end\n end",
"def ship_to_address\n (shipping_address || user.shipping_address || user.billing_address).try(:full_address)\n end",
"def delivery_address_required?\n !(shipping_class && !shipping_class.requires_delivery_address?)\n end",
"def ship_to_address(options)\n for setting in [:first_name, :last_name, :company, :city, :state, :zip, :country] do\n if options[setting] then\n add_field 'x_ship_to_' + setting.to_s, options[setting]\n end\n end\n raise 'must use :address1 and/or :address2' if options[:address]\n add_field 'x_ship_to_address', (options[:address1].to_s + ' ' + options[:address2].to_s).strip\n end",
"def ship_to_address(options)\n for setting in [:first_name, :last_name, :company, :city, :state, :zip, :country] do\n if options[setting] then\n add_field 'x_ship_to_' + setting.to_s, options[setting]\n end\n end\n raise 'must use :address1 and/or :address2' if options[:address]\n add_field 'x_ship_to_address', (options[:address1].to_s + ' ' + options[:address2].to_s).strip\n end",
"def address_validation\n \"Y\"\n end",
"def shipping_address\n source = shipping_address_customization || order\n source.shipping_address\n end",
"def before_address\n @order.bill_address ||= Spree::Address.default(try_spree_current_user, \"bill\")\n if @order.checkout_steps.include? \"delivery\"\n if @order.event\n ship_address = @order.event.shipping_address\n @order.ship_address = ship_address\n else\n @order.ship_address ||= Spree::Address.default(try_spree_current_user, \"ship\")\n end\n end\n end",
"def check_address\n return unless migrated_for_validation?\n self.validated = deliverable_address?\n errors[:base] << Spree.t(:invalid_address) unless validated?\n end",
"def update_magento_address?\n !self.magento_id.nil? && self.is_addressable_user?\n end",
"def invalid_shipping_address?\n shipping_address.blank? || !shipping_address.valid?\n end",
"def seller_address? \r\n sold_by_business? && user.has_address? rescue false\r\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Set the shipping method id on the cart, creating the corresponding adjustment at the same time so that the cart price and recap take shipping costs into account
|
def shipping_method_id=(val)
  @shipping_method_id = val

  if shipping_method_id
    adjustments.reject! { |a| a.type == "shipping-method" }

    adjustment = Glysellin::Cart::Adjustment::ShippingMethod.new(self,
      shipping_method_id: shipping_method_id
    )

    adjustments << adjustment
  end

  @shipping_method_id
end
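A hedged sketch of the intended effect, assuming the adjustment object reports a `type` of "shipping-method" as the `reject!` call implies:

cart.shipping_method_id = 3
cart.adjustments.count   # => 1, the newly built shipping adjustment
cart.shipping_method_id = 5
cart.adjustments.count   # => still 1; the previous shipping adjustment was rejected first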
|
[
"def shipping_method=(method)\n raise ArgumentError.new(\"method cannot be nil\") unless method.present?\n self.shipping_name = method.name\n self.shipping_cost = method.cost\n end",
"def set_shipping_method\n ship_id = params[:ship_type_id]\n # Convert to integers for comparison purposes!\n ship_type = session[:order_shipping_types].find { |type| type.id.to_i == ship_id.to_i }\n ship_price = ship_type.calculated_price\n @order.order_shipping_type_id = ship_id\n @order.shipping_cost = ship_price\n @order.save\n \n if Preference.find_by_name('store_show_confirmation').is_true?\n action_after_shipping = 'confirm_order'\n else\n action_after_shipping = 'finish_order'\n end\n redirect_to :action => action_after_shipping\n end",
"def update_shipping_methods\n @retailer = Retailer.find(params[:retailer_id])\n params[:retailer][:shipping_method_ids] ||= []\n @retailer.update_attributes(params[:retailer])\n flash[:notice] = \"Shipping methods have been updated\"\n redirect_to admin_retailer_shipping_methods_url(@retailer)\n end",
"def active_shipping_method_id=(method_value)\n session[:shipping_method_id] = method_value\n end",
"def add_shipping_method(method, name, price)\n @command.create_shipping_method(method) do |sm|\n sm.name = name \n sm.price = Money.new(price * 100, \"USD\")\n sm.create_allowed_area(Google4R::Checkout::WorldArea)\n end\n end",
"def shipping_method\n @shipping_methods = @order.available_shipping_methods\n @shipping_method = @shipping_methods.find(params[:method_id])\n if @shipping_method.shipping_gateway.present?\n @shipping_gateway = @shipping_method.shipping_gateway_class.new(order: @order)\n @data = @shipping_gateway.prepare_interface_data(params)\n else\n @shipping_gateway = nil\n end\n rescue => e\n flash.now[:error] = e.message\n head :unprocessable_entity\n end",
"def shipping\n adjustments.find { |a| a.type == \"shipping-method\" }\n end",
"def create_shipment!\n shipping_method(true)\n if shipment.present?\n shipment.update_attributes(:shipping_method => shipping_method)\n else\n self.shipments << Shipment.create(:order => self,\n :shipping_method => shipping_method,\n :address => self.ship_address)\n end\n\n end",
"def create_shipment!\n shipping_method(true)\n\n if shipment.present?\n shipment.update_attributes!(:shipping_method => shipping_method)\n else\n self.shipments << Shipment.create!({ :order => self,\n :shipping_method => shipping_method,\n :address => self.ship_address}, :without_protection => true)\n end\n \n end",
"def fill_shipping_method_information\n @browser.radio(:id, @checkout_data['shipping_method']['method']).set\n @browser.div(:id, \"checkout-step-shipping_method\").button(:text, @checkout_data['shipping_method']['text']).click\n end",
"def apply_shipping_cost!(shipment)\n cost = shipment.cost(product_pricing)\n unless cost.nil?\n order_items.create(\n product: shipment.shipping_cost_product,\n amount: 1,\n priority: 1e9,\n price: cost.amount,\n tax_rate: cost.tax_rate,\n price_includes_tax: cost.tax_included,\n label: ''\n )\n end\n order_items.reload\n end",
"def refresh_shipping_methods\n region = Region.find(params['region'])\n @shipping_methods = region.shipping_methods\n @default_method = region.default_shipping_method\n\n @cart = current_cart\n @subtotal = @cart.sub_total\n @shipping_total = @cart.shipping_total(region, @default_method)\n @grand_total = @cart.grand_total(@shipping_total)\n\n # change the active region\n set_active_shipping_region_id(region.id)\n set_active_shipping_method_id(@default_method.id)\n end",
"def shipping_method\n shipping_rate.shipping_method\n end",
"def assign_default_shipping_method\n if shipping_method.nil?\n default_sm = first_class_shipping_method\n if default_sm\n self.shipping_method = default_sm\n else\n self.shipping_method = available_shipping_methods(:front_end).first\n end\n end\n end",
"def set_as_default\n store = Breeze::Commerce::Store.first\n unless store.default_shipping_method\n store.default_shipping_method = self\n store.save\n end\n end",
"def shipping_option_id\n @payment['shipping_option_id']\n end",
"def shipping_method_code\n hash [\"ShippingMethodCode\"]\n end",
"def shipping_item\n shipping_scope.first || shipping_scope.build(shipping_options)\n end",
"def shipping_methods\n @retailer = Retailer.find(params[:retailer_id])\n @shipping_methods = Spree::ShippingMethod.all\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Shortcut method to get the shipping adjustment from the adjustments list
|
def shipping
  adjustments.find { |a| a.type == "shipping-method" }
end
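For reference, a hedged example of how this shortcut pairs with the setter above:

cart.shipping_method_id = 3
cart.shipping   # => the shipping adjustment added by the setter
# On a cart with no shipping method selected, `shipping` returns nil.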
|
[
"def price_adjustments\n adjustments = []\n\n line_items.each do |line_item|\n adjustments.concat (line_item.adjustments - line_item.adjustments.gift_packaging)\n end\n\n adjustments\n end",
"def shipping\n total_quantity = items.inject(0) {|q, item| q + item.quantity}\n if total_quantity == 1\n return items.first.product.base_shipping\n else\n return items.inject(0) {|total, item|\n total + (item.product.add_on_shipping * item.quantity)\n }\n end\n end",
"def shipping\n @shipping ||= items.inject(0.0) {|shipping, item| shipping += (item.final_shipping * item.quantity)}\n end",
"def separate_shipment_costs\n return if @order.canceled?\n extracted_total = 0.to_d\n @order.shipments.each do |shipment|\n # Spree 2.1.x: shipment costs are expressed as order adjustments linked through source to the shipment\n # Spree 2.2.x: shipments have a cost which is authoritative, and one or more adjustments (shiptax, etc)\n cost = if shipment.respond_to?(:adjustment)\n shipment.adjustment.try(:amount) || 0.to_d\n else\n shipment.cost + shipment.adjustment_total\n end\n\n if cost > 0\n extracted_total += cost\n shipment.adjustment.open if shipment.respond_to? :adjustment\n shipment.adjustments.delete_all if shipment.respond_to? :adjustments\n shipment.shipping_rates.delete_all\n shipment.cost = 0\n shipment.add_shipping_method(rop_tbd_method, true)\n shipment.save!\n end\n end\n\n if extracted_total > 0\n # TODO: is Standard Shipping the best name for this? Should i18n happen?\n @order.adjustments.create(amount: extracted_total, label: \"Standard Shipping\", mandatory: false)\n @order.save!\n end\n end",
"def paid_expense_items\n paid_details.map{|pd| pd.expense_item }\n end",
"def shipping_method\n shipping_rate.shipping_method\n end",
"def paid_line_items\n paid_details.map { |pd| pd.line_item }\n end",
"def shipping\n @data[:shipping]\n end",
"def shipping_rates(items = nil)\n items ||= OrderItem.order_items_in_cart(self.id)\n rates = items.inject([]) do |rates, item|\n rates << item.shipping_rate if item.shipping_rate.individual? || !rates.include?(item.shipping_rate)\n rates\n end\n end",
"def shipping_methods\n response = API::SOAP::Checkout::Shipping.list(self)\n\n if response.success?\n return [] if response.body[:result][:item].nil?\n response.body[:result][:item] = [response.body[:result][:item]] unless response.body[:result][:item].is_a? Array\n\n return response.body[:result][:item]\n else\n handle_magento_response(response)\n return []\n end\n end",
"def get_rates_with_shipment_details(shipment_details, config = {})\n @rates.get_rates_with_shipment_details(shipment_details, config)\n end",
"def get_adjustment_line\n prd = Product.get_adjustment_product\n raise \"Missing adjustment product: please add using Maintain/Products function\" unless prd\n @lines.find {|li| li.li_prd_id == prd.prd_id }\n end",
"def total\n all_amount = @items.map(&:price).sum\n all_adjustments = @adjustments.sum { |a| a[:amount] }\n\n all_amount + all_adjustments\n end",
"def calculate_adjustment(checkout)\n @promotion_action.calculate_adjustment(checkout)\n end",
"def shipping_points\r\n AxInventory.sum(\r\n \"shipping_points\",\r\n :joins => \"RIGHT JOIN erp_sales_lines ON erp_sales_lines.item_id = ax_inventories.item_id\",\r\n :conditions => [\"erp_sales_lines.erp_sales_order_id = ?\", self.id]\r\n )\r\n end",
"def calculate_adjustment\n if adjustment_source && adjustment_source.respond_to?(:calculator)\n calc = adjustment_source.calculator\n result = calc && calc.compute(self)\n if promoted_products = adjustment_source.promoted_products\n ceiling = order.\n line_items(:join => :product).\n select{|li| promoted_products.include?(li.product)}.\n map(&:total).\n sum\n else\n ceiling = order.item_total\n end\n result = ceiling if result.to_i.abs > ceiling.abs\n result && -result.abs\n end\n end",
"def shipping_service\n @shipping_service ||= service_items.select {|i| i.service.key == 'shipping'}.first\n end",
"def original_shipping_total\n shipping_service.pre_discount_total\n end",
"def primary_shipping\n shipping.first.try(:[], :cost)\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Validates that the customer information is correctly filled in
|
def validate_customer_informations
  validate_nested_resource(:customer)
  validate_nested_resource(:billing_address)

  if use_another_address_for_shipping
    validate_nested_resource(:shipping_address)
  end
end
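The branching can be summarized with a hedged sketch; `validate_nested_resource` is assumed to copy errors from the named nested object onto the cart, which is an assumption about code not shown here:

cart.use_another_address_for_shipping = false
cart.validate_customer_informations   # checks :customer and :billing_address only

cart.use_another_address_for_shipping = true
cart.validate_customer_informations   # additionally checks :shipping_address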
|
[
"def validate_customer\n if !customer_id.nil? && Customer.find_by_id(self.customer_id).nil?\n errors.add(:customer_id, \"Invalid\")\n end\n end",
"def validate_customer_data(payload)\n headers = admin_headers\n\n put_wrapper('/V1/customers/validate', payload.to_json, headers)\n end",
"def validate_customer\n\t\t@customer = current_user\t\t\tif current_user.is_a? Customer\n\tend",
"def validate_billing_info\n errors = super.merge(validate_required(posted, ['password']))\n errors['password'] ||= error_for_passwords(posted['password'], posted['password_confirm'])\n errors.delete_if { |k,v| v.nil? }\n end",
"def validate_customer_json(customer_json, line_no)\n \t# check latitude\n if valid_latitude?(customer_json)\n customer_json[\"latitude\"] = customer_json[\"latitude\"].to_f\n else\n @errors << error_message(\"latitude\", line_no)\n end\n\n # check longitude\n if valid_longitude?(customer_json)\n customer_json[\"longitude\"] = customer_json[\"longitude\"].to_f\n else\n @errors << error_message(\"longitude\", line_no)\n end\n\n # Check user id\n if valid_user_id?(customer_json)\n customer_json[\"user_id\"] = customer_json[\"user_id\"].to_i\n else\n @errors << error_message(\"user_id\", line_no)\n end\n\n customer_json\n end",
"def validate_billing_info\n errors = {}\n\n if (posted['stripeToken'].nil? || posted['stripeToken'].empty?)\n errors['stripeToken'] = \"Your card wasn't accepted.\"\n end\n\n errors\n end",
"def new_customer\n customers = Customer.all\n customers.each do |c|\n if c.email == self.email\n errors.add(:email, 'Do you have an account with us already? <a href=\"#\">Sign in.</a>'.html_safe)\n return \n end\n end\n end",
"def test_invoice_with_new_customer_with_error\r\n @basic_user.user.profile.company_country = nil\r\n @basic_user.user.save(false)\r\n \r\n @basic_user.wait()\r\n @basic_user.goto_new_invoice_site_url\r\n @basic_user.wait()\r\n\r\n #Enter an existing Customer\r\n @customer = customers(:customer_with_contacts)\r\n \r\n #Enter an new Customer\r\n @basic_user.click_new_customer_button\r\n \r\n #enter the country first\r\n \r\n @basic_user.enter_customer_data(\r\n :country => \"Canada\")\r\n \r\n @basic_user.enter_customer_data(\r\n :name => @customer.name,\r\n :address1 => \"New Address1\", \r\n :address2 => \"New Address2\", \r\n :city => \"New City\", \r\n :province_state => \"Saskatchewan\",\r\n :postalcode_zip => \"S7S 7S7\",\r\n :website => \"New Website\", \r\n :phone => \"New Phone\", \r\n :fax => \"New Fax\" \r\n )\r\n\r\n # check that no customer gets added\r\n assert_no_difference( 'Customer.find(:all).size') do \r\n @basic_user.click_customer_add_button(1) \r\n end\r\n \r\n @basic_user.wait()\r\n \r\n # error message displayed \r\n assert_not_equal 0, @basic_user.div(:id, \"errorExplanation\").text.length \r\n \r\n end",
"def validate_account_info\n errors = validate_required(posted, ['email'])\n email = posted['email']\n slug = posted.fetch('slug', slugify(posted['name']))\n\n errors['email'] ||= error_for_email(dealer_dao, email) if email_changed?\n errors['slug'] ||= error_for_slug(dealer_dao, slug) if slug != @dealership.slug && !slug.empty?\n\n errors.delete_if { |k,v| v.nil? }\n end",
"def ensure_contact_details\n if email.blank? and phone.blank?\n message = '- please ensure you have either an email or phone contact details'\n errors.add :email, message\n errors.add :phone, message\n end\n end",
"def validate_that_at_least_one_set_of_contact_info_is_present\n return if veteran_contact_info.present? || claimant_contact_info.present?\n\n errors.add :form_data, I18n.t('appeals_api.errors.contact_info_presence')\n end",
"def no_repeat_customer\n if customer_first_name.present? && customer_last_name.present? && customer_primary_phone.present?\n Customer.all.each do |customer|\n if self.customer_first_name.upcase == customer.customer_first_name.upcase &&\n self.customer_last_name.upcase == customer.customer_last_name.upcase &&\n self.customer_primary_phone == customer.customer_primary_phone\n\n errors.add(:customer, \"already exists\")\n\n end\n end\n end\n end",
"def generate_customer_example\n \n cust = {\n :custname => \"\",\n :cust_ref => \"\",\n #:address => \"\",\n :tax_id => \"\", # related\n :curr_code => \"\", # related\n :credit_status => \"\", # related\n :payment_terms => \"\", # related\n :discount => 0,\n :pymt_discount => 0,\n :credit_limit => 10000,\n :sales_type => \"\", # related\n #:notes => \"\",\n :cfdi_street => \"\",\n :cfdi_street_number => \"\",\n #:cfdi_suite_number => \"\",\n :cfdi_district => \"\",\n :cfdi_postal_code => \"\",\n :cfdi_city => \"\",\n :cfdi_state => \"\",\n :cfdi_country => \"\",\n :client_no => \"\",\n :area_id => \"\", # related\n :salesman_id => \"\", # related\n :tax_group_id => \"\", #related\n :location_id => \"\", #related\n :ship_via_id => \"\" # related\n }\n \n end",
"def valid_customer\n unless (@customer && @customer.activated? &&\n @customer.authenticated?(:reset, params[:id]))\n redirect_to root_url\n end\n end",
"def test_edit_customer_text_fields\r\n\r\n # go directly to the customers/55/edit url\r\n # the other choice is to select a customer from the list but this is brittle\r\n goto_edit_customer_url \r\n # Click Edit for the 4th customer in the list\r\n #edit_customer_in_list(4)\r\n\r\n @user.enter_customer_data(\r\n :name => \"Modified Customer Name\", \r\n :address1 => \"Modified Address1\", \r\n :address2 => \"Modified Address2\", \r\n :city => \"Modified City\", \r\n :postalcode_zip => \"C4C 4C4\",\r\n :website => \"Modified Website\", \r\n :phone => \"Modified Phone\", \r\n :fax => \"Modified Fax\" )\r\n \r\n @user.submits \r\n \r\n # verify update was successful\r\n assert_equal \"Customers: edit\", @user.title \r\n\r\n c = Customer.find(customers(:customer_with_all_address_fields).id)\r\n \r\n verify_customer_data_fields(\r\n c,\r\n :name => \"Modified Customer Name\", \r\n :address1 => \"Modified Address1\", \r\n :address2 => \"Modified Address2\", \r\n :city => \"Modified City\", \r\n :postalcode_zip => \"C4C 4C4\",\r\n :website => \"Modified Website\", \r\n :phone => \"Modified Phone\", \r\n :fax => \"Modified Fax\" )\r\n end",
"def require_billing_info_validation\n change_billing_info? || !@user.subscribed?\n end",
"def create_customer\n customer = Customer.new(params)\n if customer.push_errors.empty?\n Customer.create(customer)\n else\n @errors << customer.push_errors\n end\n end",
"def load_customer_info(customer)\n self.customer_id = customer.id\n self.cardholder_email = customer.email\n\n customer_card = get_default_card(customer)\n\n self.cardholder_name = customer_card.name\n self.card_type = customer_card.type\n self.last4 = customer_card.last4\n self.expiration = customer_card.exp_month.to_s +\n '/' + customer_card.exp_year.to_s\n end",
"def validate\n errors.add(:post_office, \"- must be filled for postalcode #{self.postal_code}\") if self.post_office.blank? && !self.postal_code.blank?\n errors.add(:postal_code, \"- must be filled for #{self.post_office}\") if self.postal_code.blank? && !self.post_office.blank? \n errors.add_to_base(\"- Person must have at least one phonenumber\") if (self.phone_home.blank? && self.phone_cell.blank? && self.phone_work.blank?) \n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Validates that the current cart contents can be shipped to the selected country
|
def validate_shippable
  if !shipping || !shipping.valid
    code = use_another_address_for_shipping ?
      shipping_address.country : billing_address.country
    country = Glysellin::Helpers::Countries::COUNTRIES_LIST[code]

    errors.add(
      :shipping_method_id,
      I18n.t(
        "glysellin.errors.cart.shipping_method_unavailable_for_country",
        method: shipping_method.name,
        country: country
      )
    )
  end
end
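A hedged illustration of the failure path, assuming COUNTRIES_LIST maps ISO codes to display names (e.g. "FR" => "France") and that `shipping` is the adjustment accessor shown earlier:

cart.shipping_method_id = 3   # a method that does not deliver to the billing country
cart.validate_shippable
cart.errors[:shipping_method_id]
# => [translated "shipping method ... unavailable for France" message]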
|
[
"def validate_ship_to_address_is_acceptable\n if (!ship_to_billing? && ship_to_address && ship_to_address.country != 'US') || (ship_to_billing? && payment && payment.billing_address && payment.billing_address.country != 'US')\n self.errors.add_to_base('We currently only ship orders to the United States. Please select an alternative Shipping Address')\n end\n end",
"def validate_shippable\n code = shippable.shipping_address.country\n country = Glysellin::Helpers::Countries::COUNTRIES_LIST[code]\n\n errors.add(\n :shipping_method_id,\n I18n.t(\n 'glysellin.errors.cart.shipping_method_unavailable_for_country',\n method: shipment.shipping_method.name,\n country: country\n )\n )\n end",
"def validate_shipping_method\n if shipping_method && !shipping_method.available?(city_id: city_id)\n errors.add(:shipping_method, :invalid)\n end\n end",
"def require_country?\n true\n end",
"def supports_shipment_between_countries?(sender_country_code: nil, destination_country_code: nil)\n return false\n end",
"def rescue_from_no_retailer_can_ship_full_order_error\n ##flash[:error] = \"Catch all for all other scenarios - we are not able to ship all items to you...\"\n # First check if we have items that we cannot ship at all to the state, but also have items that we can ship\n result = Spree::ReservebarCore::OrderSplitter.find_shippable_categories(current_order)\n if result[:unshippable].count > 0 && result[:shippable].count > 0\n shippable_names = Spree::ShippingCategory.find(result[:shippable]).map(&:name).join(', ')\n unshippable_names = Spree::ShippingCategory.find(result[:unshippable]).map(&:name).join(', ')\n flash[:notice] = \"Thank you for attempting to purchase #{unshippable_names} and #{shippable_names} with ReserveBar. We appreciate your business; however, we currently cannot accept orders for delivery of #{unshippable_names} to your intended state due to that state's regulations. Fortunately, we are still able to accept the #{shippable_names} portion of your order for that state. Please remove #{unshippable_names} from your shopping cart and proceed through check out as normal.<br /> \n We realize this is not an ideal situation, but we trust our extensive selection of #{shippable_names} will provide your gift recipient an equally meaningful experience. We apologize for the inconvenience and thank you again for gifting with ReserveBar.\".html_safe\n elsif result[:unshippable].count > 0 && result[:shippable].count == 0\n # We can;t ship any of the items, tell the user what other items we can ship \n if Spree::Config[:use_county_based_routing]\n shippable_names = Spree::ReservebarCore::RetailerSelectorProfit.find_shippable_category_names(current_order.ship_address.state)\n else\n shippable_names = Spree::ReservebarCore::RetailerSelector.find_shippable_category_names(current_order.ship_address.state)\n end\n unshippable_names = Spree::ShippingCategory.find(result[:unshippable]).map(&:name).join(', ')\n flash[:notice] = \"Thank you for attempting to purchase #{unshippable_names}, unfortunately, we currently cannot accept orders for delivery of #{unshippable_names} to your intended state due to that state's regulations. However, we are able to accept orders for #{shippable_names} to be delivered to that state. Please remove #{unshippable_names} from your shopping cart and browse our extensive selection of #{shippable_names} for your gift purchase. <br />\n We realize this is not your first choice, but we trust our selection of #{shippable_names} will prove to be an attractive alternative.\".html_safe\n else\n # we can ship all items to the state, but not by the same retailer, so find a shippable subset\n shipping_categories = current_order.shipping_categories\n result = Spree::ReservebarCore::OrderSplitter.full_search(shipping_categories, current_order.ship_address.state)\n if result \n # We have a subset shippable by a single retailer\n shippable_names = Spree::ShippingCategory.find(result[:shippable]).map(&:name).join(', ')\n unshippable_names = Spree::ShippingCategory.find(result[:unshippable]).map(&:name).join(', ')\n flash[:notice] = \"Thank you for attempting to purchase #{shippable_names} and #{unshippable_names} with ReserveBar. We appreciate your business; however, we currently cannot combine those alcohol categories into one order for your intended state due to that state's regulations. Please remove #{unshippable_names} items from your shopping cart and proceed through check out with #{shippable_names} only. 
\n Then, we invite you to create a separate order with #{unshippable_names} items and proceed through check out with #{unshippable_names} only.\".html_safe\n else\n # we do not have a subset shippable by a single retailer (should really never happen at this point, unless we need to recurse deeper)\n flash[:notice] = \"Hmm, looks we cannot ship any of the items to your state.\"\n end\n end\n redirect_to cart_path\n end",
"def billing_postcode_required?\n billing_address_required? && !%w( IE ).include?(billing_country)\n end",
"def valid_shipping_cost?(cost)\n flatrate_shipping_option_costs.member? cost\n end",
"def validate_country\n return unless country.present?\n return if country.valid?\n\n errors.add(:country, \"Is invalid: #{country.errors.full_messages}\")\n end",
"def require_billing_and_delivery_addresses\n redirect_to checkout_path unless billing_address && delivery_address_valid?\n end",
"def scotland_country_code_valid?(country)\n country == Rails.configuration.x.allowed_property_country_code\n end",
"def validate_cart(cart)\n if @inventory_managed\n cart.cart_items.each do |cart_item|\n unless validate_cart_item(cart_item)\n cart.errors.clear\n if cart.product_total <= 0\n cart.errors.add_to_base(\"Our apologies, all items in your cart were no longer available.\")\n else\n cart.errors.add_to_base(\"The quantity of certain items in your cart exceeded availability\" +\n \" and were adjusted\")\n end\n end\n end\n end\n end",
"def validate(country)\n raise UnknownCountry, \"Country not found.\" if country.empty?\n end",
"def invalid_for_country?(country)\n !valid_for_country?(country)\n end",
"def has_valid_ip_selected_country?\n if session[:ip_session].nil? or session[:ip_session].phase_id.nil?\n false\n else\n ip_session = session[:ip_session]\n if ip_session.selected_country_ids.length>0\n true\n else\n false\n end\n end\n end",
"def check_for_eligible_of_free_shipping(order)\n order.any? { |item| item >= 100 }\nend",
"def exists_in_country_list\n country_code.upcase!\n if !Map.is_country_code?(country_code)\n errors.add(:country_code, \"is not in the list of allowed countries\")\n end\n end",
"def state_must_belong_to_country\n errors.add(:state, \"must belong to the selected country\") if Carmen::state_name(self.state, self.country).nil?\n end",
"def affects?(country)\n (country.regions & restricted_regions).any?\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Retrieve the order from the database if it exists, or use the cached version
|
def order
  @order ||= Glysellin::Order.where(id: order_id).first
end
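A brief note on the memoization, with illustrative record ids:

cart.order_id = 42
cart.order   # runs Glysellin::Order.where(id: 42).first and caches the record
cart.order   # returns the cached record without another query
# Because ||= is used, a nil result (no matching order) is looked up again on each call.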
|
[
"def find_reusable_order\n return # FIXME: currently it may return address which is being used by other active order\n expired_orders = find_expired_orders_row\n if expired_orders.size >= Config.reuse_address_orders_threshold &&\n fetch_transactions_for(expired_orders.last.address).empty?\n return expired_orders.last\n end\n nil\n end",
"def retrieve_order(order_id)\n\n end",
"def update_order_items_cache\n unless self.order.nil?\n order = Order.find(self.order)\n order.save\n end\n end",
"def order\n @order = Order.find(order_id)\n end",
"def current_order\n @current_order ||= load_order_from_session unless @current_order == false\n end",
"def order(order_id)\n Order.find(order_id)\n end",
"def find_order \n unless session[:order_id].blank?\n @order = Order.find_or_create_by_id(session[:order_id])\n else \n @order = Order.create\n end\n session[:order_id] = @order.id\n @order\n end",
"def placed_order\n @placed_order ||= Order.find_by_id(session[:placed_order]) if session[:placed_order]\n end",
"def load_order\n if !params[:format].nil? && params[:format] == \"json\"\n if session[:order_id]==nil\n current_user=Spree::User.find_by_authentication_token(params[:authentication_token])\n if current_user.present?\n current_order = Spree::Order.find_by_number(params[:order_id])\n if current_order.present?\n payment=Spree::Payment.find_by_order_id(current_order.id)\n if payment.present?\n @order=current_order\n else\n error = error_response_method($e15)\n render :json => error\n end\n else\n error = error_response_method($e24)\n render :json => error\n end\n else\n error = error_response_method($e13)\n render :json => error\n end\n end\n else\n @order ||= Spree::Order.find_by_number! params[:order_id]\n #end\n end\n end",
"def load_order!\n @order = current_user.orders.find params[:id]\n # @order = @cookbook.get_active_reorder(params[:id]) if @cookbook.is_owner? current_user\n end",
"def pending_order\n order = orders.find_or_create_by(completed: false)\n end",
"def order\n @order ||= if session['order']\n OrderBasket.load(session['order']).tap(&:apply_promotions!)\n else\n OrderBasket.new\n end\n end",
"def getOrder(id)\n @orders.find {|ord| ord.orderId.to_i == id.to_i}\n end",
"def fetch(orders_path:, customer_id: nil, order_status: 'processing')\n self.edi_orders_path = orders_path\n self.customer_id = customer_id\n self.order_status = order_status\n\n # Fetch only order that are 'processing'\n response = woo_get('orders', status: order_status)\n return unless response\n\n response.each do |order|\n # update orders status to 'on-hold'\n status = woo_put(\"orders/#{order['id']}\", status: 'on-hold')\n logger.info \"Order #{order['id']} status set to 'on-hold'\"\n next unless status\n\n # Check if order already is in Pupesoft\n @pupe_draft = SalesOrder::Draft.find_by(laatija: 'WooCommerce', asiakkaan_tilausnumero: order['id'])\n @pupe_order = SalesOrder::Order.find_by(laatija: 'WooCommerce', asiakkaan_tilausnumero: order['id'])\n\n if @pupe_draft.nil? && @pupe_order.nil?\n logger.info \"Order #{order['id']} fetched and put in Pupesoft processing queue\"\n write_to_file(order)\n else\n logger.info \"Order #{order['id']} NOT fetched beacause it already exists in Pupesoft\"\n end\n end\n end",
"def get_from_cache(id)\n\t\t\n\t\t# Check cache\n\t\tobj = nil\n\t\t@cache.each{ |o| obj = o if !o.nil? && o.id == id }\n\n\t\t# Manage last used first position cache here\n\t\tif obj.nil?\n\t\t\t# get from storage if not in the cache\n\t\t\tobj = get_from_storage(id)\n\t\t\tDebug.add(\"[STORAGE] Got #{id} of type #{obj.class.name}\")\n add_to_cache(obj)\n\t\telse\n\t\t\tDebug.add(\"[CACHE] Got #{id} of type #{obj.class.name}\")\n\t\t\tshift_in_cache(obj)\n\t\tend\n\n\t\treturn obj\n\tend",
"def load!\n transfer = find_or_create_transfer\n transfer.load!(order.items_waiting)\n end",
"def fetch\n @raw_result = opts_for_cache_proxy[:raw] == true\n\n result = if refresh_cache?\n execute_find(@raw_result)\n elsif cached.is_a?(AridCache::CacheProxy::Result)\n if cached.has_ids? && @raw_result\n self.cached # return it unmodified\n elsif cached.has_ids?\n fetch_from_cache # return a list of active records after applying options\n else # true if we have only calculated the count thus far\n execute_find(@raw_result)\n end\n else\n cached # some base type, return it unmodified\n end\n end",
"def cache_get\n result = EOAT.cache.get(@host, @uri)\n result.from_cache = true if result\n result\n end",
"def get(key)\n # Return nothing if not in the cache or it has expired.\n return if key.nil?\n\n entry = @cache[key]\n return unless entry\n return if entry.expired?\n\n # Otherwise return the cached object.\n # We don't delete the cached entry because we might need to force its use if its expired and offline\n entry.object\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Assign order and order_id; if nil is explicitly passed, ensure order_id is set to nil too
|
def order=(order)
  self.order_id = order && order.id
  @order = order
end
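A hedged illustration of the nil handling (the looked-up record is assumed to exist):

cart.order = Glysellin::Order.find(42)
cart.order_id   # => 42
cart.order = nil
cart.order_id   # => nil, because `order && order.id` short-circuits to nil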
|
[
"def set_order\n @order = Order.find(session[:order_id])\n puts(session[:order_id])\n #if an error occurs (if the session doesn't exist) - creating a new one.\n rescue ActiveRecord::RecordNotFound\n @order = Order.create\n session[:order_id] = @order.id\n end",
"def set_order\n # @order = current_user.order ?\n # @order = current_user.build ?\n end",
"def prepare_order\n @order = Order.new\n @order.is_new_order = false\n @order.is_new_stage = false\n @order.child_birthday = Date.today\n @order.performance_date = Date.today\n @order.performance_time = Time.now\n end",
"def prepare_new_order\n initialize_account if @account.nil?\n @order = Order.new(order_params)\n @order.account_id = @account.id\n @order.price_cents = @price_cents\n end",
"def create_order_id\n self.order_id = \"GO%.5d\" % self.id\n self.save\n end",
"def set_order_session(order)\n session[:order_id] = order.id\n #puts \"session_order_id #{session[:order_id]}\"\n end",
"def find_order \n unless session[:order_id].blank?\n @order = Order.find_or_create_by_id(session[:order_id])\n else \n @order = Order.create\n end\n session[:order_id] = @order.id\n @order\n end",
"def order\n @order = Order.find(order_id)\n end",
"def persist_order_address(order)\n if order.ship_address\n address = save_in_address_book(\n order.ship_address.attributes,\n Spree::Config.automatic_default_address\n )\n self.ship_address_id = address.id if address && address.persisted?\n end\n\n if order.bill_address\n address = save_in_address_book(\n order.bill_address.attributes,\n order.ship_address.nil? && Spree::Config.automatic_default_address\n )\n self.bill_address_id = address.id if address && address.persisted?\n end\n\n save! # In case the ship_address_id or bill_address_id was set\n end",
"def clone_from_order(order)\n if order.invoiced?\n raise ArgumentError, \"The supplied order is already invoiced.\"\n else\n self.user_id = order.user_id\n self.order = order\n self.document_number = order.document_number\n self.payment_method_id = order.payment_method_id\n self.billing_address_type = order.billing_address_type\n self.shipping_address_type = order.shipping_address_type\n self.billing_address = order.billing_address\n self.shipping_address = order.shipping_address\n self.shipping_cost = order.shipping_cost\n self.shipping_taxes = order.shipping_taxes\n self.rebate = order.rebate\n self.status_constant = Invoice::UNPAID\n self.invoice_lines_from_order(order)\n order.reload\n end \n end",
"def set_order(opts)\n opts = check_params(opts,[:orders])\n super(opts)\n end",
"def order=(value)\n @order = value\n end",
"def add_identification_new_order(request, options)\n requires!(options, :order_id)\n request[:orderId] = options[:order_id]\n end",
"def prepare_new_order\n @order = Order.new(order_params)\n @order.user_id = current_user.id\n @product = Product.find(params[:orders][:product_id])\n @order.product = @product\n @order.price_cents = @product.price_cents\n end",
"def set_OrderID(value)\n set_input(\"OrderID\", value)\n end",
"def update_address_id\n if params[:order]\n\n # [:order][:ship_address_id] was inserted by the address book\n # it will be blank if no address from the book was selected\n # it will have the id if an address from the book was selected\n \n if params[:order][:ship_address_id]\n ship_address_id = params[:order][:ship_address_id]\n params[:order].delete :ship_address_id\n\n if !ship_address_id.blank?\n address = Address.find_by_id( ship_address_id )\n if address\n @order.ship_address = address\n params[:order].delete :ship_address_attributes\n end\n return true\n end\n end\n if params[:order][:bill_address_id]\n\n bill_address_id = params[:order][:bill_address_id]\n params[:order].delete :bill_address_id\n\n if !bill_address_id.blank?\n bill_address = Address.find_by_id( bill_address_id )\n if bill_address\n @order.bill_address = bill_address\n params[:order].delete :bill_address_attributes\n end\n return true\n end\n end\n true\n end\n end",
"def id\n order_id\n end",
"def initialize(order)\n super\n @order = order\n end",
"def order=(value)\n @order = value\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Cleans the cart's stored order if it exists
|
def clean_order!
  if order
    # Destroy the current cart order if it hasn't been paid already,
    # because we're creating a new one
    order.destroy if order.state_name == :ready
    # Unset the order
    self.order = nil
  end
end
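A hedged sketch of the two cases, assuming `state_name` returns symbols such as :ready or :paid:

cart.order = unpaid_order   # state_name == :ready
cart.clean_order!           # destroys the order and detaches it from the cart

cart.order = paid_order     # any other state
cart.clean_order!           # keeps the paid order in the database, but still detaches it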
|
[
"def check_order\n if order.order_lines.count == 0\n order.destroy\n end\n end",
"def clear_cart\n self.order_status_id = 4\n self[:subtotal] = 0\n self.order_items.destroy_all\n end",
"def clear_cart_and_order(destroy_order = true)\n @cart = find_cart.empty!\n if session[:order_id] then\n @order = Order.find(session[:order_id])\n if destroy_order then\n @order.destroy\n end\n session[:order_id] = nil\n end\n end",
"def empty!\n self.cart_items.each { |ci| ci.destroy }\n end",
"def clear_cart_and_order(destroy_order = true)\n @cart.empty!\n if session[:order_id] then\n @order = Order.find(session[:order_id])\n if destroy_order then\n @order.destroy\n end\n session[:order_id] = nil\n end\n end",
"def destroy_test_order! \n cart = self.cart\n # inventory orders don't have a cart\n if cart\n cart.cart_items.each do |cart_item|\n cart_item.destroy\n end\n cart.destroy\n end\n \n self.destroy\n end",
"def sanitize! \n if session[:order_id]\n session[:order_id].to_s\n order = Order.find(session[:order_id])\n if order.order_status_code_id != 1 && order.order_status_code_id != 3\n clear_cart_and_order(false)\n end\n end\n end",
"def clean_empty_carts\n carts = cart_model.empty.older_than(Stall.config.empty_carts_expires_after.ago)\n\n log \"Cleaning #{ carts.count } empty carts ...\"\n carts.destroy_all\n log \"Done.\"\n end",
"def empty_to_buy\n self.cart_items.delete(self.items_to_buy)\n end",
"def remove!\n if order_item.destroy\n order_maker.handle_coupon!\n order_maker.destroy_empty_order!\n return_with(:success)\n else\n return_with(:error, error: order_maker.order_errors)\n end\n end",
"def clear_cart_after_checkout\n puts \" \"\n puts \"************Bye Bye************\"\n puts \" \"\n Cart.destroy_all\n end",
"def clean_up_db\n items = CartItem.all\n cartproducts = items.map { |item| item.product_id }\n products = Product.all\n products.each do |product|\n unless cartproducts.include?(product.id)\n product.destroy\n end\n end\n end",
"def test_cleanup_successful\n setup_new_order()\n @o.order_line_items << @li\n @o.order_status_code = order_status_codes(:cart)\n @o.notes = \"test test\"\n assert @o.save\n\n # Make sure inventory control is enabled.\n assert Preference.find_by_name('store_use_inventory_control').is_true?\n # Make sure cc number obfuscation is enabled.\n assert Preference.find_by_name('cc_clear_after_order').is_true?\n \n initial_quantity = @li.item.quantity\n notes_before = @o.notes.clone\n \n @o.cleanup_successful\n @li.item.reload\n \n # Quantity should be updated.\n assert_equal @li.item.quantity, (initial_quantity - @li.quantity)\n # Status code should be updated.\n @o.reload\n assert_equal @o.order_status_code, order_status_codes(:ordered_paid_to_ship)\n \n # CC number should be obfuscated.\n number_len = @o.account.cc_number.length\n new_cc_number = @o.account.cc_number[number_len - 4, number_len].rjust(number_len, 'X')\n assert_equal @o.account.cc_number, new_cc_number\n \n # A new note should be added.\n notes_after = @o.notes\n assert_not_equal notes_before, notes_after\n end",
"def clear_current_cart\n @person.cart.empty! if @person\n self.current_cart = @cart = nil\n true\n end",
"def empty_cart\n clear_cart_and_order\n redirect_to_index(\"All items have been removed from your order.\")\n end",
"def destroy\n unless cart.nil? || cart.ordered? || ordered?\n self.update_attribute(:deleted_at, Time.now.utc)\n end\n end",
"def cleanup_successful\n\t\tself.order_status_code_id = 5\n self.new_notes=\"Order completed.\"\n self.product_cost = self.line_items_total\n self.account.clear_personal_information\n self.save\n\tend",
"def cleanup_successful\n self.order_status_code_id = 5\n self.new_notes=\"Order completed.\"\n self.product_cost = self.line_items_total\n self.account.clear_personal_information\n self.save\n end",
"def clear_cart\n update_cart({})\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
A random timeout in the range between the minimum timeout and twice that value
|
def random_timeout
  @IS_TIMEOUT_RANDOM ? @MIN_TIMEOUT + rand(@MIN_TIMEOUT) : @MIN_TIMEOUT
end
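A hedged numeric example, assuming @MIN_TIMEOUT is 30 and @IS_TIMEOUT_RANDOM is true:

rand(30)        # => an Integer in 0...30
30 + rand(30)   # => an Integer in 30...60, i.e. [min, 2 * min)
# With @IS_TIMEOUT_RANDOM set to false the method always returns @MIN_TIMEOUT.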
|
[
"def random_spawn_countdown minimum\n 10.randomize(:ratio, :sign).to_i + 60\n end",
"def random_timeout\n range = @options[:election_timeout] || ELECTION_TIMEOUT\n min, max = range.first, range.last\n min + rand(max - min)\n end",
"def random_duration\n min_duration + rand(max_duration - min_duration + 1)\n end",
"def sltime \n time = rand(0.5..1.0)\n sleep(time)\n end",
"def sleep_time\n 1.0/(rand(10) + 1)\n end",
"def sleep(min=30,max=45)\n raise 'max cannot be less than min' if max < min\n \n seconds_to_sleep = (60*min + rand(60 * (max-min)))\n puts \"(#{Time.now.strftime(\"%H:%M\")}) Sleeping for #{seconds_to_sleep/60} mins\"\n Kernel.sleep seconds_to_sleep\n end",
"def sleep_rand(min, max)\n sleep_time = rand(min..max)\n sleep(1)\n print_time_stamp\n puts \"Sleeping for #{sleep_time - 1} seconds ...\".colorize(:red)\n sleep(sleep_time - 1)\n end",
"def random_range_end(start)\n start + (rand(1..5) * 30).minutes\n end",
"def next_duration\n @durations.empty? ? rand(20) : @durations.shift\n end",
"def random_on_interval(min, max)\n min + @prng.rand(max - min)\n end",
"def wait_randomly\n random_seconds = rand(0.5..2.5)\n sleep random_seconds\nend",
"def _randomize(delay)\n (delay * (1.0 + 0.5 * rand)).round\n end",
"def randomize_baddie_timer\n @baddie_randomizer = rand(60*5) + 60\n end",
"def random(min,max) (max - min) * rand + min end",
"def rule (rate, rand)\n r = rand.rand\n return r < rate #60*60 = 3600 seconds\nend",
"def generate_random_time\n\t\trandom_value = Random.rand(0.0..1.0)\n\t\t(0..(@nbTSInOneDay - 1)).each do |ts|\n\t\t\treturn ts if @demandOverTimeCumulatedAndNormalised[ts] >= random_value\n\t\tend\n\tend",
"def gen_speed\n rand + 15.0\n end",
"def timeout_after(time); end",
"def wait_for_seconds\n\t\tsleep(1 * rand + 1)\n\tend"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
GET /micgroposts/1 GET /micgroposts/1.xml
|
def show
  @micgropost = Micgropost.find(params[:id])

  respond_to do |format|
    format.html # show.html.erb
    format.xml  { render :xml => @micgropost }
  end
end
|
[
"def rss\n @events = Post.find(:all, {:conditions=> \"status=public\"},:order => \"id DESC\")\n render :layout => false\n headers[\"Content-Type\"] = \"application/xml; charset=utf-8\"\n end",
"def show\n @mircopost = Mircopost.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @mircopost }\n end\n end",
"def index\n @posts = Post.find(:all)\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @posts }\n end\n end",
"def rss\n @answers = Answer.find :all, :order => 'answers.created_at DESC', :limit => 20\n\n respond_to do |format|\n format.xml\n end\n end",
"def index\n @posts = Post.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @posts }\n end\n end",
"def list\n @posts = Post.find(:all)\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @posts }\n end\n end",
"def get_xml(params = {:format => :xml, :action => :query})#TODO put these in so that they're default and not lost when we pass stuff in...\n url = _form_url(params)\n \n #requests without user-agents are refused. See:\n #http://www.mooduino.co.uk/2010/04/wikipedia-api-user-agent-string-in-php.html\n http = Net::HTTP.new(WIKI_API_SERVER) #en.wikipedia.org\n resp = http.request_get(WIKI_API_PATH+url, 'User-Agent' => 'WikipediaSpamBot/0.1 (+hincapie.cis.upenn.edu)')\n \n raise \"POST FAILED:\" + resp.inspect unless resp.is_a? Net::HTTPOK or resp.is_a? Net::HTTPFound\n resp.body #get xml\nend",
"def show\n @scrap_xml = ScrapXml.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @scrap_xml }\n end\n end",
"def index\n @mush_crawlers = MushCrawler.find(:all)\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @mush_crawlers }\n end\n end",
"def index\n @rss_targets = RssTarget.find(:all)\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @rss_targets }\n end\n end",
"def show\n @post = Post.find(params[:id])\n\n respond_to do |format|\n format.json { render json: @post }\n format.xml { render xml: @posts }\n end\n end",
"def rss\n @event = Event.find_by_key(params['id'])\n @histories = @event.histories(:order => 'created_at DESC')\n render :layout => false\n response.headers[\"Content-Type\"] = \"application/xml; charset=utf-8\"\n end",
"def xml(options = {})\n host = Picasa.host\n path = Picasa.path(options)\n url = URI(\"#{host}#{path}\")\n\n http = Net::HTTP.new(url.host, url.port)\n http.use_ssl = true\n\n req = add_auth_headers(Net::HTTP::Get.new url.path)\n\n response = http.request(req)\n if response.code =~ /20[01]/\n response.body\n end\n end",
"def index\n @magazines = Magazine.all\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @magazines }\n end\n end",
"def show\n @post80 = Post80.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @post80 }\n end\n end",
"def new\n @micgropost = Micgropost.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @micgropost }\n end\n end",
"def feed\n magazine = Magazine.find_by_apps_id(params[:id]) # get current magazine\n @issues = Issue.includes(:previews, :contents).where(:magazine_id => magazine).order(\"position\") # get current magazine issues\n render 'feed.xml'\n end",
"def new\n @post = Post.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @post }\n end\n make_rss\n end",
"def show\n @post51 = Post51.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @post51 }\n end\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
GET /micgroposts/new GET /micgroposts/new.xml
|
def new
  @micgropost = Micgropost.new

  respond_to do |format|
    format.html # new.html.erb
    format.xml  { render :xml => @micgropost }
  end
end
|
[
"def new\n @post = Post.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @post }\n end\n make_rss\n end",
"def new\n @post201 = Post201.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @post201 }\n end\n end",
"def new\n @rss_post = RssPost.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @rss_post }\n end\n end",
"def new\n @post80 = Post80.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @post80 }\n end\n end",
"def new\n @post200 = Post200.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @post200 }\n end\n end",
"def new\n @post50 = Post50.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @post50 }\n end\n end",
"def new\n @post155 = Post155.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @post155 }\n end\n end",
"def new\n @post55 = Post55.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @post55 }\n end\n end",
"def new\n @post198 = Post198.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @post198 }\n end\n end",
"def new\n @post = Post.new\n \n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @post }\n end\n end",
"def new\n @mircopost = Mircopost.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @mircopost }\n end\n end",
"def new\n @moto_post = MotoPost.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @moto_post }\n end\n end",
"def new\n @rssnew = Rssnews.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @rssnew }\n end\n end",
"def new\n @post101 = Post101.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @post101 }\n end\n end",
"def new\n @post100 = Post100.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @post100 }\n end\n end",
"def new\n @post78 = Post78.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @post78 }\n end\n end",
"def new\n @post42 = Post42.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @post42 }\n end\n end",
"def new\n @post_it = PostIt.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @post_it }\n end\n end",
"def new\n @post57 = Post57.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @post57 }\n end\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
POST /micgroposts POST /micgroposts.xml
|
def create
  @micgropost = Micgropost.new(params[:micgropost])

  respond_to do |format|
    if @micgropost.save
      format.html { redirect_to(@micgropost, :notice => 'Micgropost was successfully created.') }
      format.xml  { render :xml => @micgropost, :status => :created, :location => @micgropost }
    else
      format.html { render :action => "new" }
      format.xml  { render :xml => @micgropost.errors, :status => :unprocessable_entity }
    end
  end
end
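A hedged sketch of exercising both branches from a classic functional test; the :content attribute and its presence validation are assumptions, since the model is not shown:

post :create, :micgropost => { :content => "hello" }  # valid record => redirect to show
post :create, :micgropost => { :content => nil }      # invalid record => re-renders "new"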
|
[
"def create\n @micripost = Micripost.new(params[:micripost])\n\n respond_to do |format|\n if @micripost.save\n format.html { redirect_to @micripost, notice: 'Micripost was successfully created.' }\n format.json { render json: @micripost, status: :created, location: @micripost }\n else\n format.html { render action: \"new\" }\n format.json { render json: @micripost.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @micrpost = Micrpost.new(micrpost_params)\n\n respond_to do |format|\n if @micrpost.save\n format.html { redirect_to @micrpost, notice: 'Micrpost was successfully created.' }\n format.json { render :show, status: :created, location: @micrpost }\n else\n format.html { render :new }\n format.json { render json: @micrpost.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @micrpost = Micrpost.new(params[:micrpost])\n\n respond_to do |format|\n if @micrpost.save\n format.html { redirect_to @micrpost, notice: 'Micrpost was successfully created.' }\n format.json { render json: @micrpost, status: :created, location: @micrpost }\n else\n format.html { render action: \"new\" }\n format.json { render json: @micrpost.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @mircopost = Mircopost.new(params[:mircopost])\n\n respond_to do |format|\n if @mircopost.save\n format.html { redirect_to(@mircopost, :notice => 'Mircopost was successfully created.') }\n format.xml { render :xml => @mircopost, :status => :created, :location => @mircopost }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @mircopost.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def create\nputs \"===============================> create<===================================================\"\n megam_rest.post_node(to_hash)\n end",
"def POST; end",
"def post_vms(xmlfile)\n xml=File.read(xmlfile)\n\n url = URI.parse(@endpoint+\"/compute\")\n\n req = Net::HTTP::Post.new(url.path)\n req.body=xml\n\n req.basic_auth @occiauth[0], @occiauth[1]\n\n res = CloudClient::http_start(url) do |http|\n http.request(req)\n end\n\n if CloudClient::is_error?(res)\n return res\n else\n return res.body\n end\n end",
"def post_vms(xmlfile)\n xml=File.read(xmlfile)\n \n url = URI.parse(@endpoint+\"/compute\")\n \n req = Net::HTTP::Post.new(url.path)\n req.body=xml\n \n req.basic_auth @occiauth[0], @occiauth[1]\n \n res = CloudClient::http_start(url) do |http|\n http.request(req)\n end\n\n if CloudClient::is_error?(res)\n return res\n else\n return res.body\n end\n end",
"def post *args\n make_request :post, *args\n end",
"def post(data, tags_in = {}) ; post_to nil, data, tags_in end",
"def post_stomp(msg,headers)\n \n response_header = {\"Content-type\" => \"text/xml\"}\n response_header.merge headers\n ht =Net::HTTP.start(self.host,self.port)\n url = self.url # + \"/\" + self.topic\n puts \"posting to: #{self.host}: #{self.port} #{url} message: #{msg.to_xml}\"\n r=ht.post(url,msg.to_xml,response_header)\n \n puts \"result: #{r.to_s}\"\n r\n end",
"def create\n megam_rest.post_node(to_hash)\n end",
"def post\n resource.post(request, response)\n end",
"def create\n @micorpost = Micorpost.new(params[:micorpost])\n\n respond_to do |format|\n if @micorpost.save\n format.html { redirect_to @micorpost, notice: 'Micorpost was successfully created.' }\n format.json { render json: @micorpost, status: :created, location: @micorpost }\n else\n format.html { render action: \"new\" }\n format.json { render json: @micorpost.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @micrrapost = Micrrapost.new(params[:micrrapost])\n\n respond_to do |format|\n if @micrrapost.save\n format.html { redirect_to @micrrapost, notice: 'Micrrapost was successfully created.' }\n format.json { render json: @micrrapost, status: :created, location: @micrrapost }\n else\n format.html { render action: \"new\" }\n format.json { render json: @micrrapost.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create_xmlrpc\n xml = request.body.read\n \n if(xml.empty?)\n error = 400\n return\n end\n \n # Parse xml\n method, arguments = XMLRPC::Marshal.load_call(xml)\n arg = arguments[0]\n response = create_report(arg)\n \n redirect_to retrieve_response_url(iform_xml_feed, :format => 'xml') \n end",
"def moip_post\n @nasp_rail = NaspRail.new(params[:nasp_rail])\n\n format.html { redirect_to @nasp_rail, :notice => 'Nova entrada criada com sucesso.' }\n format.json { render :json => @nasp_rail, :status => :created, :location => @nasp_rail }\n end",
"def post_config(url_prefix, xml)\n post_data(url_prefix, xml, 'application/xml;charset=UTF-8')\n end",
"def new\n @micgropost = Micgropost.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @micgropost }\n end\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
DELETE /micgroposts/1 DELETE /micgroposts/1.xml
|
def destroy
@micgropost = Micgropost.find(params[:id])
@micgropost.destroy
respond_to do |format|
format.html { redirect_to(micgroposts_url) }
format.xml { head :ok }
end
end
|
[
"def destroy\n @mircopost = Mircopost.find(params[:id])\n @mircopost.destroy\n\n respond_to do |format|\n format.html { redirect_to(mircoposts_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @scrap_xml = ScrapXml.find(params[:id])\n @scrap_xml.destroy\n\n respond_to do |format|\n format.html { redirect_to(scrap_xmls_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @post = Post.find(params[:id])\n @post.deleted = 1\n @post.save\n\n respond_to do |format|\n format.html { redirect_to(posts_url) }\n format.xml { head :ok }\n end\n make_rss\n end",
"def destroy\n @moto_post = MotoPost.find(params[:id])\n @moto_post.destroy\n\n respond_to do |format|\n format.html { redirect_to(moto_posts_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @mypost = Mypost.find(params[:id])\n @mypost.destroy\n\n respond_to do |format|\n format.html { redirect_to(myposts_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @post1 = Post1.find(params[:id])\n @post1.destroy\n\n respond_to do |format|\n format.html { redirect_to(post1s_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @post14 = Post14.find(params[:id])\n @post14.destroy\n\n respond_to do |format|\n format.html { redirect_to(post14s_url) }\n format.xml { head :ok }\n end\n end",
"def delete\n api_xml(category(target),:delete) if options.data or options.category\n if options.itemdef\n parse_itemdef\n uid=find_definition_uid_by_name(itemdef.name)\n response=admin_xml(\"/itemDefinitions/#{uid}\")\n verbose \"About to delete: #{REXML::XPath.first(response,'//Name/text()').value} item definition.\\n\"\n admin_xml(\"/itemDefinitions/#{uid}\",\n :delete) if itemdef.name\n end\n end",
"def destroy\n @micripost = Micripost.find(params[:id])\n @micripost.destroy\n\n respond_to do |format|\n format.html { redirect_to micriposts_url }\n format.json { head :no_content }\n end\n end",
"def destroy1\n @smile = Smile.find(params[:id])\n @smile.destroy\n\n respond_to do |format|\n format.html { redirect_to(smiles_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @rss_post = RssPost.find(params[:id])\n @rss_post.destroy\n\n respond_to do |format|\n format.html { redirect_to(rss_posts_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @miscopost.destroy\n respond_to do |format|\n format.html { redirect_to miscoposts_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @mosaic = Mosaic.find(params[:id])\n @mosaic.destroy\n\n respond_to do |format|\n format.html { redirect_to(mosaics_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @post160 = Post160.find(params[:id])\n @post160.destroy\n\n respond_to do |format|\n format.html { redirect_to(post160s_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @medic = Medic.find(params[:id])\n @medic.destroy\n\n respond_to do |format|\n format.html { redirect_to(medics_url) }\n format.xml { head :ok }\n end\n end",
"def del\n @status1 = Status1.find(params[:id])\n @status1.destroy\n\n respond_to do |format|\n format.html { redirect_to(status1s_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @misale = Misale.find(params[:id])\n @misale.destroy\n\n respond_to do |format|\n format.html { redirect_to(misales_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @motivo = Motivo.find(params[:id])\n @motivo.destroy\n\n respond_to do |format|\n format.html { redirect_to(motivos_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @margenop = Margenop.find(params[:id])\n @margenop.destroy\n\n respond_to do |format|\n format.html { redirect_to(margenops_url) }\n format.xml { head :ok }\n end\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
euler020 6/22/12 Find the sum of the digits in the number 100!
|
def euler020
  # n! via reduce, then sum the decimal digits of 100!
  def fact(x)
    x.downto(1).reduce(:*)
  end
  fact(100).to_s.split('').map { |x| x.to_i }.reduce(:+)
end
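# A minimal sanity check of the same digit-sum approach (not part of the record
# above; assumes Ruby 2.4+ for Integer#digits): 10! = 3628800 and
# 3 + 6 + 2 + 8 + 8 + 0 + 0 = 27, which the hypothetical helper below reproduces.
def digit_sum_of_factorial(n)
  (1..n).reduce(1, :*).digits.sum
end

digit_sum_of_factorial(10)   # => 27
digit_sum_of_factorial(100)  # => 648, matching euler020 above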
|
[
"def euler_13\n data = File.new(\"big_ass_number\").to_a\n data_line = []\n sum = 0 \n digits = []\n\n (0..data.length).each do |i|\n sum += data[i].to_i\n end\n digits = sum.to_s\n puts digits.slice(0...10)\n end",
"def euler016\n (2 ** 1000).to_s.split('').map { |x| x.to_i }.reduce(:+)\nend",
"def euler006\n square_of_sum = (1..100).reduce(:+) ** 2\n sum_of_squares = (1..100).inject { |sum, n| sum + n*n }\n\n return square_of_sum - sum_of_squares\nend",
"def large_sum\n digits = <<EOS\n37107287533902102798797998220837590246510135740250\n46376937677490009712648124896970078050417018260538\n74324986199524741059474233309513058123726617309629\n91942213363574161572522430563301811072406154908250\n23067588207539346171171980310421047513778063246676\n89261670696623633820136378418383684178734361726757\n28112879812849979408065481931592621691275889832738\n44274228917432520321923589422876796487670272189318\n47451445736001306439091167216856844588711603153276\n70386486105843025439939619828917593665686757934951\n62176457141856560629502157223196586755079324193331\n64906352462741904929101432445813822663347944758178\n92575867718337217661963751590579239728245598838407\n58203565325359399008402633568948830189458628227828\n80181199384826282014278194139940567587151170094390\n35398664372827112653829987240784473053190104293586\n86515506006295864861532075273371959191420517255829\n71693888707715466499115593487603532921714970056938\n54370070576826684624621495650076471787294438377604\n53282654108756828443191190634694037855217779295145\n36123272525000296071075082563815656710885258350721\n45876576172410976447339110607218265236877223636045\n17423706905851860660448207621209813287860733969412\n81142660418086830619328460811191061556940512689692\n51934325451728388641918047049293215058642563049483\n62467221648435076201727918039944693004732956340691\n15732444386908125794514089057706229429197107928209\n55037687525678773091862540744969844508330393682126\n18336384825330154686196124348767681297534375946515\n80386287592878490201521685554828717201219257766954\n78182833757993103614740356856449095527097864797581\n16726320100436897842553539920931837441497806860984\n48403098129077791799088218795327364475675590848030\n87086987551392711854517078544161852424320693150332\n59959406895756536782107074926966537676326235447210\n69793950679652694742597709739166693763042633987085\n41052684708299085211399427365734116182760315001271\n65378607361501080857009149939512557028198746004375\n35829035317434717326932123578154982629742552737307\n94953759765105305946966067683156574377167401875275\n88902802571733229619176668713819931811048770190271\n25267680276078003013678680992525463401061632866526\n36270218540497705585629946580636237993140746255962\n24074486908231174977792365466257246923322810917141\n91430288197103288597806669760892938638285025333403\n34413065578016127815921815005561868836468420090470\n23053081172816430487623791969842487255036638784583\n11487696932154902810424020138335124462181441773470\n63783299490636259666498587618221225225512486764533\n67720186971698544312419572409913959008952310058822\n95548255300263520781532296796249481641953868218774\n76085327132285723110424803456124867697064507995236\n37774242535411291684276865538926205024910326572967\n23701913275725675285653248258265463092207058596522\n29798860272258331913126375147341994889534765745501\n18495701454879288984856827726077713721403798879715\n38298203783031473527721580348144513491373226651381\n34829543829199918180278916522431027392251122869539\n40957953066405232632538044100059654939159879593635\n29746152185502371307642255121183693803580388584903\n41698116222072977186158236678424689157993532961922\n62467957194401269043877107275048102390895523597457\n23189706772547915061505504953922979530901129967519\n86188088225875314529584099251203829009407770775672\n11306739708304724483816533873502340845647058077308\n82959174767140363198008187129011875491310547126581\n97623331044818386269515456334926366572897563400500\n42846280183517070527831839425882145521
227251250327\n55121603546981200581762165212827652751691296897789\n32238195734329339946437501907836945765883352399886\n75506164965184775180738168837861091527357929701337\n62177842752192623401942399639168044983993173312731\n32924185707147349566916674687634660915035914677504\n99518671430235219628894890102423325116913619626622\n73267460800591547471830798392868535206946944540724\n76841822524674417161514036427982273348055556214818\n97142617910342598647204516893989422179826088076852\n87783646182799346313767754307809363333018982642090\n10848802521674670883215120185883543223812876952786\n71329612474782464538636993009049310363619763878039\n62184073572399794223406235393808339651327408011116\n66627891981488087797941876876144230030984490851411\n60661826293682836764744779239180335110989069790714\n85786944089552990653640447425576083659976645795096\n66024396409905389607120198219976047599490197230297\n64913982680032973156037120041377903785566085089252\n16730939319872750275468906903707539413042652315011\n94809377245048795150954100921645863754710598436791\n78639167021187492431995700641917969777599028300699\n15368713711936614952811305876380278410754449733078\n40789923115535562561142322423255033685442488917353\n44889911501440648020369068063960672322193204149535\n41503128880339536053299340368006977710650566631954\n81234880673210146739058568557934581403627822703280\n82616570773948327592232845941706525094512325230608\n22918802058777319719839450180888072429661980811197\n77158542502016545090413245809786882778948721859617\n72107838435069186155435662884062257473692284509516\n20849603980134001723930671666823555245252804609722\n53503534226472524250874054075591789781264330331690\nEOS\n digits.each_line.inject(0) { |a, e| a + e.to_i }.to_s[0..9].to_i\nend",
"def euler001\n (1...1000).select { |i| i % 3 == 0 or i % 5 == 0 }.reduce(:+)\nend",
"def euler017\n def split_number(num)\n num = num.to_s.rjust(4, '0')\n {\n :thousands => num[0].chr.to_i,\n :hundreds => num[1].chr.to_i,\n :tens => num[2].chr.to_i,\n :ones => num[3].chr.to_i\n }\n end\n\n dict = {\n 1 => 'one', 2 => 'two', 3 => 'three', 4 => 'four', 5 => 'five',\n 6 => 'six', 7 => 'seven', 8 => 'eight', 9 => 'nine', 10 => 'ten',\n 11 => 'eleven', 12 => 'twelve', 13 => 'thirteen', 14 => 'fourteen',\n 15 => 'fifteen', 16 => 'sixteen', 17 => 'seventeen', 18 => 'eighteen',\n 19 => 'nineteen', 20 => 'twenty', 30 => 'thirty', 40 => 'forty',\n 50 => 'fifty', 60 => 'sixty', 70 => 'seventy', 80 => 'eighty',\n 90 => 'ninety', 100 => 'hundred', 1000 => 'thousand'\n }\n\n sum = 0\n (1..1000).each do |x|\n num = split_number(x)\n word = ''\n\n # thousands\n if num[:thousands] > 0\n word << dict[num[:thousands]] + ' ' + dict[1000] + ' '\n end\n\n # hundreds\n if num[:hundreds] > 0\n word << dict[num[:hundreds]] + ' ' + dict[100] + ' '\n end\n\n tens_ones = 10*num[:tens] + num[:ones]\n\n # tens (20-99)\n if (2..9) === num[:tens]\n word << 'and ' if word.length > 0\n word << dict[10*num[:tens]]\n word << dict[num[:ones]] if num[:ones] > 0\n # teens (1-19)\n elsif tens_ones > 0\n word << 'and ' if word.length > 0\n word << dict[tens_ones]\n end\n\n sum += word.gsub(/[^a-z]/, '').length\n end\n\n sum\nend",
"def sum_digits\n number = 2 ** 1000\n number.to_s.split(\"\").inject{|x,y| x.to_i + y.to_i}\nend",
"def p34\n require 'j_math'\n puts \"Working...\"\n ans = ( JMath.factorial_digit_sum_match ).inject(:+)\n puts \"The sum of all numbers which are equal to the sum\"\\\n \"of the factorial of their digits is: #{ ans }.\"\n end",
"def euler65\n vec = []\n val = tmp = den = num = 0 \n (0..98).each do |i|\n val = i%3==1 ? 2*(i/3+1) : 1\n vec.push(val)\n\n den = 1 \n num = vec.last\n vec.reverse.drop(1).each do |r| \n tmp = num;\n num = num * r + den; \n den = tmp;\n end\n \n den = 2 * num + den;\n $ans = den.to_s.split(//).inject(0){|sum,x| sum+x.to_i}\n end\nend",
"def sum_of_factorial_digits(factorial)\r\n\tthe_factorial = (1..factorial).inject(1){ |milti, i| milti * i }\r\n\r\n\treturn the_factorial.to_s.split('').inject(0){ |sum, i| sum + i.to_i }\r\nend",
"def digit_sum\n\t\ttotal = 0 \n\t\tdoubled_digits.map do |i|\n\t\t\tif i.to_s.length > 1\n\t\t\t\ttotal += i.to_s[0].to_i + i.to_s[1].to_i\n\t\t\telse\n\t\t\t\ttotal += i\n\t\t\tend\n\t\tend\n\t\treturn total\n\tend",
"def digit_factorial_sum(num)\n sum = 0\n num.to_s.split(//).each do |digit|\n sum += factorial(digit.to_i)\n end\n return sum\nend",
"def sumDigits(n)\n\tn.abs.to_s.split(//).map(&:to_i).reduce(:+)\nend",
"def euler048\n (1..1000).each.reduce { |a,i| a + i**i }.to_s.slice(-10, 10).to_i\nend",
"def sum \n @new_digits.map! do |number| \n if number >= 10 \n number.to_s.split('').map(&:to_i) \n else \n number\n end \n end\n \n @sum_digits = @new_digits.flatten.reduce(:+) \n\n end",
"def sum(number)\r\n number.digits.sum\r\nend",
"def solve\n (1..1000).map { |n| n**n } # power series\n .inject(&:+) # sum\n .to_s[-10..-1] # last ten digits\nend",
"def sum_digits_89(num)\n r = results[num]\n return r unless r.nil?\n sum = sum_digits(num)\n results[num] = true if sum == 89\n results[num] = false if sum == 1\n sum_digits_89(sum)\n end",
"def sum(number)\n number.to_s.chars.map(&:to_i).sum\nend"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
This returns the API user being used for calls
|
def api_user
@user
end
|
[
"def user_info\n get(api_get.body.identity).body\n end",
"def current_api_user\n User.find_by(auth_headers) if auth_headers.present? \n end",
"def current_api_user\n if params[:api_key]\n User.find_by_api_key params[:api_key]\n else\n current_user\n end\n end",
"def current_user\r\n api_key = ApiKey.active.where(access_token: token).first\r\n if api_key\r\n return api_key.user\r\n else\r\n return nil\r\n end\r\n end",
"def current_user\n api_key = ApiKey.active.where(access_token: token).first\n if api_key\n return api_key.user\n else\n return nil\n end\n end",
"def user\n @user ||= User.new heroku_api, heroku_api.get_user.body\n end",
"def get_user_info\n request :get_user_info\n end",
"def get_user_info\n response = send_method(:get_user_info)\n user_from(response)\n end",
"def user\n return @user ? @user : Dotfu.config_user\n end",
"def current_user\n authentication.account\n end",
"def user\r\n return for_context(nil, false) { |c| c.user }\r\n end",
"def get_user\n @user\n end",
"def full_user_response\n return api_response(\"user-information\")\n end",
"def get_user\n if @options[:user]\n @options[:user]\n elsif @options[:get_user_method]\n send( @options[:get_user_method] )\n elsif self.respond_to? :current_user\n current_user\n elsif not @options[:allow_guests]\n raise( CannotObtainUserObject, \"Couldn't find #current_user or @user, and nothing appropriate found in hash\" )\n end\n end",
"def auth_user_id\n auth[:user_id]\n end",
"def current_user\n current_login_credential\n end",
"def current_user\n return @current_user if @current_user\n\n token = request.headers['X-Auth-Token'].presence || params[:token].presence\n return nil if token.blank?\n @current_user ||= User.find_by_api_token(token)\n end",
"def current_user_id\n info['user']['id']\n end",
"def user(user_name)\n OctocatHerder::User.fetch(user_name, connection)\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
This returns the API key being used for calls
|
def api_key
@key
end
|
[
"def get_apiKey\n return 'wbf3wr553fuaevgju4ekyng9'\n end",
"def api_key; \"055a1100-a84d-4064-84d7-bce46f7b80c8\"; end",
"def api_key\n @api_key ||= Census.installed_key\n end",
"def api_key\n @config.fetch(:api_key)\n end",
"def retrieve_api_key(key_id)\n start.uri('/api/api-key')\n .url_segment(key_id)\n .get()\n .go()\n end",
"def api_key\n api_user && api_user.single_access_token\n end",
"def api_key\n # We are not lazy loading the @api_key in order to preserve the\n # distinction between it and the default (class) key.\n @api_key || self.class.api_key\n end",
"def get_api_key\n enterprise = get_enterprise\n Octo::Authorization.find_by_username(enterprise.name).apikey\n end",
"def api_key\n return unless attributes['api_key']\n\n ::Utilities::Encryption.decrypt(attributes['api_key'])\n end",
"def api_key\n\t\t\t@username || 'unknown'\n\t\tend",
"def api_key\n @api_key ||= config_value.split('-')[1].chomp\n end",
"def api_key\n @api_key || ENV['CODELOCKS_API_KEY'] || (raise CodelocksError.new(\"No API key specified\"))\n end",
"def api_key_id\n @attributes[:api_key_id]\n end",
"def kiss_metrics_api_key\n return Lascivious.api_key\n end",
"def api_key\n request.headers['HTTP_AUTHORIZATION']\n end",
"def api_key\n network_options[:api_key]\n end",
"def clockify_api_key\n clockify_source[\"access_token\"] #|| clockify_source -> Necessary?\n end",
"def api_key\n @api_key ||= ENV['POSTAGEAPP_API_KEY']\n end",
"def access_key_id\n obtain_credentials\n @access_key_id\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
This authenticates with Rackspace and returns the information necessary to make subsequent authenticated calls to the API
|
def authenticate
  raise Rackspace::NotInitialized unless self.initialized?
  headers = RestClient::Request.execute(
    :method       => :get,
    :url          => "#{AUTH_URL}/#{self.api_version}",
    :headers      => {"X-Auth-User" => self.api_user, "X-Auth-Key" => self.api_key},
    :raw_response => true
  ).headers
  {:auth_token            => headers[:x_auth_token],
   :storage_url           => headers[:x_storage_url],
   :server_management_url => headers[:x_server_management_url],
   :cdn_management_url    => headers[:x_cdn_management_url]}
end
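# Hedged usage sketch (illustrative only; `client` stands in for whatever object
# mixes in these methods, and the /servers.json path is just an example): the
# hash returned by #authenticate carries everything later calls need.
#
#   creds = client.authenticate
#   RestClient.get("#{creds[:server_management_url]}/servers.json",
#                  "X-Auth-Token" => creds[:auth_token])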
|
[
"def authenticate\n if credentials[:auth_token] && credentials[:tennant_id]\n credentials\n else\n #puts \"RACKSPACE_AUTHENTICATE\"\n #puts \"RACKSPACE_AUTHENTICATE credentials #{credentials.inspect}\" # TODO: DISABLE THIS BEFORE PRODUCTION!\n resp = submit_request(\n 'https://identity.api.rackspacecloud.com/v2.0/tokens',\n :post,\n {auth:{\"RAX-KSKEY:apiKeyCredentials\" => {username:credentials[:rackspace_username], apiKey:credentials[:rackspace_api_key]}}}.to_json\n )\n @auth_response = resp\n resp = from_json(@auth_response.body)\n credentials[:tennant_id] = resp[\"access\"][\"token\"][\"tenant\"][\"id\"]\n credentials[:auth_token] = resp[\"access\"][\"token\"][\"id\"]\n end\n credentials\n end",
"def auth\n logger.info \"connecting to openstack server #{@hostname} for auth\"\n payload = { \n auth: { \n identity: { \n methods: [\"password\"],\n password: {\n user: {\n domain: {name: @admin_domain},\n name: @admin_user, \n password: @admin_password\n } \n } \n },\n scope: { \n project: { \n domain: { name: @admin_domain },\n name: @admin_project \n } \n } \n }\n }\n authurl=\"#{@auth_url}/auth/tokens?nocatalog\"\n auth_resp = RestClient::Request.execute(method: :post, :url => authurl, headers: {content_type: \"application/json\"}, :verify_ssl => false, :payload => payload.to_json)\n logger.info \"recieved response, returning new token\"\n auth_resp.headers[:x_subject_token]\n end",
"def authenticate\n response = post('login')\n @access_token = response['access-token']\n @client_id = response['client-id']\n end",
"def authenticated_request(options)\n headers = options[:headers] || {}\n headers[\"Authorization\"] = \"Bearer #{@credentials.access_token}\"\n headers[\"Content-Type\"] = \"application/json\"\n options[:headers] = headers\n # TODO: This is just a wrapper around a call to Excon::Connection#request\n # so can be extracted from Compute by passing in the connection,\n # credentials and options\n @connection.request(options)\n end",
"def authenticated_request(options)\n headers = options[:headers] || {}\n headers.merge!(\"Authorization\" => \"OAuth #{@credentials.access_token}\", \"Content-Type\" => \"application/json\")\n options[:headers] = headers\n # TODO This is just a wrapper around a call to Excon::Connection#request\n # so can be extracted from Compute by passing in the connection,\n # credentials and options\n @connection.request(options)\n end",
"def authenticate!\n error!(\"401 Unauthorized\", 401) unless check_auth_token \n end",
"def authenticate\n end",
"def define_rackspace_api\n {\n provider: 'rackspace',\n rackspace_username: UI.prompt('Rackspace Cloud Username'),\n rackspace_api_key: UI.prompt('Rackspace Cloud API key'),\n rackspace_region: define_rackspace_region\n }\n end",
"def auth\n @auth ||= Savon.client(\n wsdl: AUTH_WSDL,\n headers: { 'Authorization' => \"Basic #{@auth_code}\", 'SOAPAction' => [''] },\n env_namespace: :soapenv,\n logger:,\n log: true,\n log_level: @log_level,\n pretty_print_xml: true\n )\n end",
"def do_authenticated_request(method, url, options = {})\n begin\n response = case client\n when BasicAuthClient\n self.class.send(method, api_url(url), options.merge(:basic_auth => client.credentials))\n when OauthClient\n # We have to wrap the result of the access_token request into an HTTParty::Response object\n # to keep duck typing with HTTParty\n result = client.send(method, api_url(url), options)\n content_type = Parser.format_from_mimetype(result.content_type)\n parsed_response = Parser.call(result.body, content_type)\n\n HTTParty::Response.new(nil, result, lambda { parsed_response })\n else\n raise CredentialsMissing\n end\n check_response_codes(response)\n response.parsed_response\n rescue Timeout::Error\n raise Unavailable.new('Service Unavailable')\n end\n end",
"def authenticate(uri)\n request = Net::HTTP::Get.new(uri)\n request['Accept'] = 'application/json'\n request['Authorization'] = \"Bearer #{@session_token}\"\n request\n end",
"def authenticate\n builder = Nokogiri::XML::Builder.new do |xml|\n xml.aaaUser(name: @user, pwd: @password)\n end\n post_url = URI.encode(@baseurl.to_s + '/api/mo/aaaLogin.xml')\n puts 'POST REQUEST', post_url if @debug\n puts 'POST BODY', builder.to_xml if @debug\n puts 'Initiating authentication with APIC'\n response = @client.post(post_url, body: builder.to_xml)\n puts 'POST RESPONSE: ', response.body if @debug\n doc = Nokogiri::XML(response.body)\n fail ApicAuthenticationError,\n sprintf('Authentication error(%s): %s',\n doc.at_css('error')['code'], doc.at_css('error')['text']) \\\n if doc.at_css('error')\n fail ApicErrorResponse,\n sprintf('Unexpected HTTP Error response code(%s): %s',\n response.code, response.body) if response.code != 200\n @auth_cookie = doc.at_css('aaaLogin')['token']\n @refresh_time = doc.at_css('aaaLogin')['refreshTimeoutSeconds']\n end",
"def authenticate\n authed = true\n if(resource.respond_to?(:authenticate, true))\n authed = false\n uname = nil\n password = nil\n if(request.env['HTTP_AUTHORIZATION'])\n auth = Rack::Auth::Basic::Request.new(request.env)\n if(auth.basic? && auth.credentials)\n uname = auth.credentials[0]\n password = auth.credentials[1]\n end\n end\n authed = resource.send(:authenticate, uname, password)\n end\n raise Unauthorized unless authed\n end",
"def authenticate_and_start_getting_data\n response = @connection.waitfor(/(-|Rejected)/)\n raise \"Connection rejected.\" if response.include? 'Rejected'\n response += @connection.cmd({\n \"String\" => @user,\n \"Match\" => /[Pp]ass(?:word|phrase)[:]*\\z/n\n })\n response += @connection.cmd(@password)\n @authenticated = true\n filtered response.split(/\\n|\\r/)\n end",
"def rackspace_api_key\n Fog.credentials[:rackspace_api_key] || get_user_input(\"Enter Rackspace API key\")\nend",
"def authenticate!\n error!('Authorization header is required', :unauthorized) unless headers['Authorization']\n\n @decoded = JsonWebToken.decode(headers['Authorization'])\n if entity_subdomain && entity_subdomain != @decoded[:subdomain]\n error!('Invalid authorization token', :unauthorized)\n end\n\n @current_entity = Entity.find_by!(subdomain: @decoded[:subdomain])\n resource_class = @decoded[:resource_type].constantize\n @current_user = resource_class.find(@decoded[:resource_id])\n rescue ActiveRecord::RecordNotFound => e\n error!('Invalid authorization token', :unauthorized)\n rescue JWT::DecodeError => e\n error!('Invalid authorization token', :unauthorized)\n end",
"def basic_auth\n \"#{username}:#{access_token}\"\n end",
"def authenticate\n auth.call(:authenticate)\n end",
"def authenticate\n builder = Nokogiri::XML::Builder.new do |xml|\n xml.aaaUser(name: @user, pwd: @password)\n end\n post_url = URI::Parser.new.escape(@baseurl.to_s + '/api/mo/aaaLogin.xml')\n puts 'POST REQUEST', post_url if @debug\n puts 'POST BODY', builder.to_xml if @debug\n response = @client.post(post_url, body: builder.to_xml)\n puts 'POST RESPONSE: ', response.body if @debug\n doc = Nokogiri::XML(response.body)\n fail ApicAuthenticationError, 'Authentication error(%s): %s' % [doc.at_css('error')['code'], doc.at_css('error')['text']] \\\n if doc.at_css('error')\n fail ApicErrorResponse, 'Unexpected HTTP Error response code(%s): %s' % [response.code, response.body] if response.code != 200\n @auth_cookie = doc.at_css('aaaLogin')['token']\n @refresh_time = doc.at_css('aaaLogin')['refreshTimeoutSeconds']\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
This returns the available versions of the API
|
def versions
JSON.parse(RestClient.get("#{VERSION_URL}/.json", self.default_headers))["versions"].collect { |v| v["id"] }.uniq
end
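# Illustrative only: given a body like {"versions":[{"id":"v1.0"},{"id":"v1.1"}]},
# this returns ["v1.0", "v1.1"]; duplicate ids across entries are dropped by #uniq.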
|
[
"def known_api_versions\n return ['2']\n end",
"def fetch_api_versions\n raw, _, st = kubectl.run(\"api-versions\", attempts: 5, use_namespace: false)\n # The \"core\" group is represented by an empty string\n versions = { \"\" => %w(v1) }\n if st.success?\n rows = raw.split(\"\\n\")\n rows.each do |group_version|\n group, version = group_version.split(\"/\")\n versions[group] ||= []\n versions[group] << version\n end\n end\n versions\n end",
"def api_versions\n api_versions = []\n\n resource_client('').apis.groups.each do |api_group|\n api_versions << api_group.preferredVersion.groupVersion\n end\n\n api_versions << 'v1'\n\n api_versions\n end",
"def list_api_versions(namespace)\n url = build_url(namespace)\n response = rest_get(url)\n JSON.parse(response)['resourceTypes'].first['apiVersions']\n end",
"def available_versions\n if unified_backend?\n Mixlib::Install.new(\n product_name: project_name,\n channel: channel_name.to_sym\n ).available_versions\n else\n data = nil\n begin\n data = bintray_backend.bintray_get(\"/#{channel_name}/#{project_name}\")\n rescue Net::HTTPServerException => e\n # bintray returns 404 when there is no available versions for a\n # given product & channel\n if e.response.code == \"404\"\n puts \"No available versions for '#{project_name}' - '#{channel_name}'\"\n else\n raise e\n end\n end\n\n data.nil? ? [ ] : data[\"versions\"]\n end\n end",
"def versions\n request_str = \"/gallery/appliances/#{id.to_i}/versions\"\n response = GenericRequest.new(self.class.studio_connection).get request_str\n tree = XmlSimple.xml_in response, \"ForceArray\" => [\"version\"]\n return tree[\"appliance\"][\"versions\"][\"version\"]\n end",
"def versions(id)\n @connection.get(\"/v_beta/apps/#{id}/versions\")\n end",
"def versions\n registered_versions.keys\n end",
"def list_all_aos_versions(args = {}) \n get(\"/aosversions.json/all\", args)\nend",
"def versions\n Version.all\n end",
"def available_versions\n Mixlib::Install.available_versions(project, channel)\n end",
"def list_api_versions(namespace, service_name)\n url = build_url(namespace)\n response = rest_get(url)\n JSON.parse(response)['resourceTypes'].find{ |type| type['resourceType'].casecmp(service_name) == 0 }['apiVersions']\n rescue NoMethodError\n raise ArgumentError, \"unable to find data for the '#{namespace}/#{service_name}' resource type\"\n end",
"def api_version\n request('getAPIVersion')\n end",
"def supported_versions\n @supported_versions\n end",
"def list_active_aos_versions(args = {}) \n get(\"/aosversions.json/\", args)\nend",
"def api_info(req)\n client.request(req) do |response|\n json_response = ::RHC::Json.decode(response.content)\n [ json_response['supported_api_versions'], json_response['api_version'] || json_response['version'].to_f, json_response['data'] ]\n end\n end",
"def versions(options = { page: 1, per_page: 20 })\n @app.client.model_versions @id, options\n end",
"def query_version\n response = query_api(VERSION_ENDPOINT)\n response.body\n end",
"def get_product_supported_versions(product_code)\n path = \"/d2l/api/#{product_code}/versions/\"\n _get(path)\nend"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
This caches the authentication response for subsequent usage
|
def auth_response
@auth_response ||= self.authenticate
end
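# Note: thanks to the ||= memoization, only the first call (or the first call
# after @auth_response is cleared on a 401 in #http below) actually hits the
# auth endpoint; subsequent calls reuse the cached hash.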
|
[
"def refresh(_)\n authenticate\n end",
"def refresh\n @cached_credentials = nil\n end",
"def retry_with_auth(response)\n @already_tried_with_auth = true\n logger.info(\"Authentication Required. Retrying with auth info\")\n accessor.auth_manager.associate_auth_info(response)\n add_credentials!\n @body.rewind if @body # Its a stringIO, and we already fed it to the adapter once, so rewind it when we try again\n response = fetch_response\n end",
"def cached_response\n response = cache.read(@api_request.cache_key)\n build_response(response) unless response.nil?\n end",
"def cache_token(auth_token)\n @api.set_auth_token(auth_token)\n @auth_token = auth_token\n end",
"def handle_authentication_response(response)\n result = { }\n\n response.body.each_line do |line|\n if line.include? '='\n line.chomp!\n key, value = line.split('=')\n result[key] = value\n end\n end\n\n result\n end",
"def handle_auth_response(response)\n @auth_token = {}\n\n # TODO: Fix this to handle error cases\n begin\n response = JSON.parse(response.body)\n @auth_token[:token] = response[\"token\"]\n @auth_token[:expires_at] = DateTime.parse(response[\"expires_at\"])\n rescue JSON::ParserError, TypeError\n @auth_token = {}\n end\n end",
"def authentication_response\n if authenticate_user.success?\n { :token => authenticate_user.result }\n else\n { :error => :unauthorized }\n end\n end",
"def authenticate(req, res)\n unless result = @mutex.synchronize{ _authenticate(req, res) }\n challenge(req, res)\n end\n if result == :nonce_is_stale\n challenge(req, res, true)\n end\n return true\n end",
"def cache\n @cache ||=\n Garm::AuthorizationCache.new(self)\n end",
"def refresh_login_hash\n self.login_hash = unique_hash\n end",
"def authenticate\n authenticated_session || render_unauthorized\n end",
"def refresh_authorization\n # Handled implicitly by auth lib, here in case need to override\n logger.debug('Retrying after authentication failure')\n end",
"def live_response\n if response = api_client.fetch(@api_request)\n cache.write(@api_request.cache_key, response)\n build_response(response)\n end\n end",
"def authenticate\n raise Rackspace::NotInitialized unless self.initialized?\n headers = RestClient::Request.execute(:method => :get, :url => \"#{AUTH_URL}/#{self.api_version}\", :headers => {\"X-Auth-User\" => self.api_user, \"X-Auth-Key\" => self.api_key}, :raw_response => true).headers\n {:auth_token => headers[:x_auth_token], :storage_url => headers[:x_storage_url], :server_management_url => headers[:x_server_management_url], :cdn_management_url => headers[:x_cdn_management_url]}\n end",
"def fetch\n @lock.synchronize do\n @credentials = yield if @credentials.nil? || @credentials.expired?\n @credentials\n end\n end",
"def update_auth(response)\r\n auth = response.at('auth')\r\n user = auth.at('user')\r\n \r\n @perms = auth.at('perms').inner_text.to_sym\r\n @token = auth.at('token').inner_text\r\n @user_id = user['nsid']\r\n @user_name = user['username']\r\n @user_fullname = user['fullname']\r\n end",
"def authenticate!\n Rails.logger.debug \"[AuthN] [#{self.class}] Authenticating ...\"\n\n if OPTIONS.block_all\n fail! 'BlockAll for DummyStrategy is active!'\n return\n end\n\n user = Hashie::Mash.new\n user.auth!.type = OPTIONS.fake_type || 'dummy'\n\n case user.auth.type\n when 'dummy', 'basic'\n user.auth!.credentials!.username = OPTIONS.fake_username || 'dummy_user'\n user.auth!.credentials!.password = OPTIONS.fake_password || 'dummy_password'\n user.identity = user.auth.credentials.username\n when 'x509', 'voms'\n user.auth!.credentials!.client_cert_dn = OPTIONS.fake_client_cert_dn || 'dummy_cert_dn'\n user.auth!.credentials!.client_cert = OPTIONS.fake_client_cert || 'dummy_cert'\n user.auth!.credentials!.client_cert_voms_attrs = OPTIONS.fake_voms_attrs || {}\n user.auth!.credentials!.issuer_cert_dn = OPTIONS.fake_issuer_cert_dn || 'dummy_issuer_cert_dn'\n user.auth!.credentials!.verification_status = OPTIONS.fake_verification_status || 'SUCCESS'\n user.identity = user.auth.credentials.client_cert_dn\n else\n user.identity = 'unknown'\n user.auth!.credentials = {}\n end\n\n Rails.logger.debug \"[AuthN] [#{self.class}] Authenticated #{user.to_hash.inspect}\"\n success! user.deep_freeze\n end",
"def cache_response(response=nil)\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
This is the auth token provided by Rackspace after a successful authentication
|
def auth_token
self.auth_response[:auth_token]
end
|
[
"def auth_token\n Keycard::Token.rfc7235(safe(\"HTTP_AUTHORIZATION\"))\n end",
"def auth_token\n @auth_token ||= request.headers.fetch('Authorization', '').split(' ').last\n end",
"def auth_token\n generate_auth_token if @auth_token.nil?\n return @auth_token\n end",
"def auth_token=(value); end",
"def auth_token\n auth_token_for(DEFAULT_AUTH_TOKEN_KEY)\n end",
"def auth_token\n request.env['HTTP_X_GEOTIX_AUTH_TOKEN']\n end",
"def auth_token\n return if auth_config[:strategy] != :bearer || auth_config[:class].blank?\n @auth_token ||= auth_config[:class].encode\n end",
"def auth_token\n return regenerate_auth_token if expired?\n\n authentication.auth_token\n end",
"def token \n request.headers['Authorisation']\n end",
"def set_auth_token\n\t\t\treturn if auth_token.present?\n\t\t\tself.auth_token = generate_token(\"auth_token\",2)\n\t\tend",
"def auth_header_token\n cookies[CONSTANTS['authorization-cookie']]\n end",
"def auth_header\n @token.empty? ? '' : \"cobSession=#{@token}\"\n end",
"def auth\n Auth.decode(token)\n end",
"def authentication_token\n @authentication_token ||= JWT.encode(payload, secret, algorithm)\n end",
"def new_auth_token()\n query = { action: 'authToken', apikey: @apikey, secretkey: @secretkey }\n res = HTTParty.get(@endpoint, query: query)\n parse_response(res).fetch(\"authToken\")\n end",
"def token\n t = [@options[:username], @options[:password]].join(':')\n Base64.encode64(t).delete(\"\\r\\n\")\n end",
"def retrieve_auth_token\n http = Net::HTTP.new(auth_endpoint.host, auth_endpoint.port)\n\n request = Net::HTTP::Post.new(auth_endpoint.request_uri)\n\n request.basic_auth(\n TodoableApi.configuration.username,\n TodoableApi.configuration.password\n )\n\n handle_auth_response(http.request(request))\n end",
"def current_auth_token\n Thread.current[:usergrid_auth_token]\n end",
"def auth\n ActionController::HttpAuthentication::Token\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
This performs a basic POST request using the supplied URL, payload and headers
|
def post(url, payload = {}, headers = {})
http :post, "#{url}.json", payload.to_json, headers
end
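# Hedged usage sketch (illustrative payload; the exact fields depend on the
# endpoint being called). The same pattern applies to #put below:
#
#   post("#{auth_response[:server_management_url]}/servers",
#        :server => { :name => "web1", :imageId => 112, :flavorId => 2 })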
|
[
"def post url, body, headers = {}\n http_request(url, Net::HTTP::Post, body, headers)\n end",
"def make_post_request url, body, headers = []\n make_request url, method: ::Rack::POST, body: body, headers: headers\n end",
"def post path, payload, request_headers = {}\n post_headers = headers request_headers\n post_request = Net::HTTP::Post.new path, post_headers\n post_request.body = payload\n\n connection_do post_request\n end",
"def post(url, data, headers = {})\n request(:post, url, headers, :data => data)\n end",
"def post(payload)\n response = http(config.api_uri).request(zipped_post(config.api_uri, payload))\n if response.code.to_i >= 200 && response.code.to_i < 300\n response\n else\n raise \"HTTP Error: #{response.code} #{config.api_uri}\"\n end\n end",
"def post(url, endpoint, auth_token, payload)\n uri = URI.parse(\"#{url}\")\n http = Net::HTTP.new(uri.host, uri.port)\n http.read_timeout = 60 * 5\n if url.include?('https://')\n http.use_ssl = true\n http.verify_mode = OpenSSL::SSL::VERIFY_NONE\n end\n request = Net::HTTP::Post.new(\"#{BASE_API}#{endpoint}?auth_token=#{auth_token}\")\n request.add_field('Content-Type', 'application/json')\n request.body = payload.to_json\n http.request(request)\nend",
"def post(url, post_vars={})\n send_request url, post_vars, 'POST'\n end",
"def dropbox_post(url, headers: {}, payload: nil)\n raise ArgumentError, \"missing keyword: payload\" unless payload\n\n require \"net/https\"\n require \"uri\"\n\n headers = {\n \"Authorization\" => \"Bearer #{config[\"token\"]}\",\n \"Content-Type\" => \"application/json\"\n }.merge(headers)\n\n unless payload.nil? || payload.is_a?(String)\n payload = JSON.generate(payload)\n end\n\n uri = URI.parse(url)\n http = Net::HTTP.new(uri.host, uri.port)\n http.use_ssl = uri.scheme == \"https\"\n\n http.post(uri.path, payload, headers).tap do |response|\n log { \"POST #{url}\" }\n\n unless payload.nil? || payload.is_a?(String)\n log { \" PAYLOAD --> #{payload}\" }\n end\n\n log { \" RESPONSE --> #{response.body}\" }\n\n return yield response\n end\n end",
"def post payload, path = \"\" \n make_request(path, \"post\", payload)\n end",
"def post(url, options)\n headers = options[:headers] || {}\n params = options[:params] || {}\n req = Net::HTTP::Post.new(url)\n req = request_with_headers(req, headers)\n request_with_params(req, params)\n end",
"def post url, object = nil\n request url, HTTP::Post, object\n end",
"def post_request(url, hash={})\n req = Net::HTTP::Post.new(url.path)\n req.set_form_data(hash)\n req \n end",
"def post(uri:, payload:, headers: {}, limit: redirect_limit)\n do_post_or_put(method: :post, uri: uri, payload: payload, headers: headers, limit: limit, timeout: timeout)\n end",
"def http_post(url = nil, body = nil, headers = {}, params = {})\n block = Proc.new if block_given?\n http_method :post, url, body, headers, params, &block\n end",
"def post_request endpoint, opts={}, &block\n connection.post endpoint, opts, &block\n end",
"def post endpoint, data\n do_request :post, endpoint, data\n end",
"def send_post(url,input_body,headers,http=nil)\n request = Net::HTTP::Post.new(url)\n http = connect(@cfg['test_host'], @cfg['port']) if http==nil\n #Headers handling\n if headers != nil\n headers.each do |h|\n request.add_field h[0], h[1]\n end\n end\n\n #Body and response\n request.body = input_body\n request, response = send_request(http,request)\n return request, response\n\n end",
"def post(url)\n authsub_http_request(Net::HTTP::Post,url)\n end",
"def post_json(url:, params: {}, headers: {})\n post url, params: params, headers: headers(additional_headers: headers)\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
This performs a basic PUT request using the supplied URL, payload and headers
|
def put(url, payload = {}, headers = {})
http :put, "#{url}.json", payload.to_json, headers
end
|
[
"def _put(url=\"\", params={}, headers={}, payload)\n\t\tif !params.empty? then\n\t\t\theaders[:params] = params\n\t\tend\n\t\tresponse = RestClient.put(url, payload, headers)\n\t\thandle_response(response)\n\tend",
"def put url, body, headers = {}\n http_request(url, Net::HTTP::Put, body, headers)\n end",
"def put path, payload, request_headers = {}\n put_headers = headers request_headers\n put_request = Net::HTTP::Put.new path, put_headers\n put_request.body = payload\n\n connection_do put_request\n end",
"def put(url, endpoint, auth_token, payload)\n uri = URI.parse(\"#{url}\")\n http = Net::HTTP.new(uri.host, uri.port)\n http.read_timeout = 60 * 5\n if url.include?('https://')\n http.use_ssl = true\n http.verify_mode = OpenSSL::SSL::VERIFY_NONE\n end\n request = Net::HTTP::Put.new(\"#{BASE_API}#{endpoint}?auth_token=#{auth_token}\")\n request.add_field('Content-Type', 'application/json')\n request.body = payload.to_json\n puts \"PAYLOAD: #{request.body}\"\n http.request(request)\nend",
"def put(url, data, headers = {})\n request(:put, url, headers, :data => data)\n end",
"def put payload, path = \"\"\n make_request(path, \"put\", payload)\n end",
"def put(url, vars={})\n send_request url, vars, 'PUT'\n end",
"def put url, object = nil\n request url, HTTP::Put, object\n end",
"def expects_put(url, payload, headers = {})\n RestClient.expects(:put).with(url, payload.to_json, default_headers.merge(headers))\n end",
"def put(uri:, payload:, headers: {}, limit: redirect_limit)\n do_post_or_put(method: :put, uri: uri, payload: payload, headers: headers, limit: limit, timeout: timeout)\n end",
"def put(path, params={}); make_request(:put, host, port, path, params); end",
"def put(uri, params = {})\n send_request(uri, :put, params)\n end",
"def put(url)\n authsub_http_request(Net::HTTP::Put,url)\n end",
"def put endpoint, data\n do_request :put, endpoint, data\n end",
"def request_put(path, content = nil, headers = {})\n http = Net::HTTP.new(REST_ENDPOINT, @options[:ssl] ? 443 : 80)\n \n http.use_ssl = @options[:ssl]\n http.verify_mode = OpenSSL::SSL::VERIFY_NONE\n \n http.start do |http|\n req = sign_request(Net::HTTP::Put.new(path), content, headers)\n \n if content.is_a?(IO)\n req.body_stream = content\n else\n req.body = content\n end\n \n response = http.request(req)\n \n return response\n end\n end",
"def put(href, additional_parameters = {})\n rest_connect do |base_uri, headers|\n href = \"#{base_uri}/#{href}\" unless begins_with_slash(href)\n new_path = URI.escape(href)\n req = Net::HTTP::Put.new(new_path, headers) \n req.set_content_type('application/json')\n req.body = additional_parameters.to_json\n req\n end\n end",
"def put_request(_context, endpoint, data)\n Puppet.runtime[:http].put(\n build_uri(endpoint),\n Puppet::Util::Json.dump(data),\n headers: {\n 'Content-Type' => 'application/json'\n },\n options: build_options,\n )\n end",
"def send_put_request endpoint, params={}, api_key=nil, ssl=false\n uri = URI.parse(endpoint)\n\n Net::HTTP.start(uri.host, uri.port) do |http|\n http.use_ssl = true if ssl\n request = Net::HTTP::Put.new(uri.request_uri)\n request['authorization'] = \"Token token=#{api_key}\" if api_key\n request.set_form_data(params)\n http.request request\n end\n end",
"def http_put(url, data, initheader = nil, &block)\n uri =uri(url)\n res = Net::HTTP.start(uri.hostname, uri.port) do |http|\n http.request_put(uri, data, initheader, &block)\n end\n res.body\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
This performs an HTTP call with the specified method and arguments. If the response indicates the request was unauthorized, it reauthenticates and retries the same request once (in case the auth token has expired)
|
def http(method, *args)
  # The last argument is the headers hash; fold in the default headers and the
  # current auth token before handing the call off to RestClient.
  args.last.merge!(self.default_headers).merge!("X-Auth-Token" => self.auth_token)
  response = RestClient.send(method, *args)
  @retried = false
  response
rescue RestClient::Unauthorized
  # Drop the cached auth response so the next auth_token call reauthenticates,
  # then retry the request once before giving up.
  @auth_response = nil
  if @retried
    raise
  else
    @retried = true
    retry
  end
end
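# A generic, self-contained sketch of the same retry-once-on-401 pattern
# (illustrative helper, not part of the library above; assumes rest-client is loaded):
def with_reauth
  retried = false
  begin
    yield
  rescue RestClient::Unauthorized
    raise if retried
    retried = true
    @auth_response = nil # drop cached credentials so auth_token reauthenticates
    retry
  end
end
# Example: with_reauth { RestClient.get(url, "X-Auth-Token" => auth_token) }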
|
[
"def execute(method, path, **options)\n uri = URI(\"#{@base_url}/#{path}\")\n\n case method.to_sym\n when :get\n req = Net::HTTP::Get.new(uri)\n when :post\n req = Net::HTTP::Post.new(uri)\n when :put\n req = Net::HTTP::Put.new(uri)\n when :delete\n req = Net::HTTP::Delete.new(uri)\n else\n raise ARestException, \"Unknown method: #{method}\"\n end\n\n req.form_data = options[:form_data] if options[:form_data]\n\n headers = options[:headers] || @headers \n headers.each { |k,v| req[k] = v } if headers\n\n auth_user = options[:auth_user] || @auth_user\n auth_password = options[:auth_password] || @auth_password\n req.basic_auth auth_user, auth_password if auth_user && auth_password\n\n token = options[:token] || @auth_token\n if token\n req[\"Authorization\"] = \"Token token=#{token}\"\n end\n\n res = Net::HTTP.start(uri.hostname, uri.port, use_ssl: uri.scheme == 'https') do |http|\n http.request(req)\n end\n res\n end",
"def api_execute(path, method, options = {})\n params = options[:params]\n case method\n when :get\n req = build_http_request(Net::HTTP::Get, path, params)\n when :post\n req = build_http_request(Net::HTTP::Post, path, nil, params)\n when :put\n req = build_http_request(Net::HTTP::Put, path, nil, params)\n when :delete\n req = build_http_request(Net::HTTP::Delete, path, params)\n else\n fail \"Unknown http action: #{method}\"\n end\n http = Net::HTTP.new(host, port)\n http.read_timeout = options[:timeout] || read_timeout\n setup_https(http)\n req.basic_auth(user_name, password) if [user_name, password].all?\n Log.debug(\"Invoking: '#{req.class}' against '#{path}\")\n res = http.request(req)\n Log.debug(\"Response code: #{res.code}\")\n Log.debug(\"Response body: #{res.body}\")\n process_http_request(res)\n end",
"def api_execute(path, method, options = {})\n params = options[:params]\n case method\n when :get\n req = build_http_request(Net::HTTP::Get, path, params)\n when :post\n req = build_http_request(Net::HTTP::Post, path, nil, params)\n when :put\n req = build_http_request(Net::HTTP::Put, path, nil, params)\n when :delete\n req = build_http_request(Net::HTTP::Delete, path, params)\n else\n fail \"Unknown http action: #{method}\"\n end\n timeout = options[:timeout] || @read_timeout\n http = Net::HTTP.new(host, port)\n http.read_timeout = timeout\n setup_https(http)\n req.basic_auth(user_name, password) if [user_name, password].all?\n Log.debug(\"Invoking: '#{req.class}' against '#{path}\")\n res = http.request(req)\n Log.debug(\"Response code: #{res.code}\")\n process_http_request(res, req, params)\n end",
"def call(method, params={})\n if using_oauth? && params[:auth]\n params[:format] = 'json'\n params[:nojsoncallback] = '1'\n params.delete :auth\n \n if params[:post]\n params['method'] = method\n rsp = FlickrResponse.new @access_token.post(@rest_endpoint, params, {}).body\n else\n rsp = FlickrResponse.new @access_token.get(method_url(method, params, false)).body\n end\n else\n if params && params[:post]\n rsp = FlickrResponse.new Net::HTTP.post_form(URI.parse(@rest_endpoint), post_params(method, params)).body\n else\n url = method_url(method, params)\n rsp = FlickrResponse.new Net::HTTP.get(URI.parse(url))\n end\n end\n \n if @options[:raise_exception_on_error] && rsp.error?\n raise RuntimeError, rsp\n end\n \n rsp\n end",
"def do_authenticated_request(method, url, options = {})\n begin\n response = case client\n when BasicAuthClient\n self.class.send(method, api_url(url), options.merge(:basic_auth => client.credentials))\n when OauthClient\n # We have to wrap the result of the access_token request into an HTTParty::Response object\n # to keep duck typing with HTTParty\n result = client.send(method, api_url(url), options)\n content_type = Parser.format_from_mimetype(result.content_type)\n parsed_response = Parser.call(result.body, content_type)\n\n HTTParty::Response.new(nil, result, lambda { parsed_response })\n else\n raise CredentialsMissing\n end\n check_response_codes(response)\n response.parsed_response\n rescue Timeout::Error\n raise Unavailable.new('Service Unavailable')\n end\n end",
"def call_url address, method = :get, args = {}, username = \"\", password = \"\", default = \"\", timeout = 30\n begin\n uri = URI.parse address\n http = Net::HTTP.new uri.host, uri.port\n # See http://notetoself.vrensk.com/2008/09/verified-https-in-ruby/.\n if uri.is_a? URI::HTTPS\n http.use_ssl = true\n http.verify_mode = OpenSSL::SSL::VERIFY_NONE\n end\n # Creates an HTTP query based upon the type of method provided.\n case method\n when :get\n uri.query = URI.encode_www_form args.reduce([]) { |k, v| k << v }\n request = Net::HTTP::Get.new uri.request_uri\n when :post\n request = Net::HTTP::Post.new uri.request_uri\n request.set_form_data args\n else\n log_failure \"HTTP method #{method} not supported :`(\"\n return default\n end\n # Applies HTTP Basic authentication to request if desired by user.\n unless [username, password].all? &:empty?\n request.basic_auth username, password\n end\n # Obtains HTTP response, bounded within user-supplied timeout.\n response = Timeout::timeout(timeout) do\n http.request request\n end\n if not response.is_a? Net::HTTPSuccess\n log_failure \"Non-200 response code after #{method}: #{address}, #{args}\"\n log_failure \"Code: #{response.code}, Body:\\n#{response.body}\"\n end\n return response.body, response.code\n rescue Timeout::Error\n log_failure \"Timeout reached during HTTP #{method} to #{address}, #{args}\"\n return default, nil\n # See https://gist.github.com/245188\n rescue StandardError => e\n log_failure \"Error during HTTP #{method} to #{address}, #{args}: #{e}\"\n return default, nil\n end\n end",
"def request(_method, *args)\n catch_error do\n if self.class.interceptor\n self.class.interceptor.call(_method, *args)\n else\n Metaname::OriginalAPI.method_missing(_method, *args)\n end\n end\n end",
"def request(http_method: :get, path:'/', body: nil, query: {}, headers: {}, response_block: nil, expects: [200, 201, 204], host: nil, port: nil, auth: true, gzip: true)\n\n retried ||= false\n\n if auth && token_expired?\n raise Excon::Error::Unauthorized, \"Token expired or not valid, you need to login again, use: kontena #{token_is_for_master? ? \"master\" : \"cloud\"} login\"\n end\n\n request_headers = request_headers(headers, auth: auth, gzip: gzip)\n\n if body.nil?\n body_content = ''\n request_headers.delete(CONTENT_TYPE)\n else\n body_content = encode_body(body, request_headers[CONTENT_TYPE])\n request_headers.merge!('Content-Length' => body_content.bytesize)\n end\n\n uri = URI.parse(path)\n host_options = {}\n\n if uri.host\n host_options[:host] = uri.host\n host_options[:port] = uri.port\n host_options[:scheme] = uri.scheme\n path = uri.request_uri\n else\n host_options[:host] = host if host\n host_options[:port] = port if port\n end\n\n request_options = {\n method: http_method,\n expects: Array(expects),\n path: path_with_prefix(path),\n headers: request_headers,\n body: body_content,\n query: query\n }.merge(host_options)\n\n request_options.merge!(response_block: response_block) if response_block\n\n # Store the response into client.last_response\n @last_response = http_client.request(request_options)\n\n parse_response(@last_response)\n rescue Excon::Error::Unauthorized\n if token\n debug { 'Server reports access token expired' }\n\n if retried || !token || !token['refresh_token']\n raise Kontena::Errors::StandardError.new(401, 'The access token has expired and needs to be refreshed')\n end\n\n retried = true\n retry if refresh_token\n end\n raise Kontena::Errors::StandardError.new(401, 'Unauthorized')\n rescue Excon::Error::HTTPStatus => error\n if error.response.headers['Content-Encoding'] == 'gzip'\n error.response.body = Zlib::GzipReader.new(StringIO.new(error.response.body)).read\n end\n\n debug { \"Request #{error.request[:method].upcase} #{error.request[:path]}: #{error.response.status} #{error.response.reason_phrase}: #{error.response.body}\" }\n\n handle_error_response(error.response)\n end",
"def request(method, *args)\n defaults = {\n :api_key => @api_key,\n :auth_key => @auth\n }\n\n # Get parameters\n if args.last.kind_of? Hash\n params = args.pop\n else\n params = {}\n end\n\n # Construct query fragment\n query = defaults.merge(params).inject('?') { |s, (k, v)|\n if v.kind_of? Array\n v = v.join(',')\n end\n s << \"#{Vodpod::escape(k)}=#{Vodpod::escape(v)}&\"\n }[0..-2]\n\n # Join path fragments\n path = Vodpod::BASE_URI + args.map{|e| Vodpod::escape(e)}.join('/') + '.json'\n\n begin\n # Get URI\n case method\n when :get\n # GET request\n uri = URI.parse(path + query)\n res = Net::HTTP.start(uri.host, uri.port) do |http|\n http.open_timeout = @timeout\n http.read_timeout = @timeout\n http.get(uri.path + query)\n end\n when :post\n # POST request\n uri = URI.parse(path)\n res = Net::HTTP.start(uri.host, uri.port) do |http|\n http.open_timeout = @timeout\n http.read_timeout = @timeout\n http.post(uri.path, query[1..-1])\n end\n else\n # Don't know how to do that kind of request\n raise Error.new(\"Unsupported request method #{method.inspect}; should be one of :get, :post.\")\n end\n rescue => e\n raise Error.new(\"Error retrieving #{uri.path}#{query}: #{e.message}\")\n end\n\n # Parse response as JSON\n begin\n data = JSON.parse res.body\n rescue => e\n raise Error, \"server returned invalid json: #{e.message}\" + \"\\n\\n\" + res\n end\n\n # Check for errors\n if data[0] == false\n raise Error, data[1]['message']\n end\n\n # Return data section\n data[1]\n end",
"def _call\n protocol = Allorails.config.network_protocol\n server = Allorails.config.host\n timeout = Allorails.config.network_timeout.to_f\n\n port = protocol == 'https' ? 443 : 80\n uri = URI(protocol + '://' + server + ApiRequest::API_PATH + _path)\n method = _is_http_post ? 'POST' : 'GET'\n headers = {\n \"Content-Type\" => \"application/x-www-form-urlencoded; charset=utf-8\",\n \"User-Agent\" => \"Allopass-ApiKit-AlloRails\"\n }\n \n # use a proxy?\n use_proxy = false\n http_class = if use_proxy then Net::HTTP::Proxy('127.0.0.1', 9999) else Net::HTTP end\n\n # prepare and send HTTP request\n http_class.start(uri.host, port, :use_ssl => uri.scheme == 'https') do |http|\n \n if method == 'GET'\n uri.query = _encode_parameters\n req = http_class::Get.new uri.request_uri\n else\n #uri.query = _encode_parameters\n req = http_class::Post.new uri.request_uri\n req.body = _encode_parameters\n end \n \n # set headers\n headers.each_pair{|k, v| req[k] = v}\n \n # send the request and see if successful\n case res = http.request(req)\n when Net::HTTPSuccess then return [res.to_hash, res.body]\n else raise Allorails::ApiUnavailableResourceError, \"Request failed: #{res.body}\"\n end\n end\n end",
"def call()\ntestconnection = SimpleHTTP.new()\n response = testconnection.request() #pulls back your status code - 200 is success, 401 = access denied which means you are calling correct\n #base url, but something could be wrong with credentials or path.\n puts response\n end",
"def perform_request(method, path, params={}, body=nil, headers=nil)\n client.perform_request method, path, params, body, headers\n end",
"def call(method, params=nil)\n if params && params[:post]\n rsp = FlickrResponse.new Net::HTTP.post_form(URI.parse(@rest_endpoint), post_params(method, params)).body\n else\n url = method_url(method, params)\n rsp = FlickrResponse.new Net::HTTP.get(URI.parse(url))\n end\n \n if @options[:raise_exception_on_error] && rsp.error?\n raise RuntimeError, rsp\n end\n \n rsp\n end",
"def call(method, *arguments)\n raw_call(method.to_s, @apikey, *arguments)\n end",
"def send_request(api_method, http_method = 'GET', data = nil)\n if data.is_a?(Hash)\n data['api_username'] = @api_username\n data['api_password'] = @api_password\n data['api_key'] = @api_key\n\n post_data = data.map{|k,v| \"#{CGI::escape(k)}=#{CGI::escape(v)}\"}.join('&')\n else\n post_data = sprintf(\n \"api_username=%s&api_password=%s&api_key=%s&%s\",\n CGI::escape(@api_username),\n CGI::escape(@api_password),\n CGI::escape(@api_key),\n data\n )\n end\n\n uri = URI.parse(\"#{@api_url}/v#{@version}/client#{api_method}\")\n http = Net::HTTP.new(uri.host, uri.port)\n http.use_ssl = true\n http.verify_mode = OpenSSL::SSL::VERIFY_NONE\n\n request = http_method == 'GET' ? Net::HTTP::Get.new(uri.request_uri) : Net::HTTP::Post.new(uri.request_uri)\n request.body = !data.nil? ? post_data : ''\n\n request.initialize_http_header({\n \"User-Agent\" => \"abenity/abenity-ruby v1\"\n })\n\n response = http.request(request)\n\n return parse_response(response.body)\n end",
"def do_request(method, uripath, options = {})\n\n req = nil\n\n case method\n when 'GET'\n req = Net::HTTP::Get.new(get_request_path(uripath))\n when 'PUT'\n req = Net::HTTP::Put.new(get_request_path(uripath))\n when 'POST'\n req = Net::HTTP::Post.new(get_request_path(uripath))\n when 'DELETE'\n req = Net::HTTP::Delete.new(get_request_path(uripath))\n else\n error(nil, \"Unrecognized HTTP method #{method}\")\n end\n\n if options[:body]\n req.body = options[:body]\n end\n\n get_request_headers(options).each { |h,v| req.add_field(h, v) }\n resp = @http.request(req)\n [Integer(resp.code), resp.message, resp, resp.body]\n end",
"def request(method, path, opts)\n response = connection.send(method) do |request|\n request.url(path, opts)\n request.options[:timeout] = timeout\n request.options[:open_timeout] = open_timeout\n\n unless client_id.nil?\n request.headers['Authorization'] = \"Client #{client_id}\"\n end\n end\n\n response.body\n end",
"def perform_request(request_method, path, options = {})\n Teamsupport::REST::Request.new(self, request_method, path, options).perform\n end",
"def perform_request(method, url, params={})\n begin\n response = case method\n when :get\n get(url)\n when :post\n post(url, params)\n when :put\n put(url, params)\n when :delete\n delete(url)\n end\n parse_response(response)\n rescue Exception => e\n raise ClickSendError, e.message\n end\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
create command for add helm repository
|
def do_add_repo(repository,mirror_enabled,mirror_url,mirror_ca_cert)
name = repository['name']
url = repository['url']
if !mirror_enabled
cmd = "helm repo add #{name} #{url} "
else
if mirror_ca_cert == ""
cmd = "helm repo add #{name} #{mirror_url} "
else
cmd = "helm repo add --ca-file=/var/vcap/store/action/config/mirror_ca_cert.pem #{name} #{mirror_url} "
end
end
"#{cmd};helm repo update"
end
|
[
"def add_repository(definition)\n definition = URI.parse(definition)\n commands = []\n\n if definition.scheme =~ /^(http|ftp|file)/\n if File.extname(definition.path) == '.ps1'\n commands << %(powershell.exe -NoProfile -ExecutionPolicy Bypass -Command 'iex ((new-object net.webclient).DownloadString(\\\"#{definition}\\\"))')\n else\n commands << %(C:/ProgramData/chocolatey/bin/choco.exe source add -n #{definition.host}-#{definition.path.gsub('/', '-')} -s \"#{definition}\" --debug || echo \"Oops, it seems that you don't have chocolatey installed on this system. Please ensure it's there by adding something like 'plat.add_repository 'https://chocolatey.org/install.ps1'' to your platform definition.\")\n end\n else\n raise Vanagon::Error, \"Invalid repo specification #{definition}\"\n end\n\n commands\n end",
"def add_repository(definition)\n definition = URI.parse(definition)\n commands = []\n\n if definition.scheme =~ /^(http|ftp|file)/\n if File.extname(definition.path) == '.ps1'\n commands << %(powershell.exe -NoProfile -ExecutionPolicy Bypass -Command '[System.Net.ServicePointManager]::SecurityProtocol = [System.Net.SecurityProtocolType]::Tls12;iex ((new-object net.webclient).DownloadString(\"#{definition}\"))')\n else\n commands << %(C:/ProgramData/chocolatey/bin/choco.exe source add -n #{definition.host}-#{definition.path.tr('/', '-')} -s \"#{definition}\" --debug || echo \"Oops, it seems that you don't have chocolatey installed on this system. Please ensure it's there by adding something like 'plat.add_repository 'https://chocolatey.org/install.ps1'' to your platform definition.\")\n end\n else\n raise Vanagon::Error, \"Invalid repo specification #{definition}\"\n end\n\n commands\n end",
"def install_command\n command = ['helm', 'upgrade', name, chart] +\n install_flag +\n reset_values_flag +\n optional_tls_flags +\n optional_version_flag +\n rbac_create_flag +\n namespace_flag +\n value_flag\n\n command.shelljoin\n end",
"def create\n display 'Create a new Unfuddle repository'\n \n repo = Unfuddle::Resources::Repository.create(\n :repository => {\n :title => ask('Name', :required => true),\n :abbreviation => ask('Slug', :required => true),\n :system => 'git',\n :projects => {\n :project => {\n :id => 24468 \n }\n }\n }\n )\n end",
"def add(options = {})\n run build_command('addrepo', options)\n end",
"def create(project_name, repo_name, description) ; end",
"def add_repository(name)\n system(\"#{gitlab_shell_user_home}/gitlab-shell/bin/gitlab-projects add-project #{name}.git\")\n end",
"def yum_add_repo(transport, url)\n transport.exec(\"yum-config-manager #{quiet} --add-repo #{url}\", sudo: true)\n end",
"def install_repo!\n package 'apt-transport-https'\n include_recipe \"apt-chef::#{new_resource.channel}\"\n package 'chefdk' do\n version new_resource.version unless new_resource.version == 'latest'\n end\n end",
"def add_command(*args)\n case args.size\n when 1 # Hash initializer\n args = args[0]\n cat = args[:category] ? args[:category] : :main\n content = {\n :name => args[:name],\n :symbol => args[:symbol],\n :enabled => args[:enabled].nil? ? true : args[:enabled],\n :ext => args[:ext],\n :help => args[:help],\n :child => args[:child],\n }\n else\n name = args[0]; symbol = args[1]; \n enabled = args[2].nil? ? true : args[2];\n ext = args[3]\n help = args[4]\n cat = args[5].nil? ? :main : args[5];\n child = args[6]\n content = {:name=>name, :symbol=>symbol, :enabled => enabled,\n :ext=>ext, :help => help, :child => child}\n end\n (@list[cat] ||= []) << content\n end",
"def git_add\n `git add package.json npm-shrinkwrap.json`\n end",
"def install_repo!\n include_recipe \"yum-chef::#{new_resource.channel}\"\n package 'chefdk' do\n version new_resource.version unless new_resource.version == 'latest'\n end\n end",
"def create\n response.add(exec: \"kubectl create ns #{namespace}\", env: kube_env)\n response.add(exec: \"kubectl label namespace #{namespace} istio-injection=enabled --overwrite\", env: kube_env)\n # errors.add(:kubectl_label_namespace, stderr) if exit_code.positive? and stderr.index('AlreadyExists').nil?\n\n # deploy helm into namespace\n # TODO: Need to generate tiller-rbac\n response.add(exec: kubectl(\"apply -f #{target.write_path}/tiller-k8s-ns.yml\"), env: kube_env)\n response.add(exec: 'helm init --upgrade --wait --service-account tiller', env: kube_env)\n end",
"def add_repo (repository_name)\n \"add-apt-repository -y #{repository_name} >/dev/null 2>/dev/null\"\n end",
"def install_command\n command = \"Install-Module #{@resource[:name]} -Scope AllUsers -Force\"\n command << \" -RequiredVersion #{@resource[:ensure]}\" unless [:present, :latest].include? @resource[:ensure]\n command << \" -Repository #{@resource[:source]}\" if @resource[:source]\n command << \" #{install_options(@resource[:install_options])}\" if @resource[:install_options]\n command\n end",
"def add_repository_to_app_installation(installation, repo, options = T.unsafe(nil)); end",
"def add_repository!\n apt_repository 'partner' do\n uri 'http://archive.canonical.com'\n components %w(partner)\n distribution node['lsb']['codename']\n action :add\n end\n end",
"def create\n svncmd \"checkout\", resource[:name], resource[:path]\n #svncmd \"checkout\", resource[:source], resource[:path]\n #svncmd \"checkout\", <my_repo_src>, <send_my_repo_here>\n end",
"def create_repository_from_template(repo, name, options = T.unsafe(nil)); end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Parameters: note (an instance of `Gitlab::GithubImport::Representation::Note`), project (an instance of `Project`), client (an instance of `Gitlab::GithubImport::Client`).
|
def initialize(note, project, client)
@note = note
@project = project
@client = client
@user_finder = GithubImport::UserFinder.new(project, client)
end
|
[
"def modify_note(project_id, issue_id, note_id, content, access_token)\n rest_client.put(\"/projects/#{project_id}/issues/#{issue_id}/notes/#{note_id}\", { verify: false, body: { body: content, access_token: access_token } })\n end",
"def create\n @project_note = ProjectNote.new(project_note_params(params[:project_note]))\n\n if @project_note.save\n render json: @project_note, status: :created, location: @project_note\n else\n render json: @project_note.errors, status: :unprocessable_entity\n end\n end",
"def create\n @project_note = ProjectNote.new(project_note_params)\n\n respond_to do |format|\n if @project_note.save\n format.html { redirect_to @project_note, notice: 'Project note was successfully created.' }\n format.json { render :show, status: :created, location: @project_note }\n else\n format.html { render :new }\n format.json { render json: @project_note.errors, status: :unprocessable_entity }\n end\n end\n end",
"def mention_note(project_id, resource_id, note_id)\n get \"projects/#{project_id}/data/mentions/#{resource_id}/notes/#{note_id}\"\n end",
"def note\n DBC.require( bu?, \"Vin non bu: La note n'est pas definie\" )\n\n @note\n end",
"def add_note(note)\n response = self.class.put(url(self.id), :query => {:notes => note}, :body => \"\") # nginx reject requests without body\n raise response.response.to_s unless response.code == 200\n\n puts green(\"Note added to Issue ##{self.id}: #{note}\")\n end",
"def note note, preview=nil\n preview ||= note[0..64]\n params = {\n contact_ids: [ self.id ],\n note: note,\n note_preview: preview\n }\n @nimble.post \"contacts/notes\", params\n end",
"def initialize(issue, project, client)\n @issue = issue\n @project = project\n @client = client\n end",
"def issue_note(project, issue, id)\n get(\"/projects/#{url_encode project}/issues/#{issue}/notes/#{id}\")\n end",
"def create_note(title,contents)\n Dropio::Client.instance.create_note(self, title, contents)\n end",
"def new_note(note)\n if note.notify\n users = note.project.users.reject { |u| u.id == note.author.id }\n\n # Note: wall posts are not \"attached\" to anything, so fall back to \"Wall\"\n noteable_type = note.noteable_type.presence || \"Wall\"\n notify_method = \"note_#{noteable_type.underscore}_email\".to_sym\n\n if Notify.respond_to? notify_method\n team_without_note_author(note).map do |u|\n Notify.delay.send(notify_method, u.id, note.id)\n end\n end\n elsif note.notify_author && note.commit_author\n Notify.delay.note_commit_email(note.commit_author.id, note.id)\n end\n end",
"def create_issue_note(project, issue, body)\n post(\"/projects/#{url_encode project}/issues/#{issue}/notes\", body: { body: body })\n end",
"def create_snippet_note(project, snippet, body)\n post(\"/projects/#{url_encode project}/snippets/#{snippet}/notes\", body: { body: body })\n end",
"def patch_mention_note(project_id, resource_id, note_id, opts = {})\n patch \"projects/#{project_id}/data/mentions/#{resource_id}/notes/#{note_id}\", opts\n end",
"def note\n value(\"NOTE\")\n end",
"def note\n value('NOTE')\n end",
"def update_client_id_for_note\n self.client_id = Client.current_client_id\n end",
"def create_note(title,contents)\n Resource.client.create_note(self, title, contents)\n end",
"def reference_note\n self\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Test that the Graph class will not let you call to_s on an incomplete graph
|
def test_graph_to_s_incomplete
sut_graph = Graph.new
sut_graph.name="test_graph"
#sut_graph.type=:digraph
sut_graph.node_style=:ellipse
sut_graph.add_edge("TEST1" , "TEST2" , "take_me_to_test_2")
assert_raises RuntimeError do
returned_obj = sut_graph.to_s
end # end assert
end
|
[
"def test_to_s_empty_graph\n graph = Graph.new\n\n assert(graph.to_s == '')\n end",
"def test_to_s\n graph = DirectedGraph.new\n vertex_a = Vertex.new('a')\n vertex_b = Vertex.new('b')\n vertex_c = Vertex.new('c')\n graph.add_vertex(vertex_a).add_vertex(vertex_b).add_vertex(vertex_c)\n graph.add_edge('a','b').add_edge('c','b')\n\n assert(graph.to_s == 'a=>b,b=>,c=>b')\n end",
"def test_to_s_only_vertices\n graph = Graph.new\n vertex_a = Vertex.new('a')\n vertex_b = Vertex.new('b')\n vertex_c = Vertex.new('c')\n graph.add_vertex(vertex_a).add_vertex(vertex_b).add_vertex(vertex_c)\n\n assert(graph.to_s == 'a=>,b=>,c=>')\n end",
"def test_to_s\n graph = Graph.new\n vertex_a = Vertex.new('a')\n vertex_b = Vertex.new('b')\n vertex_c = Vertex.new('c')\n graph.add_vertex(vertex_a).add_vertex(vertex_b).add_vertex(vertex_c)\n graph.add_edge('a','b').add_edge('c','b')\n\n assert(graph.to_s == 'a=>b,b=>a,b=>c,c=>b')\n end",
"def test_build_from_hash\n graph = DirectedGraph.new\n graph.build({'a'=>nil,'b'=>'c','c'=>nil})\n\n assert(graph.to_s == 'a=>,b=>c,c=>')\n end",
"def to_s() edges.to_s; end",
"def test_dump_invalid\n graph = Construqt::Graph.build_interface_graph_from_host(invalid_graph)\n assert(!graph, \"should be nil\")\n end",
"def test_build_from_string\n graph = DirectedGraph.new\n graph.build('a=>,b=>c,c=>')\n\n assert(graph.to_s == 'a=>,b=>c,c=>')\n end",
"def test_print_empty\n assert_output(\"Empty graph!\\n\") { @graph.print }\n end",
"def test_new_graph_not_nil\n refute_nil @g\n assert_kind_of Graph, @g\n end",
"def test_new_graph_not_nil\n refute_nil(@graph)\n assert_kind_of(Graph, @graph)\n end",
"def test_print_empty\n assert_output(/Empty graph!/) { @g.print }\n end",
"def test_build_from_hash\n graph = Graph.new\n graph.build({'a'=>'b','c'=>'b'})\n\n assert(graph.to_s == 'a=>b,b=>a,b=>c,c=>b')\n end",
"def graphobj?; false end",
"def test_has_edges_when_empty\n graph = Graph.new\n\n assert(graph.has_edges() == false)\n end",
"def graph_to_s\n string = \"\"\n @nodes.each do |name, node|\n string +=\"#{name}:\\n\\t(#{node.name}, #{node.country}, #{node.continent} #{node.timezone}, #{node.coords}, #{node.pop}, #{node.region}) => #{@edges[name]} \\n\"\n end\n string\n end",
"def test_build_from_string\n graph = Graph.new\n graph.build('a=>b,c=>b')\n\n assert(graph.to_s == 'a=>b,b=>a,b=>c,c=>b')\n end",
"def test_simple_graph\n assert_equal 6, @g.subgraph_with('0').size\n end",
"def test_has_edge_no_edge\n assert(@graph.has_edge('b', 'd') == false)\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Stubs SOAP requests to a given soap_action.
|
def stubs(soap_action)
setup :stubs, soap_action
self
end
|
[
"def dispatch(soap_action)\n response = @request.soap @soap\n Response.new response\n end",
"def execute_soap_request(action, args)\n original_action_name =\n get_service_registry.get_method_signature(action)[:original_name]\n original_action_name = action if original_action_name.nil?\n response = @client.request(original_action_name) do |soap|\n set_headers(soap, args)\n end\n return response\n end",
"def setup(soap_action, &block)\n @soap = SOAP.new @wsdl.soap_actions[soap_action]\n @wsse = WSSE.new\n\n yield_parameters &block if block\n\n @soap.namespaces[\"xmlns:wsdl\"] = @wsdl.namespace_uri\n @soap.wsse = @wsse\n end",
"def soap\n raise ArgumentError, \"Expected to be called in a block passed to #request\" unless @soap\n @soap\n end",
"def define_class_action(action)\n class_action_module.module_eval %{\n def #{action.to_s.snakecase}(body = nil, &block)\n response = client.request :wsdl, #{action.inspect}, :body => body, &block\n Savon.hooks.select(:model_soap_response).call(response) || response\n end\n }\n end",
"def execute_soap_request(action, args, extra_namespaces)\n original_action_name =\n get_service_registry.get_method_signature(action)[:original_name]\n original_action_name = action if original_action_name.nil?\n response = @client.request(original_action_name) do |soap|\n set_headers(soap, args, extra_namespaces)\n end\n return response\n end",
"def set_soap_action(input)\n soap_action = wsdl.soap_action input.to_sym\n soap_action ||= Gyoku::XMLKey.create(input).to_sym\n http.headers[\"SOAPAction\"] = %{\"#{soap_action}\"}\n end",
"def soap_action(action, options={})\n if options[:as].present?\n options[:to] ||= action\n action = options[:as]\n end\n\n if action.is_a?(Symbol)\n if soap_config.camelize_wsdl.to_s == 'lower'\n options[:to] ||= action.to_s\n action = action.to_s.camelize(:lower)\n elsif soap_config.camelize_wsdl\n options[:to] ||= action.to_s\n action = action.to_s.camelize\n end\n\n end\n\n default_response_tag = soap_config.camelize_wsdl ? 'Response' : '_response'\n default_response_tag = action+default_response_tag\n\n\n self.soap_actions[action] = options.merge(\n :in => WashOut::Param.parse_def(soap_config, options[:args]),\n :request_tag => options[:as] || action,\n :out => WashOut::Param.parse_def(soap_config, options[:return]),\n :header_out => options[:header_return].present? ? WashOut::Param.parse_def(soap_config, options[:header_return]) : nil,\n :to => options[:to] || action,\n :response_tag => options[:response_tag] || default_response_tag\n )\n end",
"def define_wcf_action(action)\n\t\t \tself.class.send(:define_method ,action.name) do |data=nil,*args|\n\t\t\t\tbody = build_body(action, data)\n\n\t\t\t\tres = send_wcf_action(action.soap_action,body,*args)\n\t\t\t\t(@cookies << res.headers[\"Set-Cookie\"]) if @save_cookeis\n\n\t\t\t\tresult = get_wcf_response(res,action.name)\n\t\t\t\tres.singleton_class.send(:define_method,:result) do\n\t\t\t\t\tresult\n\t\t\t\tend\n\t\t\t\tres\n\t\t\tend\n\t\tend",
"def execute_soap_request(action, args, extra_namespaces)\n original_action_name =\n get_service_registry.get_method_signature(action)[:original_name]\n original_action_name = action if original_action_name.nil?\n response = @client.request(original_action_name) do |soap, wsdl, http|\n soap.body = args\n header_handler.prepare_request(http, soap)\n soap.namespaces.merge!(extra_namespaces) unless extra_namespaces.nil?\n end\n return response\n end",
"def soap_action(action, options={})\n exposed_name = if action.is_a?(Symbol)\n WashOut.normalize(action, soap_config) \n else\n action.to_s\n end\n\n self.soap_actions[exposed_name] = options.merge(\n in: WashOut::Param.parse_def(soap_config, options[:args]),\n out: WashOut::Param.parse_def(soap_config, options[:return]),\n to: options[:to] || action.to_s,\n response_tag: options[:response_tag]\n )\n end",
"def setup(request, soap)\n request.url = soap.endpoint\n request.headers[\"Content-Type\"] ||= ContentType[soap.version]\n request.body = soap.to_xml\n request\n end",
"def soap(soap)\n @soap = soap\n http.endpoint @soap.endpoint.host, @soap.endpoint.port\n http.use_ssl = @soap.endpoint.ssl?\n\n log_request\n @response = http.start do |h|\n h.request request(:soap) { |request| request.body = @soap.to_xml }\n end\n log_response\n @response\n end",
"def call(soap_action, data = nil, cookies = nil)\n response = request(soap_action, data, cookies)\n\n if block_given?\n yield response\n else\n extract_result_from_response(response, soap_action)\n end\n end",
"def soap_action(operation)\n operation\n end",
"def set_soap_header\n if request.env['HTTP_SOAPACTION'].blank? || request.env['HTTP_SOAPACTION'] == %Q(\"\")\n xml = REXML::Document.new(request.raw_post)\n element = REXML::XPath.first(xml, '/soap:Envelope/soap:Body/*')\n request.env['HTTP_SOAPACTION'] = element.name if element\n end\n end",
"def set_soap_header\n if request.env['HTTP_SOAPACTION'].blank? || request.env['HTTP_SOAPACTION'] == %Q(\"\")\n xml = REXML::Document.new(request.raw_post)\n element = REXML::XPath.first(xml, '/soap:Envelope/soap:Body/*')\n request.env['HTTP_SOAPACTION'] = element.name if element\n end\n end",
"def soap_request(options = {})\n options = { error_response_codes: [404] }.merge options\n\n headers = { 'Content-Type' => CONTENT_TYPES[version] }\n if action = options.delete(:action)\n headers['SOAPAction'] = action\n elsif operation = options.delete(:operation)\n headers['SOAPAction'] = soap_action operation\n end\n\n if version == 1 && headers['SOAPAction'].blank?\n raise 'SOAPAction header value must be provided for SOAP 1.1'\n end\n\n request_options = { format: :xml, headers: headers }\n Response.new(version, perform_request(:post, endpoint, options.merge(request_options)), fault_builder).tap do |response|\n raise response.fault if response.fault?\n raise response.error if response.error?\n end\n end",
"def commit(soap_action, request)\n url = url_for(soap_action)\n headers = {\n 'Content-Type' => 'application/soap+xml; charset=utf-8',\n 'Content-Length' => request.size.to_s\n }\n response = parse(ssl_post(url, request, headers))\n Response.new(\n success?(response),\n message_from(response),\n response,\n test: test?,\n authorization: build_authorization(response)\n )\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Expects a given SOAP body Hash to be used.
|
def with(soap_body)
Savon::SOAP::XML.any_instance.expects(:body=).with(soap_body) if mock_method == :expects
self
end
|
[
"def to_hash\n @hash ||= (Crack::XML.parse(body) rescue {}).find_soap_body\n end",
"def find_soap_body\n envelope = self[self.keys.first] || {}\n body_key = envelope.keys.find { |key| /.+:Body/ =~ key } rescue nil\n body_key ? envelope[body_key].map_soap_response : {}\n end",
"def find_soap_body\n find_soap_element /.+:Body/\n end",
"def hash_body\n if hash_body?\n @attributes['authorization']['oauth_body_hash'] = body_hash\n end\n end",
"def sha1_body_hash\n digest_body_hash(OpenSSL::Digest::SHA1)\n end",
"def validate_body(body, request_headers)\n signature = get_header_value(request_headers, HEADER_SIGNATURE)\n key_id = get_header_value(request_headers, HEADER_KEY_ID)\n secret_key = @secret_key_store.get_secret_key(key_id)\n digest = OpenSSL::Digest.new(HMAC_SCHEME)\n hmac = OpenSSL::HMAC.digest(digest, secret_key, body)\n expected_signature = Base64.strict_encode64(hmac).strip\n\n unless equal_signatures?(signature, expected_signature)\n msg = \"failed to validate signature '#{signature}'\"\n raise SignatureValidationException.new(message: msg)\n end\n end",
"def build_hash_for(body)\n hasher = OpenSSL::Digest.new('sha256')\n hasher.hexdigest(body)\n end",
"def hash_body\n @hash_body ||= Oj.load(json_body)\n end",
"def hash_body?\n BODY_HASH_METHODS.key?(signature_method) && !form_encoded? &&\n (@attributes.key?('hash_body?') ? @attributes['hash_body?'] : true)\n end",
"def check_from_hash(hash)\n d = @data_object\n \n d.pay_paying_check_doc_ref = hash['pay_paying_check_doc_ref']\n d.pay_paying_check_payor = hash['pay_paying_check_payor']\n d.pay_paying_check_our_ref = hash['pay_paying_check_our_ref']\n end",
"def get_message_from_thrift_binary(body)\n binary_string = Base64.decode64(body)\n rmessage = nil\n @@deserializer_lock.synchronize do\n rmessage = @@deserializer.deserialize(Messagebus::Thrift::MessageInternal.new, binary_string)\n end\n Messagebus::Message.create_message_from_message_internal(rmessage)\n end",
"def assert_hash!(hash)\n unless hash.kind_of?(Hash)\n raise \"Passed parameter must be of type Hash, got: #{hash}\"\n end\n end",
"def from_hash(hash); end",
"def sha256_body_hash\n digest_body_hash(OpenSSL::Digest::SHA256)\n end",
"def transaction_for_hash(hash)\n raise \"Not Implemented\"\n end",
"def check_signature(body)\n received_signature = request.env['HTTP_X_HUB_SIGNATURE'] || ''\n signature = 'sha1=' + hmac_sha1(settings.github_secret, body)\n\n if !Rack::Utils.secure_compare(signature, received_signature)\n build_failed('signature mismatch')\n end\nend",
"def validate_update(body, headers)\n unless client_secret\n raise ArgumentError, \"client_secret must be set during configure\"\n end\n\n if request_signature = headers[\"X-Hub-Signature\"] || headers[\"HTTP_X_HUB_SIGNATURE\"]\n calculated_signature = OpenSSL::HMAC.hexdigest(\"sha1\", client_secret, body)\n calculated_signature == request_signature\n end\n end",
"def parse_body(body)\n @body = body\n \n if @headers[:content_type] =~ MULTIPART_FORM_DATA_RE\n parse_multi_part(body, BOUNDARY_FIX + $1) # body.dup so we keep the original request body?\n elsif content_type == FORM_URL_ENCODED\n parse_form_url_encoded(body)\n end\n end",
"def test_body_with_binary(body, opts = {})\n test_body_with_binary_with_http_info(body, opts)\n nil\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Sets up Savon to respond like there was a SOAP fault.
|
def raises_soap_fault
Savon::SOAP::Response.any_instance.expects(:soap_fault?).returns(true)
self
end
|
[
"def handle_soap_fault\n if soap_fault_message\n @soap_fault = soap_fault_message\n raise Savon::SOAPFault, @soap_fault if self.class.raise_errors?\n end\n end",
"def handle_fault(request, response)\n xml = Nokogiri::XML(response.body)\n msg = xml.xpath('/soap:Envelope/soap:Body/soap:Fault/soap:Reason/soap:Text').text\n raise SoapFault.new(\"Error from server: #{msg}\", request, response)\n end",
"def on_missing_document(soap_response)\n raise \"The response is not a valid SOAP envelope\"\n end",
"def halt\n\t\t\tthrow :halt_this_responder\n\t\tend",
"def create_fault_response(e)\n env = SOAPEnvelope.new(SOAPHeader.new, SOAPBody.new(fault(e, nil), true))\n opt = {}\n opt[:external_content] = nil\n @filterchain.reverse_each do |filter|\n env = filter.on_outbound(env, opt)\n break unless env\n end\n response_string = Processor.marshal(env, opt)\n conn_data = StreamHandler::ConnectionData.new(response_string)\n conn_data.is_fault = true\n if ext = opt[:external_content]\n mimeize(conn_data, ext)\n end\n conn_data\n end",
"def setup_response\n begin\n @response = @interceptor.intercept(request: @request) # TODO: ignored_routes, matched_routes, robots_json, & check_static_files options\n rescue SnapSearch::Exception => exception\n @config.on_exception.nil? ? raise(exception) : @config.on_exception.call(exception)\n end\n end",
"def internal_error\n self.status = 500\n self.headers = {}\n self.content = ['Internal error']\n self\n end",
"def respond_with_500\n [500, { 'Content-Type' => DEFAULT_CONTENT_TYPE }, ['Internal server error']]\n end",
"def send_bad_request_response\n\t\tthrow :halt, [ 400, 'Bad Request' ]\n\tend",
"def render_soap_error(message, code=nil)\n render :template => \"wash_out/#{soap_config.wsdl_style}/error\", :status => 500,\n :layout => false,\n :locals => { :error_message => message, :error_code => (code || 'Server') },\n :content_type => 'text/xml'\n end",
"def error!(status, message)\n request.halt status, {error: message}.to_json\n end",
"def render_soap_error(message, code=nil)\n render :template => \"wash_out/#{soap_config.wsdl_style}/error\", :status => 500,\n :layout => false,\n :locals => { :error_message => message, :error_code => (code || 'Server') },\n :content_type => 'text/xml'\n end",
"def render_soap_error(message, options = {})\n @namespace = NAMESPACE\n soap_error_response = render_to_string :template => 'wash_with_soap/error', :status => 500,\n :locals => { :error_message => message }\n\n if options[:ws_security] == \"encrypt\" || options[:ws_security] == \"sign\" || options[:ws_security] == \"sign_encrypt\"\n soap_error_response = ws_security_apply(soap_error_response, options)\n end\n\n render :xml => soap_error_response\n end",
"def response!\n return response if !response.errors?\n\n raise response.to_exception\n end",
"def set_soap_headers\n self.headers['Content-Type'] = \"text/xml;charset=utf-8\"\n self.headers['SOAPAction'] = \"\\\"\\\"\"\n return \"headers set to soap\"\n end",
"def serialize_soap_fault(ex)\n logger.debug \"Serializing SOAP Fault: #{ex.inspect}\"\n\n xb = Builder::XmlMarkup.new\n xb.instruct!\n\n xb.tag! 'e:Envelope', 'xmlns:e' => soap_env_ns do |xenv|\n xenv.tag! 'e:Body' do |xbody|\n xbody.tag! 'e:Fault' do |xf|\n case soap_version\n when :soap11\n xf.faultcode \"e:Server.#{ex.class.name}\"\n xf.faultstring ex.message\n when :soap12\n xf.tag! 'e:Code' do |xcode|\n xcode.tag! 'e:Value', 'e:Receiver'\n xcode.tag! 'e:Subcode' do |xsubcode|\n xsubcode.tag! 'e:Value', ex.class.name\n end\n end\n xf.tag! 'e:Reason', ex.message\n else\n raise \"Unsupported SOAP version: #{soap_version}\"\n end\n end\n end\n end\n end",
"def generate_soap_fault(endpoint)\n puts \"checking endpoint #{endpoint}\"\n status = {:action => 'soap_fault'}\n data = %x[curl --max-time 20 --header \"Content-Type: text/xml\" --data \"<?xml version=\"1.0\"?>\" #{endpoint}]\n \n pieces = data.split\n if pieces[0] == '<?xml'\n status.merge!({:result=> 0, :message => data})\n else\n status.merge!({:result=> 1, :message => data})\n end\n return status\n end",
"def customize_response\n cbs = ExceptionHandler.config.customize_response_by_status\n # could also add support for customizations by exception type\n\n if (cbs.kind_of? Hash) && (cbs[@status].kind_of? Hash)\n customize_response_with(cbs[@status])\n end\n end",
"def validate_response!\n if error = FrOData::Errors::ERROR_MAP[status]\n raise error.new(response, error_message)\n end\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Wrap tests that use Mocha and skip if unavailable.
|
def uses_mocha(test_name)
require 'mocha' unless Object.const_defined?(:Mocha)
rescue LoadError => load_error
$stderr.puts "Skipping #{test_name} tests. `gem install mocha` and try again."
else
yield
end
|
[
"def uses_mocha(test_name)\n require 'mocha'\n yield\nrescue LoadError\n $stderr.puts \"Skipping #{test_name} tests. `gem install mocha` and try again.\"\nend",
"def uses_mocha(test_name)\n require 'mocha' unless Object.const_defined?(:Mocha)\n yield\nrescue LoadError => load_error\n raise unless load_error.message =~ /mocha/i\n $stderr.puts \"Skipping #{test_name} tests. `gem install mocha` and try again.\"\nend",
"def skip\n 'skip spec until we find good way to implement it'.should.not.be.nil\n end",
"def wrapper\n setup if respond_to?(:setup)\n yield ? true : false\n rescue Skip\n nil\n rescue Exception => ex\n ex.class == AssertionError ? false : ex\n ensure\n teardown if respond_to?(:teardown)\n end",
"def ignore_tests\n nil\n end",
"def mocha_teardown; end",
"def skip_unless_proxy_agent(tests)\n msg = \"Skipping all tests; '#{tests[:resource_name]}' \"\\\n '(or test file) is not supported without a proxy agent'\n banner = '#' * msg.length\n raise_skip_exception(\"\\n#{banner}\\n#{msg}\\n#{banner}\\n\", self) unless proxy_agent\n end",
"def exit_skipping_test()\n Process.exit 99\nend",
"def make_parallel_tests_available\n return if defined?(ParallelTests)\n return unless probably_running_parallel_tests?\n\n require \"parallel_tests\"\n rescue LoadError\n warn(\"SimpleCov guessed you were running inside parallel tests but couldn't load it. Please file a bug report with us!\")\n end",
"def test_ruby19_minitest_mocking_required\n setup_test('Test', 'minitest/unit')\n result = turn 'tmp/test.rb'\n assert result.index('PASS')\n end",
"def run\n skip_message = nil\n DemoLogger.set_log_file self.location + '.log'\n DemoLogger.log.info(\"----------------------- Minitest - starting setup for test '#{self.location}' -----------------------\")\n if self.class.annotations && self.class.annotations[self.location[self.location.index('#') + 1, self.location.length].to_sym]\n known = self.class.annotations[self.location[self.location.index('#') + 1, self.location.length].to_sym][:known_issue]\n if known != nil && known != ''\n skip_message = known_issue known\n end\n if skip_message\n with_info_handler do\n time_it do\n capture_exceptions do\n skip(skip_message)\n end\n end\n end\n end\n end\n super if skip_message.nil?\n self\n end",
"def uses_shoulda(&blk)\n begin\n require 'rubygems'\n require 'shoulda'\n yield\n rescue Gem::LoadError\n $stderr.puts \"Sorry, you need to install shoulda to run these tests: `gem install shoulda`\"\n end\nend",
"def skip_unless_supported(tests)\n pattern = tests[:platform]\n agent_only = tests[:agent_only] | false\n if agent_only && agent.nil?\n msg = \"Skipping all tests; '#{tests[:resource_name]}' \"\\\n '(or test file) is not supported agentlessly'\n banner = '#' * msg.length\n raise_skip_exception(\"\\n#{banner}\\n#{msg}\\n#{banner}\\n\", self)\n end\n return false if pattern.nil? || platform.match(tests[:platform])\n msg = \"Skipping all tests; '#{tests[:resource_name]}' \"\\\n '(or test file) is not supported on this node'\n banner = '#' * msg.length\n raise_skip_exception(\"\\n#{banner}\\n#{msg}\\n#{banner}\\n\", self)\n end",
"def pass(suite, test, test_runner); end",
"def skip_a_test_that(description=nil,&block)\n description = make_up_name(block) if description.nil?\n STDERR.puts \"warning: test 'test that #{description}' is being skipped\" unless $FOR_TESTING_ONLY_SKIP_STDERR\n end",
"def fail_if_no_examples; end",
"def mocha_teardown\n Mockery.teardown\n end",
"def puke(klass, meth, err)\n case err\n when MiniTest::Skip\n @turn_test.skip!(err)\n turn_reporter.skip(err)\n when MiniTest::Assertion\n @turn_test.fail!(err)\n turn_reporter.fail(err)\n else\n @turn_test.error!(err)\n turn_reporter.error(err)\n end\n super(klass, meth, err)\n end",
"def build_test\n \n return if skip_method(__method__)\n \n build_test_fixtures\n build_test_helper\n build_functional_test\n build_integration_test\n build_unit_test\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
lexer rule t__6! (T__6) (in CSV.g)
|
def t__6!
# -> uncomment the next line to manually enable rule tracing
# trace_in( __method__, 1 )
type = T__6
channel = ANTLR3::DEFAULT_CHANNEL
# - - - - main rule block - - - -
# at line 7:8: '+'
match( 0x2b )
@state.type = type
@state.channel = channel
ensure
# -> uncomment the next line to manually enable rule tracing
# trace_out( __method__, 1 )
end
|
[
"def t__34!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 14 )\n\n type = T__34\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 23:9: ','\n match( 0x2c )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 14 )\n\n end",
"def t__18!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 12 )\n\n type = T__18\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 14:9: ','\n match( 0x2c )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 12 )\n\n end",
"def t__36!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 16 )\n\n type = T__36\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 25:9: 'char'\n match( \"char\" )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 16 )\n\n end",
"def t__65!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in(__method__, 43)\n\n type = T__65\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 49:9: '!'\n match(?!)\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out(__method__, 43)\n\n end",
"def t__11!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in(__method__, 1)\n\n type = T__11\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 7:9: ';'\n match(?;)\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out(__method__, 1)\n\n end",
"def t__26!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 19 )\n\n type = T__26\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 35:9: 'g'\n match( 0x67 )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 19 )\n\n end",
"def t__65!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 4 )\n\n type = T__65\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 10:9: ']'\n match( 0x5d )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 4 )\n\n end",
"def t__71!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in(__method__, 16)\n\n type = T__71\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 22:9: ';'\n match(?;)\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out(__method__, 16)\n\n end",
"def t__15!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 4 )\n\n type = T__15\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 20:9: 'u'\n match( 0x75 )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 4 )\n\n end",
"def t__65!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 1 )\n\n type = T__65\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 7:9: '\\\\r'\n match( 0xd )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 1 )\n\n end",
"def t__15!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in(__method__, 6)\n\n type = T__15\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 21:9: ')'\n match(?))\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out(__method__, 6)\n\n end",
"def t__67!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in(__method__, 12)\n\n type = T__67\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 18:9: 'lexer'\n match(\"lexer\")\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out(__method__, 12)\n\n end",
"def t__36!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 29 )\n\n type = T__36\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 45:9: 'T'\n match( 0x54 )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 29 )\n\n end",
"def t__67!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 12 )\n\n type = T__67\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 18:9: 'lexer'\n match( \"lexer\" )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 12 )\n\n end",
"def t__43!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 36 )\n\n type = T__43\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 52:9: 'u'\n match( 0x75 )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 36 )\n\n end",
"def t__71!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 16 )\n\n type = T__71\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 22:9: ';'\n match( 0x3b )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 16 )\n\n end",
"def t__44!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in(__method__, 22)\n\n type = T__44\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 28:9: ':'\n match(?:)\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out(__method__, 22)\n\n end",
"def t__36!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 28 )\n\n type = T__36\n channel = ANTLR3::DEFAULT_CHANNEL\n\n\n # - - - - main rule block - - - -\n # at line 34:9: '?'\n match(0x3f)\n\n\n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 28 )\n\n end",
"def t__12!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in(__method__, 3)\n\n type = T__12\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 18:9: ';'\n match(?;)\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out(__method__, 3)\n\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
lexer rule number! (NUMBER) (in CSV.g)
|
def number!
# -> uncomment the next line to manually enable rule tracing
# trace_in( __method__, 2 )
type = NUMBER
channel = ANTLR3::DEFAULT_CHANNEL
# - - - - main rule block - - - -
# at line 11:10: ( '0' .. '9' )+
# at file 11:10: ( '0' .. '9' )+
match_count_1 = 0
while true
alt_1 = 2
look_1_0 = @input.peek( 1 )
if ( look_1_0.between?( 0x30, 0x39 ) )
alt_1 = 1
end
case alt_1
when 1
# at line 11:11: '0' .. '9'
match_range( 0x30, 0x39 )
else
match_count_1 > 0 and break
eee = EarlyExit(1)
raise eee
end
match_count_1 += 1
end
@state.type = type
@state.channel = channel
ensure
# -> uncomment the next line to manually enable rule tracing
# trace_out( __method__, 2 )
end
|
[
"def num!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 5 )\n\n type = NUM\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 37:9: ( '1' .. '9' ) ( '0' .. '9' )*\n # at line 37:9: ( '1' .. '9' )\n # at line 37:10: '1' .. '9'\n match_range( 0x31, 0x39 )\n\n # at line 37:19: ( '0' .. '9' )*\n while true # decision 1\n alt_1 = 2\n look_1_0 = @input.peek( 1 )\n\n if ( look_1_0.between?( 0x30, 0x39 ) )\n alt_1 = 1\n\n end\n case alt_1\n when 1\n # at line 37:20: '0' .. '9'\n match_range( 0x30, 0x39 )\n\n else\n break # out of loop for decision 1\n end\n end # loop for decision 1\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 5 )\n\n end",
"def number!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 116 )\n\n\n\n type = NUMBER\n channel = ANTLR3::DEFAULT_CHANNEL\n # - - - - label initialization - - - -\n\n\n # - - - - main rule block - - - -\n # at line 276:10: ( DIGIT )+\n # at file 276:10: ( DIGIT )+\n match_count_2 = 0\n while true\n alt_2 = 2\n look_2_0 = @input.peek( 1 )\n\n if ( look_2_0.between?( 0x30, 0x39 ) )\n alt_2 = 1\n\n end\n case alt_2\n when 1\n # at line \n if @input.peek( 1 ).between?( 0x30, 0x39 )\n @input.consume\n else\n mse = MismatchedSet( nil )\n recover mse\n raise mse\n\n end\n\n\n\n else\n match_count_2 > 0 and break\n eee = EarlyExit(2)\n\n\n raise eee\n end\n match_count_2 += 1\n end\n\n\n\n\n @state.type = type\n @state.channel = channel\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 116 )\n\n\n end",
"def number!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 32 )\n\n type = NUMBER\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 60:12: ( INTEGER )+\n # at file 60:12: ( INTEGER )+\n match_count_2 = 0\n while true\n alt_2 = 2\n look_2_0 = @input.peek( 1 )\n\n if ( look_2_0.between?( 0x30, 0x39 ) )\n alt_2 = 1\n\n end\n case alt_2\n when 1\n # at line 60:12: INTEGER\n integer!\n\n else\n match_count_2 > 0 and break\n eee = EarlyExit(2)\n\n\n raise eee\n end\n match_count_2 += 1\n end\n\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 32 )\n\n end",
"def num_i!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 25 )\n\n\n\n type = NUM_I\n channel = ANTLR3::DEFAULT_CHANNEL\n # - - - - label initialization - - - -\n\n\n # - - - - main rule block - - - -\n # at line 46:8: ( '0' .. '9' )+\n # at file 46:8: ( '0' .. '9' )+\n match_count_4 = 0\n while true\n alt_4 = 2\n look_4_0 = @input.peek( 1 )\n\n if ( look_4_0.between?( 0x30, 0x39 ) )\n alt_4 = 1\n\n end\n case alt_4\n when 1\n # at line \n if @input.peek( 1 ).between?( 0x30, 0x39 )\n @input.consume\n else\n mse = MismatchedSet( nil )\n recover mse\n raise mse\n\n end\n\n\n\n else\n match_count_4 > 0 and break\n eee = EarlyExit(4)\n\n\n raise eee\n end\n match_count_4 += 1\n end\n\n\n\n\n @state.type = type\n @state.channel = channel\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 25 )\n\n\n end",
"def digit!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 53 )\n\n type = DIGIT\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 352:8: '0' .. '9'\n match_range( 0x30, 0x39 )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 53 )\n\n end",
"def num_f!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 26 )\n\n\n\n type = NUM_F\n channel = ANTLR3::DEFAULT_CHANNEL\n # - - - - label initialization - - - -\n\n\n # - - - - main rule block - - - -\n # at line 47:8: ( '0' .. '9' )+ '.' ( '0' .. '9' )+\n # at file 47:8: ( '0' .. '9' )+\n match_count_5 = 0\n while true\n alt_5 = 2\n look_5_0 = @input.peek( 1 )\n\n if ( look_5_0.between?( 0x30, 0x39 ) )\n alt_5 = 1\n\n end\n case alt_5\n when 1\n # at line \n if @input.peek( 1 ).between?( 0x30, 0x39 )\n @input.consume\n else\n mse = MismatchedSet( nil )\n recover mse\n raise mse\n\n end\n\n\n\n else\n match_count_5 > 0 and break\n eee = EarlyExit(5)\n\n\n raise eee\n end\n match_count_5 += 1\n end\n\n\n match( 0x2e )\n # at file 47:24: ( '0' .. '9' )+\n match_count_6 = 0\n while true\n alt_6 = 2\n look_6_0 = @input.peek( 1 )\n\n if ( look_6_0.between?( 0x30, 0x39 ) )\n alt_6 = 1\n\n end\n case alt_6\n when 1\n # at line \n if @input.peek( 1 ).between?( 0x30, 0x39 )\n @input.consume\n else\n mse = MismatchedSet( nil )\n recover mse\n raise mse\n\n end\n\n\n\n else\n match_count_6 > 0 and break\n eee = EarlyExit(6)\n\n\n raise eee\n end\n match_count_6 += 1\n end\n\n\n\n\n @state.type = type\n @state.channel = channel\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 26 )\n\n\n end",
"def digit!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 37 )\n\n type = DIGIT\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 136:8: '0' .. '9'\n match_range( 0x30, 0x39 )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 37 )\n\n end",
"def dec_number!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 8 )\n\n type = DEC_NUMBER\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 11:14: ( '0' .. '9' )+\n # at file 11:14: ( '0' .. '9' )+\n match_count_4 = 0\n while true\n alt_4 = 2\n look_4_0 = @input.peek( 1 )\n\n if ( look_4_0.between?( 0x30, 0x39 ) )\n alt_4 = 1\n\n end\n case alt_4\n when 1\n # at line 11:14: '0' .. '9'\n match_range( 0x30, 0x39 )\n\n else\n match_count_4 > 0 and break\n eee = EarlyExit(4)\n\n\n raise eee\n end\n match_count_4 += 1\n end\n\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 8 )\n\n end",
"def readnumber!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 12 )\n\n type = READNUMBER\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 365:13: 'readNumber'\n match( \"readNumber\" )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 12 )\n\n end",
"def digit!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 40 )\n\n type = DIGIT\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 181:12: ( '0' .. '9' )+\n # at file 181:12: ( '0' .. '9' )+\n match_count_3 = 0\n while true\n alt_3 = 2\n look_3_0 = @input.peek( 1 )\n\n if ( look_3_0.between?( 0x30, 0x39 ) )\n alt_3 = 1\n\n end\n case alt_3\n when 1\n # at line 181:13: '0' .. '9'\n match_range( 0x30, 0x39 )\n\n else\n match_count_3 > 0 and break\n eee = EarlyExit(3)\n\n\n raise eee\n end\n match_count_3 += 1\n end\n\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 40 )\n\n end",
"def digit!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 38 )\n\n type = DIGIT\n channel = ANTLR3::DEFAULT_CHANNEL\n\n\n # - - - - main rule block - - - -\n # at line 162:11: ( '0' .. '9' )\n # at line 162:11: ( '0' .. '9' )\n # at line 162:12: '0' .. '9'\n match_range(0x30, 0x39)\n\n\n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 38 )\n\n end",
"def number\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 6 )\n return_value = NumberReturnValue.new\n\n # $rule.start = the first token seen before matching\n return_value.start = @input.look\n\n root_0 = nil\n __NUM22__ = nil\n\n tree_for_NUM22 = nil\n stream_NUM = ANTLR3::AST::RewriteRuleTokenStream.new( @adaptor, \"token NUM\" )\n\n begin\n # at line 34:11: NUM\n __NUM22__ = match( NUM, TOKENS_FOLLOWING_NUM_IN_number_199 )\n stream_NUM.add( __NUM22__ )\n # AST Rewrite\n # elements: NUM\n # token labels: \n # rule labels: return_value\n # token list labels: \n # rule list labels: \n # wildcard labels: \n\n return_value.tree = root_0\n stream_return_value = return_value ? subtree_stream( \"rule return_value\", return_value.tree ) : subtree_stream( \"token return_value\" )\n\n root_0 = @adaptor.create_flat_list\n # 34:15: -> ^( NUMBER NUM )\n # at line 34:18: ^( NUMBER NUM )\n root_1 = @adaptor.create_flat_list\n root_1 = @adaptor.become_root( @adaptor.create_from_type( NUMBER, \"NUMBER\" ), root_1 )\n\n @adaptor.add_child( root_1, stream_NUM.next_node )\n\n @adaptor.add_child( root_0, root_1 )\n\n\n\n return_value.tree = root_0\n # - - - - - - - rule clean up - - - - - - - -\n return_value.stop = @input.look( -1 )\n\n\n return_value.tree = @adaptor.rule_post_processing( root_0 )\n @adaptor.set_token_boundaries( return_value.tree, return_value.start, return_value.stop )\n\n rescue ANTLR3::Error::RecognitionError => re\n report_error(re)\n recover(re)\n return_value.tree = @adaptor.create_error_node( @input, return_value.start, @input.look(-1), re )\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 6 )\n\n end\n \n return return_value\n end",
"def k_num!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 47 )\n\n\n\n type = K_NUM\n channel = ANTLR3::DEFAULT_CHANNEL\n # - - - - label initialization - - - -\n\n\n # - - - - main rule block - - - -\n # at line 441:4: ( 'round' | 'aleatorio' | 'between' )\n # at line 441:4: ( 'round' | 'aleatorio' | 'between' )\n alt_14 = 3\n case look_14 = @input.peek( 1 )\n when 0x72 then alt_14 = 1\n when 0x61 then alt_14 = 2\n when 0x62 then alt_14 = 3\n else\n raise NoViableAlternative( \"\", 14, 0 )\n\n end\n case alt_14\n when 1\n # at line 441:5: 'round'\n match( \"round\" )\n\n\n when 2\n # at line 441:13: 'aleatorio'\n match( \"aleatorio\" )\n\n\n when 3\n # at line 441:25: 'between'\n match( \"between\" )\n\n\n end\n\n\n @state.type = type\n @state.channel = channel\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 47 )\n\n\n end",
"def integer!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 1 )\n\n\n\n type = INTEGER\n channel = ANTLR3::DEFAULT_CHANNEL\n # - - - - label initialization - - - -\n\n\n # - - - - main rule block - - - -\n # at line 28:10: ( '0' .. '9' )+\n # at file 28:10: ( '0' .. '9' )+\n match_count_1 = 0\n while true\n alt_1 = 2\n look_1_0 = @input.peek( 1 )\n\n if ( look_1_0.between?( 0x30, 0x39 ) )\n alt_1 = 1\n\n end\n case alt_1\n when 1\n # at line \n if @input.peek( 1 ).between?( 0x30, 0x39 )\n @input.consume\n else\n mse = MismatchedSet( nil )\n recover mse\n raise mse\n\n end\n\n\n\n else\n match_count_1 > 0 and break\n eee = EarlyExit(1)\n\n\n raise eee\n end\n match_count_1 += 1\n end\n\n\n\n\n @state.type = type\n @state.channel = channel\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 1 )\n\n\n end",
"def numeric!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 6 )\n\n type = NUMERIC\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 8:11: 'numeric'\n match( \"numeric\" )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 6 )\n\n end",
"def integer!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 26 )\n\n\n\n type = INTEGER\n channel = ANTLR3::DEFAULT_CHANNEL\n # - - - - label initialization - - - -\n\n\n # - - - - main rule block - - - -\n # at line 191:10: ( '0' .. '9' )+\n # at file 191:10: ( '0' .. '9' )+\n match_count_1 = 0\n while true\n alt_1 = 2\n look_1_0 = @input.peek( 1 )\n\n if ( look_1_0.between?( 0x30, 0x39 ) )\n alt_1 = 1\n\n end\n case alt_1\n when 1\n # at line \n if @input.peek( 1 ).between?( 0x30, 0x39 )\n @input.consume\n else\n mse = MismatchedSet( nil )\n recover mse\n raise mse\n\n end\n\n\n\n else\n match_count_1 > 0 and break\n eee = EarlyExit(1)\n\n\n raise eee\n end\n match_count_1 += 1\n end\n\n\n\n\n @state.type = type\n @state.channel = channel\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 26 )\n\n\n end",
"def int!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in(__method__, 48)\n\n type = INT\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 526:7: ( '0' .. '9' )+\n # at file 526:7: ( '0' .. '9' )+\n match_count_10 = 0\n loop do\n alt_10 = 2\n look_10_0 = @input.peek(1)\n\n if (look_10_0.between?(?0, ?9)) \n alt_10 = 1\n\n end\n case alt_10\n when 1\n # at line 526:7: '0' .. '9'\n match_range(?0, ?9)\n\n else\n match_count_10 > 0 and break\n eee = EarlyExit(10)\n\n\n raise eee\n end\n match_count_10 += 1\n end\n\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out(__method__, 48)\n\n end",
"def t__34!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 14 )\n\n type = T__34\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 23:9: ','\n match( 0x2c )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 14 )\n\n end",
"def integer!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 39 )\n\n type = INTEGER\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 1058:4: ( '0' .. '9' )+\n # at file 1058:4: ( '0' .. '9' )+\n match_count_7 = 0\n while true\n alt_7 = 2\n look_7_0 = @input.peek( 1 )\n\n if ( look_7_0.between?( 0x30, 0x39 ) )\n alt_7 = 1\n\n end\n case alt_7\n when 1\n # at line 1058:4: '0' .. '9'\n match_range( 0x30, 0x39 )\n\n else\n match_count_7 > 0 and break\n eee = EarlyExit(7)\n\n\n raise eee\n end\n match_count_7 += 1\n end\n\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 39 )\n\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|