query (string, lengths 7–9.5k) | document (string, lengths 10–1.07M) | negatives (list, 19 items) | metadata (dict) |
---|---|---|---|
GET /electronica_consejeros GET /electronica_consejeros.json
|
def index
  @electronica_consejeros = ElectronicaConsejero.all
end
|
[
"def index\r\n @electrica_consejeros = ElectricaConsejero.all\r\n end",
"def index\n @conseilles = Conseille.all\n respond_to do |format|\n format.html\n format.json { render json: @conseilles}\n end\n end",
"def index\r\n @consejeros = Consejero.all\r\n end",
"def index\n @tecnicas = Tecnica.all\n render json: @tecnicas\n end",
"def index\n @convos = Convo.all\n render json: @convos\n end",
"def index\n @codigos = Codigo.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @codigos }\n end\n end",
"def index\n @fornecedores = Fornecedor.page(params[:page]).per(NUMERO_POR_PAGINA)\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @fornecedores }\n end\n end",
"def index\n @cooperativas = Cooperativa.where(:status_id => Status.find_by_descricao('Ativo'))\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render :json => @cooperativas }\n end\n end",
"def index\n @cofis = Cofi.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @cofis }\n end\n end",
"def index\n @ice_ores = IceOre.all\n \n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @ice_ores }\n end\n end",
"def index\n \t@episodios = Episodio.where(serie_id: params[:serie_id],ativo: true)\n render json: @episodios\n end",
"def index\r\n @mecanica_consejeros = MecanicaConsejero.all\r\n end",
"def index\n @contas = Conta.all\n respond_to do |format|\n format.json { render json: @contas.to_json, status: :ok }\n end\n end",
"def index\n #@transacaos = Transacao.all\n @numero_conta = Contum.find_by_primeiro_corr(current_correntistum.cpf)\n @transacaos = Transacao.where(:nro_conta=>@numero_conta.numero)\n #@transacaos = Transacao.where(:nro_conta=>current_correntistum.contum.numero)\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @transacaos }\n end\n end",
"def index\n @instituicoes = Instituicao.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @instituicoes }\n end\n end",
"def index\n @colegios = Colegio.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @colegios }\n end\n end",
"def index\n @ores = Ore.all\n \n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @ores }\n end\n end",
"def index\n @comentarios_estrategia = ComentariosEstrategium.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @comentarios_estrategia }\n end\n end",
"def index\n @concursos = Concurso.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @concursos }\n end\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
POST /electronica_consejeros POST /electronica_consejeros.json
|
def create
  respond_to do |format|
    if @electronica_consejero.save
      format.html { redirect_to @electronica_consejero, notice: 'Se añadió un nombre de consejero de ingeniería electrónica correctamente.' }
      format.json { render :show, status: :created, location: @electronica_consejero }
    else
      format.html { render :new }
      format.json { render json: @electronica_consejero.errors, status: :unprocessable_entity }
    end
  end
end
|
[
"def create\n @condominios = Condominio.new(params[:condominio])\n\n respond_to do |format|\n if @condominios.save\n format.html { redirect_to @condominios, notice: 'Condominio was successfully created.' }\n format.json { render json: @condominios, status: :created, location: @condominios }\n else\n format.html { render action: \"new\" }\n format.json { render json: @condominios.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @condominio = Condominio.new(condominio_params)\n\n respond_to do |format|\n if @condominio.save\n format.html { redirect_to @condominio, notice: 'Condominio was successfully created.' }\n format.json { render :show, status: :created, location: @condominio }\n else\n format.html { render :new }\n format.json { render json: @condominio.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @condomino = Condomino.new(params[:condomino])\n\n respond_to do |format|\n if @condomino.save\n format.html { redirect_to @condomino, notice: 'Condomino was successfully created.' }\n format.json { render json: @condomino, status: :created, location: @condomino }\n else\n format.html { render action: \"new\" }\n format.json { render json: @condomino.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\r\n\r\n respond_to do |format|\r\n if @electronica_asignatura.save\r\n format.html { redirect_to @electronica_asignatura, notice: 'La asignatura del programa de ingeniería electrónica fue creada correctamente.' }\r\n format.json { render :show, status: :created, location: @electronica_asignatura }\r\n else\r\n format.html { render :new }\r\n format.json { render json: @electronica_asignatura.errors, status: :unprocessable_entity }\r\n end\r\n end\r\n end",
"def destroy\r\n @electronica_consejero.destroy\r\n respond_to do |format|\r\n format.html { redirect_to electronica_consejeros_url, notice: 'El nombre del consejero de ingeniería electrónica se eliminó correctamente.' }\r\n format.json { head :no_content }\r\n end\r\n end",
"def create\r\n\r\n respond_to do |format|\r\n if @electrica_consejero.save\r\n format.html { redirect_to @electrica_consejero, notice: 'Se añadió un nombre de consejero de ingeniería eléctrica correctamente.' }\r\n format.json { render :show, status: :created, location: @electrica_consejero }\r\n else\r\n format.html { render :new }\r\n format.json { render json: @electrica_consejero.errors, status: :unprocessable_entity }\r\n end\r\n end\r\n end",
"def nuevo\n if params[:caso_id].nil?\n respond_to do |format|\n format.html { render inline: 'Falta identificacion del caso' }\n end\n else\n @evento = Evento.new\n cid = params[:caso_id].to_i\n @evento.caso_id = cid\n @caso = @evento.caso\n @caso.current_usuario = current_usuario\n if @evento.save\n h=@evento.as_json\n h['evento'] = @evento.id\n respond_to do |format|\n format.js { render text: h }\n format.json { render json: h, status: :created }\n format.html { render inline: h.to_s }\n end\n else\n respond_to do |format|\n format.html { render action: \"error\" }\n format.json { \n render json: @evento.errors, \n status: :unprocessable_entity\n }\n end\n end\n end\n end",
"def create\n @conteo = Conteo.new(conteo_params)\n\n respond_to do |format|\n if @conteo.save\n format.html { redirect_to @conteo, notice: 'Conteo was successfully created.' }\n format.json { render :show, status: :created, location: @conteo }\n else\n format.html { render :new }\n format.json { render json: @conteo.errors, status: :unprocessable_entity }\n end\n end\n end",
"def nuevo\n if !params[:caso_id] || params[:caso_id] == ''\n respond_to do |format|\n format.html { render inline: 'Falta identificacion del caso' }\n end\n return\n end\n @ubicacion = Ubicacion.new\n @ubicacion.id_caso = params[:caso_id]\n @ubicacion.id_pais = 170\n if !@ubicacion.save(validate: false)\n return reterror\n end\n respond_to do |format|\n format.js { render text: @ubicacion.id.to_s }\n format.json { render json: @ubicacion.id.to_s, status: :created }\n format.html { render inline: @ubicacion.id.to_s }\n end\n end",
"def create\n @condominium = Condominium.new(condominium_params)\n respond_to do |format|\n if @condominium.save\n format.json { render :show, status: :created, object: @condominium }\n else\n format.json { render json: @condominium.errors, status: :unprocessable_entity }\n end\n end\n end",
"def escalas_prefeitura_new\n @escala = Escala.new\n\n if params[:escala].nil? == false\n\t\t dia = params[:escala][\"data_execucao(3i)\"].to_i\n\n \tmes = params[:escala][\"data_execucao(2i)\"].to_i\n \tano = params[:escala][\"data_execucao(1i)\"].to_i\n \thi = params[:escala][\"horario_inicio_execucao(4i)\"].to_i\n \tmi = params[:escala][\"horario_inicio_execucao(5i)\"].to_i\n \thf = params[:escala][\"horario_fim_execucao(4i)\"].to_i\n \tmf = params[:escala][\"horario_fim_execucao(5i)\"].to_i\n \thora_inicio = Time.zone.local(ano,mes,dia,hi,mi,0)\n \thora_fim = Time.zone.local(ano,mes,dia,hf,mf, 0)\n\n $array_escalas.push(params[:escala])\n\n\n respond_to do |format|\n format.html # escalas_prefeitura_new.html.erb\n format.json { render json: @escala }\n end\n end\n end",
"def create\n @inventario_cosa_registro = InventarioCosaRegistro.new(inventario_cosa_registro_params)\n\n respond_to do |format|\n if @inventario_cosa_registro.save\n format.html { redirect_to @inventario_cosa_registro, notice: 'Inventario cosa registro was successfully created.' }\n format.json { render :show, status: :created, location: @inventario_cosa_registro }\n else\n format.html { render :new }\n format.json { render json: @inventario_cosa_registro.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n \n\n respond_to do |format|\n if @encuestaseno.save\n format.html { redirect_to @encuestaseno, notice: 'Encuesta creada exitosamente.' }\n format.json { render :show, status: :created, location: @encuestaseno }\n else\n format.html { render :new }\n format.json { render json: @encuestaseno.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @inventario_cosa = InventarioCosa.new(inventario_cosa_params)\n\n respond_to do |format|\n if @inventario_cosa.save\n format.html { redirect_to @inventario_cosa, notice: 'Inventario cosa was successfully created.' }\n format.json { render :show, status: :created, location: @inventario_cosa }\n else\n format.html { render :new }\n format.json { render json: @inventario_cosa.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @conexao = Conexao.new(conexao_params)\n set_caixas\n respond_to do |format|\n if @conexao.save\n format.html { redirect_to @conexao, notice: 'Conexão criada com sucesso.' }\n format.json { render :show, status: :created, location: @conexao }\n else\n format.html { render :new }\n format.json { render json: @conexao.errors, status: :unprocessable_entity }\n end\n end\n end",
"def subida\n if params[:carnet]\n response = HTTParty.get(ENV['RUTA_EXTERNA_WHO']+\"/subidasexternas/buscar_mixto/?carnet=\"+params[:carnet].to_s)\n render json: response.body\n elsif (params[:nac] && params[:cedula])\n response = HTTParty.get(ENV['RUTA_EXTERNA_WHO']+\"/subidasexternas/buscar_mixto/?nac=\"+params[:nac] +\"&cedula=\"+ params[:cedula])\n render json: response.body\n elsif params[:numeroCnp]\n @recurso = Recurso.new(cnpnumero: params[:numeroCnp], procesado: false, carpeta: params[:file])\n if @recurso.save\n render json: @recurso, status: :created\n else\n render json: @recurso.errors, status: :unprocessable_entity\n end\n end\n end",
"def create\n @cotizclomul = Cotizclomul.new(cotizclomul_params)\n @cotizclomul.confirmacion = 'Por confirmar'\n\n numerocaracteresclave = 20\n caracteresclavecompra = %w{ 0 1 2 3 4 5 6 7 8 9 a b c d e f g h i j k l m n o p q r s t u v w x y z A B C D E F G H I J K L M N O P Q R S T U V W X Y Z }\n @cotizclomul.clavecompra = ''\n numerocaracteresclave.times do\n indiceletraescogida = rand(caracteresclavecompra.length)\n @cotizclomul.clavecompra = @cotizclomul.clavecompra + caracteresclavecompra[indiceletraescogida]\n end\n\n respond_to do |format|\n if @cotizclomul.save\n RemisorCotizacionesMailer.confirmacioncotizclomul(@cotizclomul).deliver_now\n p HTTParty.post('http://localhost:3002/api/prices', {body: @cotizclomul.to_json, headers: {'Content-Type': 'application/json'}})\n format.html { redirect_to @cotizclomul, notice: 'Cotizclomul was successfully created.' }\n format.json { render :show, status: :created, location: @cotizclomul }\n else\n format.html { render :new }\n format.json { render json: @cotizclomul.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @centro_atencion = CentroAtencion.new(centro_atencion_params)\n #@hola = 'prueba en donde estoy'\n respond_to do |format|\n if @centro_atencion.save\n format.html { redirect_to @centro_atencion, notice: 'Centro atencion was successfully created.' }\n format.json { render :show, status: :created, location: @centro_atencion }\n else\n format.html { render :new }\n format.json { render json: @centro_atencion.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @consorcio = Consorcio.new(consorcio_params)\n\n respond_to do |format|\n if @consorcio.save\n format.html { redirect_to @consorcio, notice: 'Consorcio was successfully created.' }\n format.json { render :show, status: :created, location: @consorcio }\n else\n format.html { render :new }\n format.json { render json: @consorcio.errors, status: :unprocessable_entity }\n end\n end\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
PATCH/PUT /electronica_consejeros/1 PATCH/PUT /electronica_consejeros/1.json
|
def update
  respond_to do |format|
    if @electronica_consejero.update(electronica_consejero_params)
      format.html { redirect_to @electronica_consejero, notice: 'El nombre del consejero de ingeniería electrónica se actualizó correctamente.' }
      format.json { render :show, status: :ok, location: @electronica_consejero }
    else
      format.html { render :edit }
      format.json { render json: @electronica_consejero.errors, status: :unprocessable_entity }
    end
  end
end
|
[
"def update\n @condominios = Condominio.find(params[:id])\n\n respond_to do |format|\n if @condominios.update_attributes(params[:condominio])\n format.html { redirect_to @condominios, notice: 'Condominio was successfully updated.' }\n format.json { head :ok }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @condominios.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @pelicula = Pelicula.find(params[:id])\n @pelicula.update(update_params)\n render json: @pelicula, status: :ok\n end",
"def update\n @opcion = Opcion.find(params[:id])\n\n if @opcion.update(params[:opcion])\n head :no_content\n else\n render json: @opcion.errors, status: :unprocessable_entity\n end\n end",
"def update\n respond_to do |format|\n if @sivic_discipulo.update(sivic_discipulo_params_netested)\n format.html { redirect_to @sivic_discipulo, notice: 'Registro alterado com sucesso.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @sivic_discipulo.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @veiculo = Veiculo.find(params[:id])\n\n respond_to do |format|\n if @veiculo.update_attributes(params[:veiculo])\n format.html { redirect_to @veiculo, :notice => 'Veiculo was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render :action => \"edit\" }\n format.json { render :json => @veiculo.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def update\n @consumo = Consumo.find(params[:id])\n\n respond_to do |format|\n if @consumo.update_attributes(params[:consumo])\n format.html { redirect_to @consumo.cliente, :notice => 'Consumo alterado com sucesso.' }\n format.json { head :no_content }\n else\n format.html { render :action => \"edit\" }\n format.json { render :json => @consumo.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @servico_cruzeiro.update(servico_cruzeiro_params)\n format.html { redirect_to @servico_cruzeiro, notice: 'Cruzeiro was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @servico_cruzeiro.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\r\n respond_to do |format|\r\n if @electronica_asignatura.update(electronica_asignatura_params)\r\n format.html { redirect_to @electronica_asignatura, notice: 'La asignatura del programa de ingeniería eléctrica fue actualizada correctamente.' }\r\n format.json { render :show, status: :ok, location: @electronica_asignatura }\r\n else\r\n format.html { render :edit }\r\n format.json { render json: @electronica_asignatura.errors, status: :unprocessable_entity }\r\n end\r\n end\r\n end",
"def update\n respond_to do |format|\n if @objeto.update(caracteristica_params)\n set_redireccion\n format.html { redirect_to @redireccion, notice: 'Caracteristica was successfully updated.' }\n format.json { render :show, status: :ok, location: @objeto }\n else\n format.html { render :edit }\n format.json { render json: @objeto.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @inventario_cosa.update(inventario_cosa_params)\n format.html { redirect_to @inventario_cosa, notice: 'Inventario cosa was successfully updated.' }\n format.json { render :show, status: :ok, location: @inventario_cosa }\n else\n format.html { render :edit }\n format.json { render json: @inventario_cosa.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @nota_tecnica.update(nota_tecnica_params)\n format.html { redirect_to @nota_tecnica, notice: 'Nota tecnica was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @nota_tecnica.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @oficio = Oficio.find(params[:id])\n\n respond_to do |format|\n if @oficio.update_attributes(params[:oficio])\n format.html { redirect_to @oficio, notice: 'Oficio was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @oficio.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @conexao.update(conexao_params)\n format.html { redirect_to @conexao, notice: 'Conexao was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @conexao.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @cliente_fisico.update(cliente_fisico_params)\n format.html { redirect_to @cliente_fisico, notice: 'Cliente fisico was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @cliente_fisico.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n require 'rest-client'\n response = RestClient.put('localhost:3001/colores/'+@color.id.to_s, color_params.as_json, {:Authorization => 'admin irizREhyoG6Ejwr4AcjsQME9'})\n if response.code == 200\n @color = JSON.parse(response.body)\n\n format.html { redirect_to @color, notice: \"Color was successfully updated.\" }\n format.json { render :show, status: :ok, location: @color }\n else\n format.html { render :edit, status: :unprocessable_entity }\n format.json { render json: @color.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @inventario_cosa_registro.update(inventario_cosa_registro_params)\n format.html { redirect_to @inventario_cosa_registro, notice: 'Inventario cosa registro was successfully updated.' }\n format.json { render :show, status: :ok, location: @inventario_cosa_registro }\n else\n format.html { render :edit }\n format.json { render json: @inventario_cosa_registro.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @servico_pacote.update(servico_pacote_params)\n format.html { redirect_to @servico_pacote, notice: 'Pacote was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @servico_pacote.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @tecnico = Tecnico.find(params[:id])\n\n respond_to do |format|\n if @tecnico.update_attributes(params[:tecnico])\n format.html { redirect_to @tecnico, :notice => 'Tecnico was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render :action => \"edit\" }\n format.json { render :json => @tecnico.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def update\n @foto = Fotosresposta.find(params[:id])\n\n respond_to do |format|\n if @foto.update_attributes(params[:fotosresposta])\n format.html { redirect_to @foto, notice: 'Respostas actualizadas com sucesso.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @foto.errors, status: :unprocessable_entity }\n end\n end\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
DELETE /electronica_consejeros/1 DELETE /electronica_consejeros/1.json
|
def destroy
  @electronica_consejero.destroy
  respond_to do |format|
    format.html { redirect_to electronica_consejeros_url, notice: 'El nombre del consejero de ingeniería electrónica se eliminó correctamente.' }
    format.json { head :no_content }
  end
end
|
[
"def destroy\n @condominios = Condominio.find(params[:id])\n @condominios.destroy\n\n respond_to do |format|\n format.html { redirect_to condominia_url }\n format.json { head :ok }\n end\n end",
"def destroy\r\n @electronica_asignatura.destroy\r\n respond_to do |format|\r\n format.html { redirect_to electronica_asignaturas_url, notice: 'La asignatura del programa de ingeniería eléctrica fue eliminada correctamente.' }\r\n format.json { head :no_content }\r\n end\r\n end",
"def delete_floor_plan(args = {}) \n delete(\"/files.json/floorplan/images\", args)\nend",
"def destroy\n @json.destroy\n\n head :no_content\n end",
"def destroy\n @asignatura.destroy\n respond_to do |format|\n format.json { head :no_content }\n end\n end",
"def destroy\n @examen_colocacion_idioma.destroy\n respond_to do |format|\n format.html { redirect_to examen_colocacion_idiomas_url, notice: 'La solicitud de examen de colocación se eliminó correctamente.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @condomino = Condomino.find(params[:id])\n @condomino.destroy\n\n respond_to do |format|\n format.html { redirect_to condominos_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n arquivo = Arquivo.find(@pregoestitulosgrafico.arquivo_id)\n\n File.delete(arquivo.caminho)\n\n pregoestitulo = Pregoestitulo.find(@pregoestitulosgrafico.pregoestitulo_id)\n \n @pregoestitulosgrafico.destroy\n respond_to do |format|\n format.html { redirect_to pregoestitulo, notice: 'Arquivo excluído com sucesso.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @sala_de_conferencium.destroy\n respond_to do |format|\n format.html { redirect_to sala_de_conferencia_url, notice: 'Sala de conferencium was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @contadore.destroy\n respond_to do |format|\n format.html { redirect_to contadores_url }\n format.json { head :no_content }\n end\n end",
"def destroy\r\n @sistemas_consejero.destroy\r\n respond_to do |format|\r\n format.html { redirect_to sistemas_consejeros_url, notice: 'El nombre del consejero de ingeniería de sistemas se eliminó correctamente.' }\r\n format.json { head :no_content }\r\n end\r\n end",
"def destroy\n @agronomiaquimica = Agronomiaquimica.find(params[:id])\n @agronomiaquimica.destroy\n\n respond_to do |format|\n format.html { redirect_to agronomiaquimicas_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @indicador_economico.destroy\n respond_to do |format|\n format.html { redirect_to indicador_economicos_url, notice: 'Indicador economico eliminado exitosamente.' }\n format.json { head :no_content }\n end\n end",
"def destroy\r\n @cambiar_consejero.destroy\r\n respond_to do |format|\r\n format.html { redirect_to cambiar_consejeros_url, notice: 'Su petición para cambiar de consejero fue eliminada.' }\r\n format.json { head :no_content }\r\n end\r\n end",
"def destroy\n @datos_insumos_reactivo.destroy\n respond_to do |format|\n format.html { redirect_to datos_insumos_reactivos_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @uchronia = Uchronia.find(params[:id])\n @uchronia.destroy\n\n respond_to do |format|\n format.html { redirect_to uchronias_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @ejercicio1 = Ejercicio1.find(params[:id])\n @ejercicio1.destroy\n\n respond_to do |format|\n format.html { redirect_to ejercicio1s_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @cliente_fisico.destroy\n respond_to do |format|\n format.html { redirect_to cliente_fisicos_url }\n format.json { head :no_content }\n end\n end",
"def delete_aos_version(args = {}) \n delete(\"/aosversions.json/#{args[:aosVersionId]}\", args)\nend"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
GET /aggregate_metadata/new GET /aggregate_metadata/new.json
|
def new
  @aggregate_metadatum = AggregateMetadatum.new
  respond_to do |format|
    format.html # new.html.erb
    format.json { render json: @aggregate_metadatum }
  end
end
|
[
"def new\n @aggregate = Aggregate.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @aggregate }\n end\n end",
"def new\n @aggregate_metric_metadata = AggregateMetricMetadata.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @aggregate_metric_metadata }\n end\n end",
"def new\n @aggregate_detail = AggregateDetail.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @aggregate_detail }\n end\n end",
"def new\n @generic_table_aggregation = GenericTable::Aggregation.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @generic_table_aggregation }\n end\n end",
"def new\n @aggregate = Aggregate.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @aggregate }\n end\n end",
"def create\n @aggregate_metric_metadata = AggregateMetricMetadata.new(params[:aggregate_metric_metadata])\n\n respond_to do |format|\n if @aggregate_metric_metadata.save\n format.html { redirect_to(@aggregate_metric_metadata, :notice => 'Aggregate metric metadata was successfully created.') }\n format.xml { render :xml => @aggregate_metric_metadata, :status => :created, :location => @aggregate_metric_metadata }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @aggregate_metric_metadata.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def new\n @aggregate_host = AggregateHost.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @aggregate_host }\n end\n end",
"def new\n @lo_metadata_schema = LoMetadataSchema.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @lo_metadata_schema }\n end\n end",
"def new\n @meta_data_group = MetaDataGroup.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @meta_data_group }\n end\n end",
"def new\n @metadata = Metadata.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @metadata }\n end\n end",
"def new\n @meta_data_field = MetaDataField.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @meta_data_field }\n end\n end",
"def new\n @analysis = Analysis.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @analysis }\n end\n end",
"def create\n @aggregate_detail = AggregateDetail.new(params[:aggregate_detail])\n\n respond_to do |format|\n if @aggregate_detail.save\n format.html { redirect_to @aggregate_detail, notice: 'Aggregate detail was successfully created.' }\n format.json { render json: @aggregate_detail, status: :created, location: @aggregate_detail }\n else\n format.html { render action: \"new\" }\n format.json { render json: @aggregate_detail.errors, status: :unprocessable_entity }\n end\n end\n end",
"def new\n @annotation = Annotation.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @annotation }\n end\n end",
"def new\n @analysis_item = AnalysisItem.new\n\n respond_to do |format|\n format.json { render json: @analysis_item }\n end\n end",
"def new\n @analysis = Analysis.new\n\n respond_to do |format|\n format.html\n format.json { render json: @analysis }\n end\n end",
"def new\n @analytic = Analytic.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @analytic }\n end\n end",
"def new\n @collect_query = CollectQuery.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @collect_query }\n end\n end",
"def new\n @analysis = Analyse.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @analysis }\n end\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
DELETE /aggregate_metadata/1 DELETE /aggregate_metadata/1.json
|
def destroy
  @aggregate_metadatum = AggregateMetadatum.find(params[:id])
  @aggregate_metadatum.destroy
  respond_to do |format|
    format.html { redirect_to aggregate_metadata_url }
    format.json { head :no_content }
  end
end
|
[
"def destroy\n @aggregate_metric_metadata = AggregateMetricMetadata.find(params[:id])\n @aggregate_metric_metadata.destroy\n\n respond_to do |format|\n format.html { redirect_to(aggregate_metric_metadatas_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @aggregate = Aggregate.find(params[:id])\n @aggregate.destroy\n\n respond_to do |format|\n format.html { redirect_to aggregates_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @aggregate_detail = AggregateDetail.find(params[:id])\n @aggregate_detail.destroy\n\n respond_to do |format|\n format.html { redirect_to aggregate_details_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @aggregate = Aggregate.find(params[:id])\n @aggregate.destroy\n\n respond_to do |format|\n format.html { redirect_to(aggregates_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @aggregate.destroy\n respond_to do |format|\n format.html { redirect_to aggregates_url, notice: 'Aggregate was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @aggregate_metric = AggregateMetric.find(params[:id])\n @aggregate_metric.destroy\n\n respond_to do |format|\n format.html { redirect_to(aggregate_metrics_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @generic_table_aggregation = GenericTable::Aggregation.find(params[:id])\n @generic_table_aggregation.destroy\n\n respond_to do |format|\n format.html { redirect_to generic_table_aggregations_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @event_agg.destroy\n respond_to do |format|\n format.html { redirect_to event_aggs_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n PathwayMap.where(:annotation_collection_id => @annotation_collection.id).destroy_all\n PathwayCount.where(:countable_type=> 'AnnotationCollection', :countable_id => @annotation_collection.id).destroy_all\n @annotation_collection.destroy\n\n respond_to do |format|\n format.html { redirect_to :root }\n format.json { head :no_content }\n end\n end",
"def destroy\n @eventagg.destroy\n respond_to do |format|\n format.html { redirect_to eventaggs_url }\n format.json { head :no_content }\n end\n end",
"def delete_aggregate(aggregate)\n raise NotImplementedError\n end",
"def delete_analysis(analysis_id); rest_delete(\"#{link('analyses')}/#{analysis_id}\"); nil; end",
"def destroy\n @aggregate_host = AggregateHost.find(params[:id])\n @aggregate_host.destroy\n\n respond_to do |format|\n format.html { redirect_to aggregate_hosts_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @metadata = Metadata.find(params[:id])\n @metadata.destroy\n\n respond_to do |format|\n format.html { redirect_to(metadata_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @analysis_item = AnalysisItem.find(params[:id])\n @analysis_item.destroy\n\n respond_to do |format|\n format.json { no_content_as_json }\n end\n end",
"def destroy\n @analytic = Analytic.find(params[:id])\n @analytic.destroy\n\n respond_to do |format|\n format.html { redirect_to analytics_url }\n format.json { head :no_content }\n end\n end",
"def delete_record(asset)\n post(\"metadata.delete\", self.class.xml_doc.request { |r|\n r.uuid asset.uuid\n }) rescue nil # Geonetwork 500's if the record doesn't exist...\n end",
"def destroy\n @metadata_source = MetadataSource.find(params[:id])\n @metadata_source.destroy\n\n respond_to do |format|\n format.html { redirect_to metadata_sources_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @lo_metadata_schema = LoMetadataSchema.find(params[:id])\n @lo_metadata_schema.destroy\n\n respond_to do |format|\n format.html { redirect_to lo_metadata_schemas_url }\n format.json { head :no_content }\n end\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Custom contexts. This context provides all of the base hooks for testing against a cinchbot. It reduces the boilerplate code needed to run each test. It should only be used in the outermost context, as it creates and runs fake IRCd and cinchbot instances.
|
def describe_with_cinchbot(comment, &block)
  context_class = context comment.to_s do
    before(:context) do
      @config = YAML.load_file File.join(TEST_ROOT, '/_config/cinchize.yml')
      @ircd = Ircd.new @config['servers']['network_test']['nick'], @config['servers']['network_test']['channels'].first
      @cinchbot = start_cinchbot
      @ircd.accept_client
    end
    after(:example) do
      @ircd.flush_read
    end
    after(:context) do
      kill_cinchbot @cinchbot
      @ircd.close
    end
  end

  def response_to(message, lines: 1)
    responses = []
    @ircd.tester_send_channel message
    lines.times do
      responses << @ircd.gets
    end
    return responses.first if lines == 1
    responses
  end

  def response_to_private_message(message, lines: 1)
    responses = []
    @ircd.tester_send_bot message
    lines.times do
      responses << @ircd.gets
    end
    return responses.first if lines == 1
    responses
  end

  context_class.class_eval &block
end
|
[
"def set_context\n require 'factory_bot_rails'\n require './test/contexts'\n include Contexts\n puts 'Contexts enabled'\n if Contexts.respond_to?(:create_all)\n create_all\n puts 'Contexts built'\n end\nend",
"def context\n @context ||= new_context_mock(context_options, &finalize_proc)\n end",
"def context(name = nil, &block)\n name ||= Helpers.generate_uuid\n # context \"created with defaults\" ... 'ContextCreatedWithDefaults'\n class_name = Helpers.to_const_name(name.to_s, PREFIX, SUFFIX)\n if const_defined?(class_name)\n klass = const_get(class_name)\n if ( klass.superclass == self rescue nil )\n warn \"duplicate context definition with the name #{name.inspect} \" +\n \"found at #{caller.first} it is going to be merged with \" +\n \"the previous context definition\"\n else\n raise \"could not create a context with the name #{name.inspect} \" +\n \"as a constant #{class_name} is already defined and is not \" +\n \"another context definition\"\n end\n else\n klass = Class.new(self)\n klass.extend Test::Unit::Context\n klass.context_name = name\n # NOTE: make sure by default we run \"inherited\" setup/teardown hooks\n # unless context code does re-define the hook method e.g. `def setup`\n # instead of using the `setup do` or the setup method marker syntax :\n klass.class_eval do\n def setup; super; end\n def cleanup; super; end\n def teardown; super; end\n end\n const_set(class_name, klass)\n end\n context_definitions << klass\n klass.class_eval(&block)\n klass\n end",
"def build_unit_test_contexts\n create_stores\n create_employees\n create_assignments\n create_jobs\n create_shifts\n create_shift_jobs\n create_flavors\n create_store_flavors\n end",
"def contexts\n @contexts ||= Hash.new\n end",
"def setup_context\n apply_generator_values_from_config\n Generator.add_attr_to_context(:have_git, have_git?)\n Generator.add_attr_to_context(:skip_git_init, false)\n config.each do |k, v|\n Generator.add_attr_to_context(k, v)\n end\n # inject the arbitrary args supplied on cmdline, default = []\n config[:generator_arg].each do |k, v|\n Generator.add_attr_to_context(k, v)\n end\n end",
"def setup\n result = instance_eval(&@block)\n contexts.each { |context| context.setup }\n result\n end",
"def context\n extract_ids\n apply_builders\n @ctx\n end",
"def context(nodes, &block)\n nodes = nodes.first.is_a?(Matcher) ? nodes.first : WhitelistMatcher.new(nodes)\n @contexts << context = { nodes: nodes, block: block }\n\n # Evaluate the new context for existing nodes\n each { |node| eval_context(context, node) }\n\n context\n end",
"def context(description, &block)\n context = Speciny::MatcherGroup.new(description, &block)\n # Add all the `before` blocks to be executed if any where defined\n # higher up.\n #\n # For example consider the following spec:\n #\n # describe \"Before block\" do\n # before do\n # @somestring = \"somestring\"\n # end\n #\n # it \"should have access to instance variable \n # in describe before block\" do\n #\n # @somestring.should == \"somestring\"\n #\n # end\n #\n # scenario \"Nested scenarios should have access to\n # anything set in parent before block\" do\n #\n # it \"have access to parent variable\" do\n # @somestring.should == \"somestring\"\n # end\n #\n # end\n # end\n #\n @before.each do |order, values|\n values.each { |before_block| context.before(order, &before_block) }\n end\n # Now call the `run!` method for the scenario\n context.run!\n end",
"def run!\n # Only run tests if the set-up passes\n if before.passed?\n # Run tests and report results\n new_context = if Context.async?\n run_tests_async!\n else\n run_tests!\n end\n # Run the clean-up file, if present\n after.run!\n else\n # The set-up file failed, All tests in this context and sub-contexts cannot be run.\n new_context = set_reason before.result, before.reason\n # Push the setup file to the 'stack trace'\n new_context = new_context.push before.file\n # Report the results of tests\n new_context.report\n end\n new_context\n end",
"def build_context\n InterceptionContext.new(@interceptors)\n end",
"def context\n env[CONTEXT_KEY] ||= {}\n end",
"def context_methods\n super + [:context]\n end",
"def setup_context\n Generator.context.have_git = have_git?\n Generator.context.skip_git_init = false\n end",
"def suite_contexts\n @suite_contexts ||= self.suite.tests.inject([]) do |contexts, test|\n contexts << test.context_info.klass\n end.uniq\n end",
"def setup_run_context\n @run_context = policy_builder.setup_run_context(specific_recipes, run_context)\n assert_cookbook_path_not_empty(run_context)\n run_status.run_context = run_context # backcompat for chefspec\n run_context\n end",
"def with_context_mock(options={}, &finalize_proc)\n all_options = context_options.merge(options)\n @context = new_context_mock(all_options, &finalize_proc)\n yield\n ensure\n @context = nil\n end",
"def context\n @_context ||= {\n :argv => START_CTX[:argv].map { |arg| arg.dup },\n :cwd => START_CTX[:cwd].dup,\n 0 => START_CTX[0].dup,\n }.tap do |ctx|\n rewrite_context(ctx)\n end\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
message has been sent?
|
def msg_sent?
  true
end
|
[
"def message_sent?\n message.sent?\n end",
"def message_sent?\n message.state == 'sent'\n end",
"def sent?\n self.status == 'sent'\n end",
"def isMyMessage(message)\r\n return message[:type] == :Sent\r\n end",
"def has_next_message\n\t\tend",
"def receive_message\n #$>.puts \"Received complete message\"\n true\n end",
"def has_message?\n has_message\n # && messages.count > 0\n end",
"def message_to_bot?\n @message.scan(/^\\<\\@#{@client.self.id}\\>/).count.positive? || @channel[0] == \"D\"\n end",
"def event_was_sent?( type )\n\t\treturn !self.find_events( type ).empty?\n\tend",
"def work(message)\n if message.is_a?(Message)\n self.count = count + 1\n\n Concurrent::IVar.new(:ok)\n else\n expected_messages_received?\n end\n end",
"def replied?\n receiver.ties_to?(sender)\n end",
"def was_sent_by?(actor)\n (self.sender.class == actor.class) && (self.sender.id == actor.id)\n end",
"def send session, msg\n session.send msg; true\n end",
"def key_exists?\n old_message = !@store.msetnx(msg_id, :status =>\"incomplete\", :expires => @expires_at.to_i, :timeout => (now + timeout).to_i)\n if old_message\n logger.debug \"Beetle: received duplicate message: #{msg_id} on queue: #{@queue}\"\n end\n old_message\n end",
"def buffer_message?\n @on_message || !@on_message_frame\n end",
"def reply?\n self.message_kind.to_s == REPLY\n end",
"def send_success\n end",
"def flushed?\n @send_buffer.empty?\n end",
"def send_message(message)\n\t\t\t@observers.each() { |observer|\n\t\t\t\tif(observer.handle_message(message) == false)\n\t\t\t\t\treturn(false)\n\t\t\t\tend\n\t\t\t}\n\t\t\t\n\t\t\treturn(true)\n\t\tend"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Returns an array of symbols, one symbol (column name) for each column that contains errors.
|
def get_columns_with_errors(model_instance)
  list = model_instance.errors.map {|e| e[0].to_sym}
end
|
[
"def errors\r\n\t\t\tresult = []\r\n\t\t\t@table.each do |key, symbol|\r\n\t\t\t\tresult << InternalError.new(\"no domain symbol set on #{key.inspect}\") if symbol.class <= DomainValueSymbol && symbol.domain_symbol.nil?\r\n\t\t\tend\r\n\t\t\treturn result\r\n\t\tend",
"def symbol cols\n decode_values :symbol, cols\n end",
"def errors_for row\n ie = insert_error_for row\n return ie.errors if ie\n []\n end",
"def get_error_codes\n return [] unless has_errors\n\n @errors.keys\n end",
"def related_error_columns\n return @related_error_columns if defined? @related_error_columns\n @related_error_columns = []\n if status_questions\n @related_error_columns = find_related_columns status_questions.map{|sq|sq.my_column_name}\n end\n return @related_error_columns\n end",
"def errors\n if missing.empty?\n return RipperErrors.new(@code_lines.map(&:original).join).call.errors\n end\n\n missing.map { |miss| why(miss) }\n end",
"def header_array\n @columns.collect {|c|\n if @table[c]\n @table[c][:name].to_s\n else\n \"\"\n end\n }.compact\n end",
"def humanized_errors\n _errors.map do |code|\n case code\n when \"UNKNOWN_ERROR\" then \"An unknown error has occurred.\"\n when \"ISA_SEGMENT_NOT_DETECTED_FIRST\" then \"ISA segment was not detected first.\"\n when \"INVALID_COMPONENT_SEPARATOR\" then \"Either no component separator was found or it was not of length one.\"\n else errorMessageHandler(code)\n end\n end.compact\n end",
"def error_fields\n\t\treturn self.missing | self.invalid.keys\n\tend",
"def errors\n @fields.flat_map(&:errors)\n end",
"def error_messages\n errors ? errors.map { |prop, msg| \"#{prop} : #{msg}\" } : []\n end",
"def errors\n result = []\n result.push(:title) unless valid_string? @title\n result.push(:author) unless valid_string? @author\n result.push(:release_date) unless valid_date? @release_date\n result.push(:publisher) unless valid_string? @publisher\n result.push(:isbn) unless valid_integer? @isbn\n result\n end",
"def by_slot\n @errors.inject({ }) do |hash, error|\n sn = error.slotname\n hash[sn] ||= []\n hash[sn] << error\n hash\n end\n end",
"def attribute_errors\n errors = []\n self.class.all_attrs.values.each do |attr|\n attr.error_messages(attributes[attr.name]).each do |message|\n errors << \"#{to_s} #{message}\"\n end\n end\n errors\n end",
"def symbol col\n decode_value :symbol, col\n end",
"def build_error_methods_list(method_errors)\n output = \"<td><ol>\"\n method_errors.each do |itm|\n output << build_alternate_method_error_column(itm)\n end\n output << \"</ol></td>\"\n end",
"def column_names\n columns.map { |c| c.name }\n end",
"def build_errors_csv\n CSV.generate do |csv|\n csv << %w[\n appeal_type\n appeal_id\n error\n error_message\n callstack\n ]\n @errors.each do |error|\n csv << [\n error.appeal_type,\n error.appeal_id,\n error.error,\n error.message,\n error.callstack\n ].flatten\n end\n end\n end",
"def split_symbol(sym)\n s = sym.to_s\n if m = COLUMN_REF_RE1.match(s)\n m[1..3]\n elsif m = COLUMN_REF_RE2.match(s)\n [nil, m[1], m[2]]\n elsif m = COLUMN_REF_RE3.match(s)\n [m[1], m[2], nil]\n else\n [nil, s, nil]\n end\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
GET /item_classifications GET /item_classifications.json
|
def index
  @item_classifications = ItemClassification.all
end
|
[
"def list_item_classifications(companyId, itemId, options={}) path = \"/api/v2/companies/#{companyId}/items/#{itemId}/classifications\"\n get(path, options, AvaTax::VERSION) end",
"def classifications(classification)\n params = {\n classification: classification\n }.compact\n\n _get(\"/account/classifications\", params) { |json| json }\n end",
"def get_item_classification(companyId, itemId, id) path = \"/api/v2/companies/#{companyId}/items/#{itemId}/classifications/#{id}\"\n get(path, {}, AvaTax::VERSION) end",
"def create_item_classifications(companyId, itemId, model) path = \"/api/v2/companies/#{companyId}/items/#{itemId}/classifications\"\n post(path, model, {}, AvaTax::VERSION) end",
"def classifications\n if params[:id].downcase == \"any\"\n out = ShelfListing.all.pluck(:classification).uniq.to_a.sort\n else\n out = ShelfListing.where(classification_system: params[:id]).pluck(:classification).uniq.to_a.sort\n end\n render json: [\"Any\"] + out\n end",
"def classifications\n classifications = @data[\"spData\"][\"classifications\"]\n if not classifications\n raise Holdings::Error, NO_CLASSIFICATIONS_ERROR\n end\n classifications.first[\"classifications\"]\n end",
"def classification\n id = params[:id]\n uri = \"/repositories/#{params[:repo_id]}/classifications/#{id}\"\n response = JSONModel::HTTP::get_json(\"/search/published_tree\", :node_uri => uri)\n\n tree = ASUtils.json_parse(response['tree_json'])\n\n data = tree['direct_children']\n .map {|child| JSONModel(:classification_term).id_for(child['record_uri'])}\n .map {|id| JSONModel(:classification_term).find(id, :repo_id => params[:repo_id], \"resolve[]\" => [\"linked_records\"]) }\n .map {|term|\n term.to_hash.merge({:container_children => term['linked_records']\n .map {|linked_record| linked_record['_resolved']['instances'].map{|instance| instance.merge({'dates' => linked_record['_resolved']['dates'], 'resource_title' => linked_record['_resolved']['title'], 'resource_data' => JSONModel.parse_reference(linked_record['ref'])})} }\n .flatten\n .reject{|instance| instance['instance_type'] == 'digital_object'}\n .map {|instance|\n result = {:name => instance['resource_title'], :resource_data => instance['resource_data']}\n if date = instance['dates'][0]\n if date['expression']\n result[:date] = date['expression']\n elsif date['begin'] || date['end']\n result[:date] = [date['begin'], date['end']].compact.join('-')\n end\n end\n\n container = instance['container'] || {}\n if(container['type_1'] && container['indicator_1'])\n result[:container_1] = I18n.t(\"enumerations.container_type.#{container[\"type_1\"]}\") + \" #{container['indicator_1']}\"\n end\n\n if(container['type_2'] && container['indicator_2'])\n result[:container_2] = I18n.t(\"enumerations.container_type.#{container[\"type_2\"]}\") + \" #{container['indicator_2']}\"\n end\n\n result\n }\n })\n }\n render :json => ASUtils.to_json(data)\n end",
"def item_classes\r\n BnetApi::make_request('/wow/data/item/classes')\r\n end",
"def subclassifications\n if params[:id].downcase == \"any\"\n render json: [\"Any\"] + ShelfListing.all.pluck(:subclassification).uniq.to_a\n else\n render json: [\"Any\"] + ShelfListing.where(classification: params[:id]).pluck(:subclassification).uniq.to_a\n end\n end",
"def update_item_classification(companyId, itemId, id, model) path = \"/api/v2/companies/#{companyId}/items/#{itemId}/classifications/#{id}\"\n put(path, model, {}, AvaTax::VERSION) end",
"def add_classification\n @bib.classification.each do |c|\n case c.type\n when \"type\" then @item[\"type\"] = c.value\n when \"mendeley\" then @item[\"mendeley-tags\"] = c.value\n end\n end\n end",
"def classifications\n title.classifications\n end",
"def index\n @classifieds = Classified.all\n \n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @classifieds }\n end\n end",
"def create\n @item_classification = ItemClassification.new(item_classification_params)\n\n respond_to do |format|\n if @item_classification.save\n format.html { redirect_to @item_classification, notice: 'Classificação criada com sucesso.' }\n format.json { render :show, status: :created, location: @item_classification }\n else\n format.html { render :new }\n format.json { render json: @item_classification.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @item_classification = ItemClassification.new(params[:item_classification])\n\n respond_to do |format|\n if @item_classification.save\n format.html { redirect_to @item_classification, notice: 'Item classification was successfully created.' }\n format.json { render json: @item_classification, status: :created, location: @item_classification }\n else\n format.html { render layout: 'form', action: \"new\" }\n format.json { render json: @item_classification.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n Rails.logger.debug \"[ClassificationsController.create] params.class: #{params.class}, params: #{params}\"\n @classifaction = Classification.new transaction_data: params\n @classifaction.classifiers << StatusCodeClassifier.classify( @classifaction.transaction_data )\n @classifaction.classify\n\n if @classifaction.save\n render json: @classifaction.as_jsonapi, status: :created\n else\n render json: { error: 'oops' }, status: 500\n end\n end",
"def classifications\n classes = Hash.new\n\n h = Hash.new\n if b510 = @doc.at_xpath(\"//b510\")\n # Collect the main classification\n b511_ns = b510.xpath(\".//b511\")\n mclasses = []\n if b511_ns != nil\n b511_ns.each do |cls|\n mc = extract_inner_text(cls)\n mclasses << mc unless mc.empty?\n end\n h.store(:mainclass, mclasses) unless mclasses.empty?\n end\n\n # Collect the further classification, if any\n b512_ns = b510.xpath(\".//b512\")\n fclasses = []\n if b512_ns != nil\n b512_ns.each do |cls|\n fc = extract_inner_text(cls)\n fclasses << fc unless fc.empty?\n end\n h.store(:subclass, fclasses) unless fclasses.empty?\n end\n end\n classes.store(:domestic_classifications, h) unless h.empty?\n\n h = Hash.new\n if b520 = @doc.at_xpath(\"//b520\")\n h = Hash.new\n\n # Collect the main classification\n b521_ns = b520.xpath(\".//b521\")\n mclasses = []\n if b521_ns != nil\n b521_ns.each do |cls|\n mc = extract_inner_text(cls)\n mclasses << mc unless mc.empty?\n end\n h.store(:mainclass, mclasses) unless mclasses.empty?\n end\n\n # Collect the further classification, if any\n b522_ns = b520.xpath(\".//b522\")\n fclasses = []\n if b522_ns != nil\n b522_ns.each do |cls|\n fc = extract_inner_text(cls)\n fclasses << fc unless fc.empty?\n end\n h.store(:subclass, fclasses) unless fclasses.empty?\n end\n\n # Collect the country\n b527 = b520.at_xpath(\".//b527\")\n country = extract_inner_text(b527)\n h.store(:country, country) unless country.empty?\n end\n classes.store(:international_classifications, h) unless h.empty?\n\n classes\n end",
"def classifications(limit_results = false)\n @classifications = []\n @limit_results = limit_results\n calculate_classifications\n @classifications\n end",
"def index\n @classifieds = Classified.all\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
POST /item_classifications POST /item_classifications.json
|
def create
  @item_classification = ItemClassification.new(item_classification_params)
  respond_to do |format|
    if @item_classification.save
      format.html { redirect_to @item_classification, notice: 'Classificação criada com sucesso.' }
      format.json { render :show, status: :created, location: @item_classification }
    else
      format.html { render :new }
      format.json { render json: @item_classification.errors, status: :unprocessable_entity }
    end
  end
end
|
[
"def create_item_classifications(companyId, itemId, model) path = \"/api/v2/companies/#{companyId}/items/#{itemId}/classifications\"\n post(path, model, {}, AvaTax::VERSION) end",
"def create\n @item_classification = ItemClassification.new(params[:item_classification])\n\n respond_to do |format|\n if @item_classification.save\n format.html { redirect_to @item_classification, notice: 'Item classification was successfully created.' }\n format.json { render json: @item_classification, status: :created, location: @item_classification }\n else\n format.html { render layout: 'form', action: \"new\" }\n format.json { render json: @item_classification.errors, status: :unprocessable_entity }\n end\n end\n end",
"def list_item_classifications(companyId, itemId, options={}) path = \"/api/v2/companies/#{companyId}/items/#{itemId}/classifications\"\n get(path, options, AvaTax::VERSION) end",
"def create\n Rails.logger.debug \"[ClassificationsController.create] params.class: #{params.class}, params: #{params}\"\n @classifaction = Classification.new transaction_data: params\n @classifaction.classifiers << StatusCodeClassifier.classify( @classifaction.transaction_data )\n @classifaction.classify\n\n if @classifaction.save\n render json: @classifaction.as_jsonapi, status: :created\n else\n render json: { error: 'oops' }, status: 500\n end\n end",
"def update_item_classification(companyId, itemId, id, model) path = \"/api/v2/companies/#{companyId}/items/#{itemId}/classifications/#{id}\"\n put(path, model, {}, AvaTax::VERSION) end",
"def submit_classification(params)\n\n require 'uri'\n require \"net/http\"\n\n uri = URI(@classifications_endpoint)\n \n req = Net::HTTP::Post.new(uri.path, {'BOT_AUTH' => ENV['SCRIBE_BOT_TOKEN']})\n req.body = params.to_params \n http = Net::HTTP.new(uri.host, uri.port)\n\n response = http.start {|http| http.request(req) }\n\n begin\n JSON.parse response.body\n rescue\n nil\n end\n end",
"def add_classification\n @bib.classification.each do |c|\n case c.type\n when \"type\" then @item[\"type\"] = c.value\n when \"mendeley\" then @item[\"mendeley-tags\"] = c.value\n end\n end\n end",
"def get_item_classification(companyId, itemId, id) path = \"/api/v2/companies/#{companyId}/items/#{itemId}/classifications/#{id}\"\n get(path, {}, AvaTax::VERSION) end",
"def index\n @item_classifications = ItemClassification.all\n end",
"def create\n @item_class = ItemClass.new(params[:item_class])\n\n respond_to do |format|\n if @item_class.save\n format.html { redirect_to @item_class, notice: 'Item class was successfully created.' }\n format.json { render json: @item_class, status: :created, location: @item_class }\n else\n format.html { render action: \"new\" }\n format.json { render json: @item_class.errors, status: :unprocessable_entity }\n end\n end\n end",
"def classifications(classification)\n params = {\n classification: classification\n }.compact\n\n _get(\"/account/classifications\", params) { |json| json }\n end",
"def create\n @classification = Classification.new(classification_params)\n\n respond_to do |format|\n if @classification.save\n format.html { redirect_to @classification }\n format.json { render :show, status: :created, location: @classification }\n else\n format.html { render :new }\n format.json { render json: @classification.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @classification = Classification.new(admin_classification_params)\n\n respond_to do |format|\n if @classification.save\n format.html { redirect_to admin_classifications_path, notice: 'Classification was successfully created.' }\n format.json { render :show, status: :created, location: @classification }\n else\n format.html { render :new }\n format.json { render json: @classification.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @classified = Classified.new(params[:classified])\n\n respond_to do |format|\n if @classified.save\n format.html { redirect_to @classified, notice: 'Classified was successfully created.' }\n format.json { render json: @classified, status: :created, location: @classified }\n else\n format.html { render action: \"new\" }\n format.json { render json: @classified.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @classified = Classified.new(classified_params)\n respond_to do |format|\n if @classified.save\n format.html { redirect_to @classified, notice: 'Classified was successfully created.' }\n format.json { render :show, status: :created, location: @classified }\n else\n format.html { render :new }\n format.json { render json: @classified.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @item_classification = ItemClassification.find(params[:id])\n\n respond_to do |format|\n if @item_classification.update_attributes(params[:item_classification])\n format.html { redirect_to @item_classification, notice: 'Item classification was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render layout: 'form', action: \"edit\" }\n format.json { render json: @item_classification.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @company_classification = CompanyClassification.new\n @company_classification.company_id = 1 #params[:]\n @company_classification.classification_id = 1 #params[:]\n @company_classification.status = 1\n @company_classification.save\n render :json => @company_classifications.to_json(:include =>[:company,:classification])\n\n \n end",
"def update\n respond_to do |format|\n if @item_classification.update(item_classification_params)\n format.html { redirect_to @item_classification, notice: 'Classificação atualizada com sucesso.' }\n format.json { render :show, status: :ok, location: @item_classification }\n else\n format.html { render :edit }\n format.json { render json: @item_classification.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @risk_classification = RiskClassification.new(params[:risk_classification])\n\n respond_to do |format|\n if @risk_classification.save\n format.html { redirect_to @risk_classification, notice: 'Risk classification was successfully created.' }\n format.json { render json: @risk_classification, status: :created, location: @risk_classification }\n else\n format.html { render action: \"new\" }\n format.json { render json: @risk_classification.errors, status: :unprocessable_entity }\n end\n end\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
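The snippets in the preceding row all target a JSON create endpoint along the lines of POST /item_classifications.json. As a hedged sketch of how a client could exercise such an endpoint from plain Ruby (the host, port, and the :name/:description attributes are assumptions for illustration, not values taken from the dataset):

require "net/http"
require "json"
require "uri"

# Hypothetical endpoint and payload; the attribute names are placeholders.
uri = URI("http://localhost:3000/item_classifications.json")

request = Net::HTTP::Post.new(uri, "Content-Type" => "application/json")
request.body = { item_classification: { name: "Hardware", description: "Physical items" } }.to_json

response = Net::HTTP.start(uri.hostname, uri.port) { |http| http.request(request) }

# A successful create is expected to answer 201 Created with the new record as JSON.
puts response.code
puts response.body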
PATCH/PUT /item_classifications/1 PATCH/PUT /item_classifications/1.json
|
def update
respond_to do |format|
if @item_classification.update(item_classification_params)
format.html { redirect_to @item_classification, notice: 'Classificação atualizada com sucesso.' }
format.json { render :show, status: :ok, location: @item_classification }
else
format.html { render :edit }
format.json { render json: @item_classification.errors, status: :unprocessable_entity }
end
end
end
|
[
"def update_item_classification(companyId, itemId, id, model) path = \"/api/v2/companies/#{companyId}/items/#{itemId}/classifications/#{id}\"\n put(path, model, {}, AvaTax::VERSION) end",
"def update\n @item_classification = ItemClassification.find(params[:id])\n\n respond_to do |format|\n if @item_classification.update_attributes(params[:item_classification])\n format.html { redirect_to @item_classification, notice: 'Item classification was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render layout: 'form', action: \"edit\" }\n format.json { render json: @item_classification.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @item_class = ItemClass.find(params[:id])\n\n respond_to do |format|\n if @item_class.update_attributes(params[:item_class])\n format.html { redirect_to @item_class, notice: 'Item class was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @item_class.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @classification.update(classification_params)\n format.html { redirect_to @classification, notice: 'Classification was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @classification.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @classified = @user.classifieds.find(params[:id])\n\n respond_to do |format|\n if @classified.update_attributes(params[:classified])\n format.html { redirect_to @classified, notice: 'Classified was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @classified.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @classified = Classified.find(params[:id])\n\n respond_to do |format|\n if @classified.update_attributes(params[:classified])\n format.html { redirect_to @classified, notice: 'Classified was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @classified.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create_item_classifications(companyId, itemId, model) path = \"/api/v2/companies/#{companyId}/items/#{itemId}/classifications\"\n post(path, model, {}, AvaTax::VERSION) end",
"def update\n @risk_classification = RiskClassification.find(params[:id])\n\n respond_to do |format|\n if @risk_classification.update_attributes(params[:risk_classification])\n format.html { redirect_to @risk_classification, notice: 'Risk classification was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @risk_classification.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @requirement_item.update(requirement_item_params)\n format.html { redirect_to @requirement_item, notice: 'Requirement item was successfully updated.' }\n format.json { render :show, status: :ok, location: @requirement_item }\n else\n format.html { render :edit }\n format.json { render json: @requirement_item.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @classification.update(classification_params)\n format.html { redirect_to @classification, notice: t('controller.successfully_updated', model: t('activerecord.models.classification')) }\n format.json { head :no_content }\n else\n @classification_types = ClassificationType.all\n format.html { render action: \"edit\" }\n format.json { render json: @classification.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @classification = Classification.find(params[:id])\n\n respond_to do |format|\n if @classification.update_attributes(params[:classification])\n format.html { redirect_to(@classification, :notice => 'Classification was successfully updated.') }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @classification.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def update\n @item_specification = ItemSpecification.find(params[:id])\n\n respond_to do |format|\n if @item_specification.update_attributes(item_specification_params)\n format.html { redirect_to @item_specification, notice: 'Item specification was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @item_specification.errors, status: :unprocessable_entity }\n end\n end\n end",
"def _update_item(http, headers, path, body, name)\n resp = retry_request(http, \"PATCH\", path, body, headers)\n if resp.is_a?(Net::HTTPOK)\n Chef::Log.info(\"Updated keystone item '#{name}'\")\n else\n _raise_error(resp, \"Unable to update item '#{name}'\", \"_update_item\")\n end\nend",
"def update\n @classification = Classification.find(params[:id])\n\n respond_to do |format|\n if @classification.update_attributes(params[:classification])\n flash[:notice] = 'Classification was successfully updated.'\n format.html { redirect_to(@classification) }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @classification.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def update\n\n #update the item of request_item\n if (params[:request_item].present?)\n @request_item.item = params[:request_item][:item].present? ? Item.new(name: params[:request_item][:item][:name]) : @request_item.item\n end\n #update all other parameters\n if @request_item.update(request_item_params)\n render json: @request_item\n else\n render json: @request_item.errors, status: :bad_request\n end\n\n end",
"def update\n respond_to do |format|\n if @firstclassification.update(firstclassification_params)\n format.html { redirect_to @firstclassification, notice: 'Firstclassification was successfully updated.' }\n format.json { render :show, status: :ok, location: @firstclassification }\n else\n format.html { render :edit }\n format.json { render json: @firstclassification.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @classifier = Classifier.find(params[:id])\n\n respond_to do |format|\n if @classifier.update_attributes(params[:classifier])\n format.html { redirect_to @classifier, notice: 'Classifier was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @classifier.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @classified.update(classified_params)\n format.html { redirect_to @classified, notice: 'Classified was successfully updated.' }\n format.json { render :show, status: :ok, location: @classified }\n else\n format.html { render :edit }\n format.json { render json: @classified.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @consideration_item.update(consideration_item_params)\n format.html { redirect_to @consideration_item, notice: 'Consideration item was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @consideration_item.errors, status: :unprocessable_entity }\n end\n end\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
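The update action in this row calls an item_classification_params helper that the snippet itself does not define. In conventional Rails scaffolding this is a private strong-parameters method; a minimal sketch follows, with the permitted attributes (:name, :description) as hypothetical placeholders rather than fields confirmed by the dataset:

  private

  # Hedged sketch of the usual strong-parameters helper for this controller.
  # The permitted attribute names are assumptions for illustration only.
  def item_classification_params
    params.require(:item_classification).permit(:name, :description)
  end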
DELETE /item_classifications/1 DELETE /item_classifications/1.json
|
def destroy
@item_classification.destroy
respond_to do |format|
format.html { redirect_to item_classifications_url, notice: 'Classificação destruída com sucesso.' }
format.json { head :no_content }
end
end
|
[
"def destroy\n @item_classification = ItemClassification.find(params[:id])\n @item_classification.destroy\n\n respond_to do |format|\n format.html { redirect_to item_classifications_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @item_class = ItemClass.find(params[:id])\n @item_class.destroy\n\n respond_to do |format|\n format.html { redirect_to item_classes_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @accounting_classification.destroy\n respond_to do |format|\n format.html { redirect_to accounting_classifications_url, notice: 'Item Eliminado' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @classification = Classification.find(params[:id])\n @classification.destroy\n\n respond_to do |format|\n format.html { redirect_to classifications_url, notice: t('controller.successfully_deleted', model: t('activerecord.models.classification')) }\n format.json { head :no_content }\n end\n end",
"def destroy\n @item_specification = ItemSpecification.find(params[:id])\n @item_specification.destroy\n\n respond_to do |format|\n format.html { redirect_to item_specifications_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @risk_classification = RiskClassification.find(params[:id])\n @risk_classification.destroy\n\n respond_to do |format|\n format.html { redirect_to risk_classifications_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @consideration_item.destroy\n respond_to do |format|\n format.html { redirect_to consideration_items_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @classification = Classification.find(params[:id])\n @classification.destroy\n\n respond_to do |format|\n format.html { redirect_to(classifications_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @accident_item.destroy\n respond_to do |format|\n format.html { redirect_to accident_items_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n render status: 200, json: @request_item.destroy\n end",
"def destroy\n @research_item = ResearchItem.find(params[:id])\n @research_item.destroy\n\n respond_to do |format|\n format.html { redirect_to research_items_url }\n format.json { head :ok }\n end\n end",
"def destroy\n @classification.destroy\n respond_to do |format|\n format.html { redirect_to admin_classifications_url, notice: 'Classification was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @clasification = Clasification.find(params[:id])\n @clasification.destroy\n\n respond_to do |format|\n format.html { redirect_to clasifications_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @api_v1_checklist_item.destroy\n respond_to do |format|\n format.html { redirect_to api_v1_checklist_items_url, notice: 'Checklist item was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @requirement_item.destroy\n respond_to do |format|\n format.html { redirect_to requirement_items_url, notice: 'Requirement item was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @company_classification.destroy\n respond_to do |format|\n format.html { redirect_to company_classifications_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @firstclassification.destroy\n respond_to do |format|\n format.html { redirect_to firstclassifications_url, notice: 'Firstclassification was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @classified = Classified.find(params[:id])\n @classified.destroy\n\n respond_to do |format|\n format.html { redirect_to classifieds_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @clitem.destroy\n respond_to do |format|\n format.html { redirect_to clitems_url }\n format.json { head :no_content }\n\n end\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
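The destroy action in this row operates on an @item_classification instance that must have been loaded earlier, typically via a before_action callback. A minimal sketch of that standard Rails convention follows; the callback name and the action list are assumptions based on common scaffolding, not taken from the row itself:

class ItemClassificationsController < ApplicationController
  before_action :set_item_classification, only: [:show, :edit, :update, :destroy]

  private

  # Loads the record addressed by the :id route segment so that
  # update and destroy can operate on @item_classification.
  def set_item_classification
    @item_classification = ItemClassification.find(params[:id])
  end
end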
Find an `at_version` argument passed to a parent node. If one is found, then a design collection further up the AST has been filtered to reflect designs at that version, and so for consistency we should only present versions up to the given version here.
|
def at_version_arg(parent)
::Gitlab::Graphql::FindArgumentInParent.find(parent, :at_version, limit_depth: 4)
end
|
[
"def at_version_arg(parent)\n # TODO: remove coercion when the compatibility layer is removed\n # See: https://gitlab.com/gitlab-org/gitlab/-/issues/257883\n version_id = ::Gitlab::Graphql::FindArgumentInParent.find(parent, :at_version, limit_depth: 4)\n version_id &&= VersionID.coerce_isolated_input(version_id)\n version_id\n end",
"def version_at(input)\n input = Time.parse(input) unless input.is_a?(Time)\n current = updated_at <= input\n match = versions.\n where(:created_at.lte => input).\n desc(:created_at).\n desc(:number).\n first\n if match\n if current && updated_at >= match.created_at\n self\n else\n version(match.number)\n end\n elsif current\n self\n else\n raise VersionNotFoundError.new(\"no version at #{input}\")\n end\n end",
"def versioned(current_at)\n return self if Settings::DisableVDef || !current_at\n\n # Add a second, to avoid rounding issues between Rails and DB\n current_at += 1.second\n\n matched_version = nil\n avs = all_versions\n # Iterate through all available versions.\n # Since the versions are getting older as we progress,\n # as soon as the definition version\n # was updated or created less recently than the item current_at\n # timestamp, then we know it was the one the item\n # was created from. Break the loop.\n avs.each do |version|\n matched_version = version\n break if (version.updated_at || version.created_at) < current_at\n end\n\n # If no version was matched it probably means the item was created\n # from the current version. Return nil.\n # If the version matched is the first in the list then just\n # return nil and let the caller decide.\n return if matched_version.nil? || matched_version.def_version == avs.first.def_version\n\n # Return the matched version\n matched_version\n end",
"def cached_stateful_version(parent_node)\n version_gid = Gitlab::Graphql::FindArgumentInParent.find(parent_node, :at_version)\n\n # Caching is scoped to an `issue_id` to allow us to cache the\n # most recent `Version` for an issue\n Gitlab::SafeRequestStore.fetch([request_cache_base_key, 'stateful_version', object.issue_id, version_gid]) do\n if version_gid\n GitlabSchema.object_from_id(version_gid, expected_type: ::DesignManagement::Version)&.sync\n else\n object.issue.design_versions.most_recent\n end\n end\n end",
"def find_parent\n VersionRelease.find_parent(version_id, category)\n end",
"def version_at(timestamp)\n # short-circuit if the current state is valid\n return self if self.updated_at <= timestamp\n\n version = versions.first(\n :conditions => ['created_at > ?', timestamp],\n :order => 'created_at ASC')\n version.reify if version\n end",
"def load_version(assoc, id, transaction_id, version_at)\n assoc.klass.paper_trail.version_class.\n where(\"item_type = ?\", assoc.klass.base_class.name).\n where(\"item_id = ?\", id).\n where(\"created_at >= ? OR transaction_id = ?\", version_at, transaction_id).\n order(\"id\").\n limit(1).\n first\n end",
"def handle_has_version(tree)\n predicate = 'dcterms:hasVersion'\n predicate_term = RDF::DC.hasVersion\n elements = tree.xpath(\"./#{predicate}\", PREFIXES)\n if elements.length > 1\n puts \"Found #{elements.length} elements for #{predicate} in #{@work_uri}\"\n end\n elements.each do |has_version_element|\n item_lists = has_version_element.xpath('./rdf:list', PREFIXES)\n item_lists.each do |item_list|\n list_items = item_list.xpath('./rdf:li', PREFIXES)\n list_items.each do |item|\n add_statement(@work_uri, predicate_term, item.text)\n end\n end\n end\n end",
"def versioned_asset\n a=self.asset\n a=a.parent if a.class.name.end_with?(\"::Version\")\n if version.nil?\n a.latest_version\n else\n a.find_version(version)\n end \n end",
"def after(version)\n where([\"#{original_class.versioned_foreign_key} = ? and version_from > ?\", version.send(original_class.versioned_foreign_key), version.version_from]).\n order('version_from ASC').\n first\n end",
"def design_version_added(version)\n events = DesignManagement::Action.events\n link_href = designs_path(version: version.id)\n\n version.designs_by_event.map do |(event_name, designs)|\n note_data = self.class.design_event_note_data(events[event_name])\n icon_name = note_data[:icon]\n n = designs.size\n\n body = \"%s [%d %s](%s)\" % [note_data[:past_tense], n, 'design'.pluralize(n), link_href]\n\n create_note(NoteSummary.new(noteable, project, author, body, action: icon_name))\n end\n end",
"def editable_version\n parent_version.nil? ? self : parent_version \n end",
"def versioned_asset\r\n s=self.sop\r\n s=s.parent if s.class.name.end_with?(\"::Version\")\r\n if version.nil?\r\n s.latest_version\r\n else\r\n s.find_version(sop_version)\r\n end\r\n end",
"def diff_since_version(version)\n other_audit = auditable.audits.where(\"version <= ? AND id != ?\", version, id).order(\"version DESC\").limit(1).first\n\n diff(other_audit)\n end",
"def load_version(assoc_klass, id, transaction_id, version_at)\n assoc_klass.paper_trail.version_class.\n where(\"item_type = ?\", assoc_klass.base_class.name).\n where(\"item_id = ?\", id).\n where(\"created_at >= ? OR transaction_id = ?\", version_at, transaction_id).\n order(\"id\").limit(1).first\n end",
"def load_version(assoc, model, transaction_id, version_at)\n base_class_name = assoc.klass.base_class.name\n versions = load_versions(assoc, model, transaction_id, version_at, base_class_name)\n case versions.length\n when 0\n nil\n when 1\n versions.first\n else\n case ::PaperTrail.config.association_reify_error_behaviour\n when \"warn\"\n version = versions.first\n version.logger&.warn(\n FoundMoreThanOne.new(base_class_name, versions.length).message\n )\n version\n when \"ignore\"\n versions.first\n else # \"error\"\n raise FoundMoreThanOne.new(base_class_name, versions.length)\n end\n end\n end",
"def select_version(version)\n upgrade_to_select.select(version)\n end",
"def find_child_by_designation search_designation\n\t\t\t\traise 'deprecated'\n\n\t\t\t\t@logger.debug \"Searching in container [#{level_type}] #{designation} for #{search_designation} (any level type)\"\n\t\t\t\t@children.each { |c|\n\t\t\t\t\t# @logger.debug \"Comparing: #{c.designation} == #{search_designation} ?\"\n\t\t\t\t\t# @logger.debug \"Comparing: #{c.designation.class} == #{search_designation.class} ?\"\n\t\t\t\t\tif c.designation == search_designation\n\t\t\t\t\t\t@logger.debug \"Found: #{c.detail}\"\n\t\t\t\t\t\treturn c\n\t\t\t\t\tend\n\t\t\t\t}\n\t\t\t\t@logger.debug \"Not Found.\"\n\t\t\t\treturn nil\n\t\t\tend",
"def version_guard(version)\n if version.to_f <= options[:api_version].to_f\n yield\n else\n raise APIVersionError, \"You must set an `api_version` of at least #{version} \"\n end\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
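The at_version_arg document above delegates to ::Gitlab::Graphql::FindArgumentInParent.find, which walks up the chain of parent GraphQL nodes looking for a named argument and gives up after a depth limit. A self-contained sketch of that idea, using a hypothetical Node struct rather than GitLab's actual classes:

# Hypothetical GraphQL-like node: each node knows its arguments and its parent.
Node = Struct.new(:arguments, :parent)

# Walk up the parent chain looking for `name`, giving up after `limit_depth` hops.
def find_argument_in_parent(node, name, limit_depth: 4)
  depth = 0
  while node && depth < limit_depth
    value = node.arguments[name]
    return value unless value.nil?
    node = node.parent
    depth += 1
  end
  nil
end

root  = Node.new({ at_version: "gid://example/DesignManagement::Version/1" }, nil)
child = Node.new({}, root)
puts find_argument_in_parent(child, :at_version)
# => gid://example/DesignManagement::Version/1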
test_reviewer_home Description: This method performs functional testing of the reviewer methods
|
def test_reviewer_home
post('index', {}, {})
assert_response(:success)
assert_template('tracker/index')
post('index', {}, lee_hweng_session)
assert_response(:success)
assert_template('tracker/reviewer_home')
#follow_redirect
#assert_no_tag :content => "POST Placement Review"
end
|
[
"def test_reviewer_view\n \n # Verify that the reviewer view is called when the user is \n # logged in as a reviewer.\n mx234a_pre_art = design_reviews(:mx234a_pre_artwork)\n mx234a = designs(:mx234a)\n \n get(:view,{ :id => mx234a_pre_art.id}, ted_dft_session)\n assert_response(:success)\n assert_equal(mx234a_pre_art.id, assigns(:design_review).id)\n assert_equal(mx234a.id, assigns(:design_review).design.id)\n assert_equal(14, assigns(:review_results).size)\n assert_equal(4, assigns(:design_review).design_review_comments.size)\n assert_equal(nil, assigns(:designers))\n assert_equal(nil, assigns(:priorities))\n assert_equal(nil, assigns(:fab_houses))\n \n # Verify information for PCB during a pre-artwork review.\n get(:view, {:id => mx234a_pre_art.id}, jim_pcb_design_session)\n assert_response(:success)\n assert_equal(mx234a_pre_art.id, assigns(:design_review).id)\n assert_equal(mx234a.id, assigns(:design_review).design.id)\n assert_equal(14, assigns(:review_results).size)\n assert_equal(4, assigns(:design_review).design_review_comments.size)\n assert_equal(5, assigns(:designers).size)\n assert_equal(3, assigns(:priorities).size)\n assert_equal(nil, assigns(:fab_houses))\n \n # Verify information for SLM Vendor during a pre-artwork review.\n get(:view, { :id => mx234a_pre_art.id }, dan_slm_vendor_session)\n assert_equal(mx234a_pre_art.id, assigns(:design_review).id)\n assert_equal(mx234a.id, assigns(:design_review).design.id)\n assert_equal(14, assigns(:review_results).size)\n assert_equal(4, assigns(:design_review).design_review_comments.size)\n assert_equal(nil, assigns(:designers))\n assert_equal(nil, assigns(:priorities))\n\n fab_houses = assigns(:fab_houses)\n assert_equal(8, fab_houses.size)\n\n selected_fab_houses = %w(IBM Merix OPC)\n for fab_house in fab_houses\n assert_equal(selected_fab_houses.include?(fab_house.name), \n fab_house[:selected])\n end\n \n end",
"def reviewer_home_setup\n\n @my_processed_reviews = DesignReview.my_processed_reviews(@logged_in_user)\n @my_unprocessed_reviews = DesignReview.my_unprocessed_reviews(@logged_in_user)\n @reviews_assigned_to_peers = DesignReview.reviews_assigned_to_peers(@logged_in_user)\n\n end",
"def test_view\n \n # Verify that the default view is called when the user is not\n # logged in.\n mx234a_pre_art = design_reviews(:mx234a_pre_artwork)\n mx234a = designs(:mx234a)\n \n get(:view, { :id => mx234a_pre_art.id }, {})\n assert_response(:success)\n\n get(:view, { :id => mx234a_pre_art.id }, {})\n assert_equal(mx234a_pre_art.id, assigns(:design_review).id)\n assert_equal(mx234a.id, assigns(:design_review).design.id)\n assert_equal(14, assigns(:review_results).size)\n assert_equal(4, assigns(:design_review).design_review_comments.size)\n \n get(:view, {}, {})\n assert_redirected_to(:controller => 'tracker', :action => 'index')\n #assert_equal('No ID was provided - unable to access the design review',\n # flash['notice'])\n\n end",
"def test_repost_review\n \n mx234a_pre_artwork = design_reviews(:mx234a_pre_artwork)\n pre_art_review = ReviewType.get_pre_artwork\n \n post(:repost_review,\n { :design_review_id => mx234a_pre_artwork.id },\n cathy_designer_session)\n\n assert_equal(mx234a_pre_artwork.design.id, assigns(:design_review).design.id)\n assert_equal(pre_art_review.id, assigns(:design_review).review_type_id)\n\n reviewer_list = assigns(:reviewers)\n assert_equal(14, reviewer_list.size)\n\n expected_values = [ set_group('CE-DFM Engineer', 8, 3),\n set_group('CE-DFT Engineer', 7, 2),\n set_group('Component Development', 15, 2),\n set_group('Hardware Engineer (EE)', 5, 4),\n set_group('Mechanical Engineer', 10, 2),\n set_group('Mechanical Mfg Engineer', 11, 2),\n set_group('New Product Planner', 13, 2),\n set_group('PCB Design Input Gate', 14, 2),\n set_group('PCB Design Manager', 12, 1),\n set_group('PCB Mechanical Engineer', 16, 2),\n set_group('SLM BOM', 17, 1),\n set_group('SLM Vendor', 18, 1),\n set_group('TDE Engineer', 9, 2),\n set_group('Valor', 6, 4) ]\n\n for review_group in reviewer_list\n expected_val = expected_values.shift\n\n assert_equal(expected_val[:group], review_group.role.display_name)\n assert_equal(expected_val[:group_id], review_group.role_id)\n assert_equal(expected_val[:reviewer_count], review_group.role.active_users.size)\n end\n\n end",
"def test_moderator_fetch_reviews\n # Setup\n @user = setup_user\n @user2 = setup_user\n\n assign_http(Config['turtle']['host'])\n\n headers = { 'Authorization' => \"Bearer #{@user.oauth_token}\" }\n params = { 'promo_id' => @active_promo_id }\n params['promo_team'] = @promo['TeamNames'].sample unless @promo['TeamNames'].empty?\n\n put '/usr', params, headers\n assert_response(@response, :success)\n\n # Step 1\n business_listings = get_promo_listings\n assert(business_listings.length > 1)\n\n # Step 2\n assign_http(Config['panda']['host'])\n\n count = 0\n int_xxids = []\n sources = ['CSE','XX3','XXMOBILE','CSE','XX3','XXMOBILE','CSE','XX3','XXMOBILE']\n business_listings.each do |business|\n break if count > 6\n int_xxids << business['Int_Xxid']\n\n params = {\n 'body' => 'This business is very business-like and I would do business with this business again if I have business with them.',\n 'source' => sources[count],\n 'subject' => 'Review made by API',\n 'value' => rand(1..5),\n 'listing_id' => business['Int_Xxid'],\n 'oauth_token' => @user.oauth_token,\n 'promo_id' => @active_promo_id\n }\n\n put '/usr/reviews', params\n assert_response(@response, :success)\n\n params = {\n 'body' => 'This business is very business-like and I would do business with this business again if I have business with them.',\n 'source' => sources[count],\n 'subject' => 'Review made by API',\n 'value' => rand(1..5),\n 'listing_id' => business['Int_Xxid'],\n 'oauth_token' => @user2.oauth_token,\n }\n\n put '/usr/reviews', params\n assert_response(@response, :success)\n\n count += 1\n end\n\n # Step 3\n # Only Promo Reviews check - include_only_promo_reviews\n params = { 'include_only_promo_reviews' => true }\n\n get '/mod/reviews', params\n assert_response(@response, :success)\n assert(@parsed_response['Reviews'])\n assert_equal(10, @parsed_response['Reviews'].length, @parsed_response)\n @parsed_response['Reviews'].each do |review|\n assert(review['PromoId'], review)\n end\n\n # Default Check - promo id only\n params['promo_id'] = @active_promo_id\n\n get '/mod/reviews', params\n assert_response(@response, :success)\n assert(@parsed_response['Reviews'])\n assert_equal(10, @parsed_response['Reviews'].length, @parsed_response)\n @parsed_response['Reviews'].each do |review|\n assert_equal(@active_promo_id, review['PromoId'],\n \"Expected the review PromoId to match #{@active_promo_id}: #{review}\")\n end\n\n # Default Check - promo id + author user id\n params['author_user_id'] = @user.id\n\n get '/mod/reviews', params\n assert_response(@response, :success)\n assert(@parsed_response['Reviews'])\n assert_equal(count, @parsed_response['Reviews'].length, @parsed_response)\n @parsed_response['Reviews'].each do |review|\n assert_equal(@user.id, review['AuthorUserId'],\n \"Expected the review AuthorUserId to match #{@user.id}: #{review}\")\n end\n\n # Status check - Approved\n params['status'] = 'approved'\n\n get '/mod/reviews', params\n assert_response(@response, :success)\n assert(@parsed_response['Reviews'])\n assert_equal(0, @parsed_response['Reviews'].length, @parsed_response)\n @parsed_response['Reviews'].each do |review|\n assert_equal(0, review['Suppressed'],\n \"Expected the review Suppressed to match 0: #{review}\")\n end\n\n # Status check - Rejected\n params['status'] = 'rejected'\n\n get '/mod/reviews', params\n assert_response(@response, :success)\n assert(@parsed_response['Reviews'])\n assert_equal(0, @parsed_response['Reviews'].length, @parsed_response)\n @parsed_response['Reviews'].each do |review|\n 
assert_equal(0, review['Suppressed'],\n \"Expected the review Suppressed to match 0: #{review}\")\n end\n\n # Status check - Assigned\n params['status'] = 'assigned'\n\n get '/mod/reviews', params\n assert_response(@response, :success)\n assert(@parsed_response['Reviews'])\n assert_equal(0, @parsed_response['Reviews'].length, @parsed_response)\n @parsed_response['Reviews'].each do |review|\n assert_nil(review['ModeratorId'],\n \"Expected the review ModeratorId to be unassigned: #{review}\")\n assert_equal(0, review['Moderated'],\n \"Expected the review Moderated to match 0: #{review}\")\n end\n\n # Status check - Unassigned\n params['status'] = 'unassigned'\n\n get '/mod/reviews', params\n assert_response(@response, :success)\n assert(@parsed_response['Reviews'])\n assert_equal(count, @parsed_response['Reviews'].length, @parsed_response)\n @parsed_response['Reviews'].each do |review|\n assert_nil(review['ModeratorId'],\n \"Expected the review ModeratorId to be unassigned: #{review}\")\n assert_equal(0, review['Moderated'],\n \"Expected the review Moderated to match 0: #{review}\")\n end\n\n # Limit check\n params = {\n 'promo_id' => @active_promo_id,\n 'h' => 1\n }\n\n get '/mod/reviews', params\n assert_response(@response, :success)\n assert(@parsed_response['Reviews'])\n assert_equal(params['h'], @parsed_response['Reviews'].length, @parsed_response)\n no_offset = @parsed_response['Reviews']\n\n # Offset check\n params['o'] = 1\n\n get '/mod/reviews', params\n assert_response(@response, :success)\n assert(@parsed_response['Reviews'])\n assert_equal(params['h'], @parsed_response['Reviews'].length, @parsed_response)\n no_offset.each do |review|\n refute_match(review, @parsed_response['Reviews'][0])\n end\n\n # Int_Xxid check\n int_xxid = int_xxids[(rand(count))]\n\n params = {\n 'promo_id' => @active_promo_id,\n 'author_user_id' => @user.id,\n 'status' => 'unassigned',\n 'int_xxid' => int_xxid,\n }\n\n get '/mod/reviews', params\n assert_response(@response, :success)\n assert(@parsed_response['Reviews'])\n assert_equal(1, @parsed_response['Reviews'].length, @parsed_response)\n assert_equal(int_xxid.to_i, @parsed_response['Reviews'][0]['Int_Xxid'])\n\n int_xxid = int_xxids[(rand(count))]\n\n # User Email check\n params = {\n 'promo_id' => @active_promo_id,\n 'author_email' => @user.email,\n 'status' => 'unassigned',\n 'int_xxid' => int_xxid,\n }\n\n get '/mod/reviews', params\n assert_response(@response, :success)\n assert(@parsed_response['Reviews'])\n assert_equal(1, @parsed_response['Reviews'].length, @parsed_response)\n assert_equal(int_xxid.to_i, @parsed_response['Reviews'][0]['Int_Xxid'])\n assert_equal(@user.email, @parsed_response['Reviews'][0]['Email'])\n\n # Step 4\n params = {\n 'promo_id' => @active_promo_id,\n 'include_total_unmoderated_count' => true, # AS-7229\n }\n\n get '/mod/reviews', params\n assert_response(@response, :success)\n assert(@parsed_response['Reviews'])\n assert(@parsed_response['TotalUnmoderatedCount'] >= 0)\n assert_equal(10, @parsed_response['Reviews'].length, @parsed_response)\n @parsed_response['Reviews'].each do |review|\n assert_equal(@active_promo_id, review['PromoId'],\n \"Expected the review PromoId to match #{@active_promo_id}: #{review}\")\n end\n\n # Step 5\n # Sorting by source\n params = {\n 'include_only_promo_reviews' => true,\n 'author_user_id' => @user.id,\n 's' => 'source:desc'\n }\n\n get '/mod/reviews', params\n assert_response(@response, :success)\n assert(@parsed_response['Reviews'])\n assert_equal(count, 
@parsed_response['Reviews'].length, @parsed_response)\n @parsed_response['Reviews'].each do |review|\n assert(review['PromoId'], review)\n assert(sources.include?(review['Source']),\n \"Expected the review Source to match of the specified sources, #{sources}: #{review}\")\n end\n assert_equal('XXMOBILE', @parsed_response['Reviews'].first['Source'])\n assert_equal('CSE', @parsed_response['Reviews'].last['Source'])\n\n # Step 6\n # Non Promo Review check\n params = {\n 'include_only_promo_reviews' => false,\n 'author_user_id' => @user2.id,\n 's' => 'source:asc'\n }\n\n get '/mod/reviews', params\n assert_response(@response, :success)\n assert(@parsed_response['Reviews'])\n assert_equal(count, @parsed_response['Reviews'].length, @parsed_response)\n @parsed_response['Reviews'].each do |review|\n assert_equal(@user2.id, review['AuthorUserId'],\n \"Expected the review AuthorUserId to match #{@user.id}: #{review}\")\n assert(sources.include?(review['Source']),\n \"Expected the review Source to match of the specified sources, #{sources}: #{review}\")\n end\n assert_equal('CSE', @parsed_response['Reviews'].first['Source'])\n assert_equal('XXMOBILE', @parsed_response['Reviews'].last['Source'])\n end",
"def test_manager_view\n \n # Verify that the manager view is called when the user is \n # logged in as a manager.\n mx234a_pre_art = design_reviews(:mx234a_pre_artwork)\n mx234a = designs(:mx234a)\n\n get(:view, { :id => mx234a_pre_art.id }, jim_manager_session)\n assert_equal(mx234a_pre_art.id, assigns(:design_review).id)\n assert_equal(mx234a.id, assigns(:design_review).design.id)\n assert_equal(14, assigns(:review_results).size)\n assert_equal(4, assigns(:design_review).design_review_comments.size)\n \n end",
"def test_nodesigner_view\n \n # Verify that the designer view is called when the user is \n # logged in as a designer.\n mx234a_pre_art = design_reviews(:mx234a_pre_artwork)\n mx234a = designs(:mx234a)\n \n get(:view, { :id => mx234a_pre_art.id }, scott_designer_session)\n assert_response(:success)\n assert_equal(mx234a_pre_art.id, assigns(:design_review).id)\n assert_equal(mx234a.id, assigns(:design_review).design.id)\n assert_equal(14, assigns(:review_results).size)\n assert_equal(4, assigns(:design_review).design_review_comments.size)\n \n end",
"def test_00171_homepage_featured_review_widget\n @browser.wait_until { @home_page.featured_r_widget.present? }\n @browser.wait_until { @home_page.featured_r_widget.posts.size > 0 }\n\n # verify the title link would redirect to conversation page.\n q_title = @home_page.featured_r_widget.posts[0].title\n @home_page.featured_r_widget.posts[0].click_title_link\n @browser.wait_until { @convdetail_page.conv_detail.present? }\n assert_equal @convdetail_page.conv_title.when_present.text, q_title, \"title doesn't match\"\n\n @home_page.navigate_in\n @browser.wait_until { @home_page.featured_r_widget.posts.size > 0 }\n\n # verify avatar link would redirect to profile page\n @home_page.featured_r_widget.posts[0].click_avatar\n @browser.wait_until { @profile_page.profile_page.present? }\n author_name = @profile_page.profile_page_author_name_betaon.when_present.text\n\n @home_page.navigate_in\n @browser.wait_until { @home_page.featured_r_widget.posts.size > 0 }\n\n # verify author link would redirect to profile page\n @home_page.featured_r_widget.posts[0].click_author_link\n @browser.wait_until { @profile_page.profile_page.present? }\n assert_equal @profile_page.profile_page_author_name_betaon.when_present.text, author_name, \"Author doesn't match\"\n\n @home_page.navigate_in\n @browser.wait_until { @home_page.featured_r_widget.posts.size > 0 }\n\n # verify topic link would redirect to the correct topic page\n post_in_topic = @home_page.featured_r_widget.posts[0].in_topic_link.when_present.text\n\n @home_page.featured_r_widget.posts[0].click_topic_link\n @browser.wait_until { @topicdetail_page.topic_filter.present? }\n assert_equal @topicdetail_page.topic_title.when_present.text, post_in_topic, \"Title doesn't match\"\n end",
"def test_post_results_and_hold\n\n #\n # THE PRE-ARTWORK REVIEW\n #\n expected_results = {\n '7' => \"No Response\", '8' => \"No Response\", '5' => \"No Response\",\n '15' => \"No Response\", '10' => \"No Response\", '11' => \"No Response\",\n '14' => \"No Response\", '16' => \"No Response\", '13' => \"No Response\",\n '17' => \"No Response\", '18' => \"No Response\", '9' => \"No Response\",\n '6' => \"No Response\", '12' => \"No Response\"\n }\n\n mail_subject = 'Catalyst/AC/(pcb252_234_a0_g): Pre-Artwork '\n reviewer_result_list= [\n # Espo - CE-DFT Reviewer\n {:user_id => @espo.id,\n :role_id => @ce_dft.id,\n :comment => 'espo comment while in-review',\n :result => 'APPROVED',\n :ignore => false,\n :review_result_id => design_review_results(:mx234a_pre_artwork_ce_dft).id,\n :role_id_tag => 'role_id_7',\n :review_status => @in_review,\n :expected_results => {\n :comments_count => 5,\n :review_status_id => @in_review.id,\n :mail_subject => mail_subject + ' CE-DFT - APPROVED - See comments',\n :notice => \"Design Review updated with comments and the review result - mail was sent\"\n }\n },\n # Heng Kit Too - DFM Reviewer\n {:user_id => @heng_k.id,\n :role_id => @dfm.id,\n :comment => 'HKT comment while on-hold',\n :result => 'WAIVED',\n :ignore => false,\n :review_result_id => design_review_results(:mx234a_pre_artwork_dfm).id,\n :role_id_tag => ':role_id_8',\n :review_status => @on_hold,\n :expected_results => {\n :comments_count => 6,\n :review_status_id => @on_hold.id,\n :mail_subject => mail_subject + '- Comments added',\n :notice => \"Design Review status is 'Review On-Hold': comments were recorded and review results were discarded - mail was sent\"\n }\n },\n # Heng Kit Too - DFM Reviewer\n {:user_id => @heng_k.id,\n :role_id => @dfm.id,\n :comment => 'HKT comment while pending repost',\n :result => 'WAIVED',\n :ignore => false,\n :review_result_id => design_review_results(:mx234a_pre_artwork_dfm).id,\n :role_id_tag => ':role_id_8',\n :review_status => @pending_repost,\n :expected_results => {\n :comments_count => 7,\n :review_status_id => @pending_repost.id,\n :mail_subject => mail_subject + '- Comments added',\n :notice => \"Design Review status is 'Pending Repost': comments were recorded and review results were discarded - mail was sent\"\n }\n }\n ]\n\n mx234a = design_reviews(:mx234a_pre_artwork)\n\n mx234a.design_review_results.each do |rr|\n rr.result = 'No Response'\n rr.save\n end\n \n mx234a_review_results = mx234a.design_review_results\n\n assert_equal(14, mx234a_review_results.size)\n assert_equal(4, mx234a.design_review_comments.size)\n mx234a_review_results.each { |rr| assert_equal(\"No Response\", rr.result) }\n\n reviewer_result_list.each do |reviewer_result|\n\n if reviewer_result[:review_status] == @on_hold\n mx234a.place_on_hold\n elsif reviewer_result[:review_status] == @pending_repost\n mx234a.review_status_id = @pending_repost.id\n mx234a.save\n else\n mx234a.remove_from_hold(@in_review)\n expected_results[reviewer_result[:role_id].to_s] = reviewer_result[:result]\n end\n mx234a.reload\n\n rev = User.find(reviewer_result[:user_id]).name\n reviewer_session = set_session(reviewer_result[:user_id], Role.find(reviewer_result[:role_id]).name)\n\n post(:reviewer_results,\n { :post_comment => { \"comment\" => reviewer_result[:comment] },\n reviewer_result[:role_id_tag] => { reviewer_result[:review_result_id] => reviewer_result[:result] },\n :design_review => { \"id\" => mx234a.id } },\n reviewer_session)\n assert_redirected_to(:action => :post_results)\n\n 
#follow_redirect\n # \"follow_redirect\" is part of integration testing and should not be in\n # used in a functional test\n if false #comment out section\n\n #assert_equal(reviewer_result[:expected_results][:notice], flash['notice'])\n\n assert_equal(1, @emails.size)\n email = @emails.pop\n assert_equal(0, @emails.size)\n assert_equal(reviewer_result[:expected_results][:mail_subject],\n email.subject)\n \n design_review_comments = DesignReviewComment.find_all_by_design_review_id(mx234a.id)\n assert_equal(reviewer_result[:expected_results][:comments_count], \n design_review_comments.size)\n if reviewer_result[:comment] != ''\n assert_equal(reviewer_result[:comment], design_review_comments.pop.comment)\n end\n\n review_results = DesignReviewResult.find_all_by_design_review_id(mx234a.id)\n\n for review_result in review_results\n assert_equal(expected_results[review_result.role_id.to_s],\n review_result.result)\n end\n\n pre_art_design_review = DesignReview.find(mx234a.id)\n assert_equal(reviewer_result[:expected_results][:review_status_id],\n pre_art_design_review.review_status_id)\n end #suppress \"follow_redirect\" section\n end\n\n #Verify the existing priority and designer.\n mx234a_pre_art_dr = DesignReview.find(mx234a.id)\n mx234a_design = mx234a_pre_art_dr.design\n high = Priority.find_by_name('High')\n low = Priority.find_by_name('Low')\n bob_g = User.find_by_last_name(\"Goldin\")\n scott_g = User.find_by_last_name(\"Glover\")\n patrice_m = User.find_by_last_name(\"Michaels\")\n cathy_m = User.find_by_last_name(\"McLaren\")\n\n assert_equal(high.id, mx234a_design.priority_id)\n assert_equal(5000, mx234a_design.designer_id)\n assert_equal(5001, mx234a_design.peer_id)\n\n release_review = ReviewType.get_release\n pre_art_review = ReviewType.get_pre_artwork\n for mx234a_dr in mx234a_design.design_reviews\n assert_equal(high.id, mx234a_dr.priority_id)\n if release_review.id === mx234a_dr.review_type_id\n assert_equal(patrice_m.name, User.find(mx234a_dr.designer_id).name)\n elsif pre_art_review.id == mx234a_dr.review_type_id\n assert_equal(cathy_m.name, User.find(mx234a_dr.designer_id).name)\n else\n assert_equal(bob_g.name, User.find(mx234a_dr.designer_id).name)\n end\n end\n\n assert_equal(ReviewType.get_pre_artwork.id,\n mx234a_design.phase_id)\n\n # Handle special processing cases\n assert_equal(0, mx234a_design.board.fab_houses.size)\n assert_equal(3, mx234a_design.fab_houses.size)\n fab_houses = mx234a_design.fab_houses.sort_by { |fh| fh.name }\n assert_equal(fab_houses(:ibm).id, fab_houses[0].id.to_i)\n assert_equal(fab_houses(:merix).id, fab_houses[1].id.to_i)\n assert_equal(fab_houses(:opc).id, fab_houses[2].id.to_i)\n \n comment_count = mx234a.design_review_comments.size\n # Verify the behavior when the review is pending and on hold\n updates = [{:review_status => @pending_repost,\n :notice => \"Design Review status is 'Pending Repost': comments were recorded and review results were discarded - mail was sent\",\n :fab_house => {'1' => '1', '2' => '0', '3' => '1',\n '4' => '1', '5' => '0', '6' => '0',\n '7' => '0', '8' => '1'},\n :fab_house_count => 4,\n :fab_house_list => ['AdvantechPWB', 'Coretec', \n 'Merix', 'OPC']},\n {:review_status => @on_hold,\n :notice => \"Design Review status is 'Review On-Hold': comments were recorded and review results were discarded - mail was sent\",\n :fab_house => {'1' => '0', '2' => '0', '3' => '0',\n '4' => '0', '5' => '1', '6' => '0',\n '7' => '1', '8' => '1'},\n :fab_house_count => 3,\n :fab_house_list => ['DDI Anaheim', 'MEI', 'OPC']}]\n \n 
slm_vendor_session = dan_slm_vendor_session\n updates.each do |update|\n\n review_status = update[:review_status]\n if review_status.id == @on_hold.id\n mx234a.place_on_hold\n else\n mx234a.review_status_id = review_status.id\n mx234a.save\n end\n mx234a.reload\n \n post(:reviewer_results,\n { :post_comment => { \"comment\" => \"#{review_status.name}\" },\n :role_id_18 => { 11 => 'APPROVED' },\n :design_review => { \"id\" => mx234a.id },\n :fab_house => update[:fab_house] },\n slm_vendor_session) \n assert_redirected_to(:action => :post_results)\n #follow_redirect\n # \"follow_redirect\" is part of integration testing and should not be in\n # used in a functional test\n if false #comment out section\n\n email = @emails.pop\n assert_equal(0, @emails.size)\n # Expect comments - the fab houses changed\n assert_equal(mail_subject + '- Comments added', email.subject)\n\n assert_equal(update[:fab_house_count], mx234a.design.fab_houses.size)\n assert_equal(update[:fab_house_count], mx234a.design.board.fab_houses.size)\n \n if update[:fab_house_count] > 0\n design_fab_houses = mx234a.design.fab_houses.sort_by { |fh| fh.name }\n board_fab_houses = mx234a.design.board.fab_houses.sort_by { |fh| fh.name }\n \n 0.upto(update[:fab_house_count]-1) do |i|\n assert_equal(update[:fab_house_list][i], design_fab_houses[i].name)\n assert_equal(update[:fab_house_list][i], board_fab_houses[i].name)\n end\n end\n \n comment_count += 2\n assert_equal(comment_count, mx234a.design_review_comments.size)\n \n #assert_equal(update[:notice], flash['notice'])\n end #suppress follow_redirect\n end \n\n # Handle special proessing for PCB Design Manager\n comment_count = mx234a.design_review_comments.size\n # Verify the behavior when the review is pending and on hold\n updates = [{:review_status => @pending_repost,\n :notice => \"Design Review status is 'Pending Repost': comments were recorded and review results were discarded - mail was sent\",\n :fab_house => {'1' => '1', '2' => '0', '3' => '1',\n '4' => '1', '5' => '0', '6' => '0',\n '7' => '0', '8' => '1'},\n :fab_house_count => 4,\n :fab_house_list => ['AdvantechPWB', 'Coretec', \n 'Merix', 'OPC']},\n {:review_status => @on_hold,\n :notice => \"Design Review status is 'Review On-Hold': comments were recorded and review results were discarded - mail was sent\",\n :fab_house => {'1' => '0', '2' => '0', '3' => '0',\n '4' => '0', '5' => '1', '6' => '0',\n '7' => '1', '8' => '1'},\n :fab_house_count => 3,\n :fab_house_list => ['DDI Anaheim', 'MEI', 'OPC']}]\n \n email = []\n pcb_design_session = jim_pcb_design_session\n updates.each do |update|\n \n review_status = update[:review_status]\n if review_status.id == @on_hold.id\n mx234a.place_on_hold\n else\n mx234a.review_status_id = review_status.id\n mx234a.save\n end\n mx234a.reload\n \n post(:reviewer_results,\n { :post_comment => { \"comment\" => 'Absolutely!' 
},\n :role_id_12 => { '100' => 'APPROVED' },\n :design_review => { \"id\" => mx234a.id },\n :designer => { :id => scott_g.id },\n :peer => { :id => bob_g.id },\n :priority => { :id => low.id } },\n pcb_design_session)\n\n assert_redirected_to(:action => :post_results)\n #follow_redirect\n # \"follow_redirect\" is part of integration testing and should not be in\n # used in a functional test\n if false #comment out section\n\n email = @emails.pop\n assert_equal(0, @emails.size)\n # Expect comments - the fab houses changed\n assert_equal(mail_subject + '- Comments added', email.subject)\n\n comment_count += 1\n assert_equal(comment_count, mx234a.design_review_comments.size)\n \n #assert_equal(update[:notice], flash['notice'])\n end # suppress follow_redirect\n end\n\n mx234a.reload\n \n designer_email = User.find(mx234a.design.pcb_input_id).email\n\n assert(!email.cc.detect { |addr| addr == designer_email })\n \n mx234a_pre_art_dr = DesignReview.find(mx234a.id)\n mx234a_design = Design.find(mx234a_pre_art_dr.design_id)\n\n assert_equal(low.id, mx234a_design.priority_id)\n assert_equal(scott_g.id, mx234a_design.designer_id)\n\n for mx234a_dr in mx234a_design.design_reviews\n assert_equal(low.name, Priority.find(mx234a_dr.priority_id).name)\n case ReviewType.find(mx234a_dr.review_type_id).name\n when 'Pre-Artwork'\n assert_equal(cathy_m.name, User.find(mx234a_dr.designer_id).name)\n when 'Release'\n assert_equal(patrice_m.name, User.find(mx234a_dr.designer_id).name)\n else\n assert_equal(scott_g.name, User.find(mx234a_dr.designer_id).name)\n end\n end\n\n end",
"def test_admin_view\n \n # Verify that the admin view is called when the user is \n # logged in as an admin.\n mx234a_pre_art = design_reviews(:mx234a_pre_artwork)\n mx234a = designs(:mx234a)\n \n get(:view, {:id => mx234a_pre_art.id}, cathy_admin_session)\n assert_response(:success)\n assert_equal(mx234a_pre_art.id, assigns(:design_review).id)\n assert_equal(mx234a.id, assigns(:design_review).design.id)\n assert_equal(14, assigns(:review_results).size)\n assert_equal(4, assigns(:design_review).design_review_comments.size)\n\n end",
"def test_ID_25839_edit_profile_home_patch\n login_as_user1\n go_to_edit_profile_page\n verify_users_default_town_name_displayed_as_home_patch \"test_ID_25835_edit_profile_desc\" , \"Bellmore Patch\"\n verify_hometown_change_is_saved \"Bellmore Patch\",\"Garden City Patch\"\n end",
"def test_post_results\n\n #\n # THE PRE-ARTWORK REVIEW\n #\n expected_results = {\n '7' => \"No Response\", '8' => \"No Response\", '5' => \"No Response\", \n '15' => \"No Response\", '10' => \"No Response\", '11' => \"No Response\",\n '14' => \"No Response\", '16' => \"No Response\", '13' => \"No Response\",\n '17' => \"No Response\", '18' => \"No Response\", '9' => \"No Response\",\n '6' => \"No Response\", '12' => \"No Response\" }\n\n mail_subject = 'Catalyst/AC/(pcb252_234_a0_g): Pre-Artwork '\n reviewer_result_list= [\n # Espo - CE-DFT Reviewer\n {:user_id => @espo.id,\n :role_id => @ce_dft.id,\n :comment => 'This is good!',\n :result => 'APPROVED',\n :ignore => false,\n :review_result_id => design_review_results(:mx234a_pre_artwork_ce_dft).id,\n :role_id_tag => 'role_id_7',\n :expected_results => {\n :comments_count => 5,\n :review_status_id => @in_review.id,\n :mail_subject => mail_subject + ' CE-DFT - APPROVED - See comments'\n }\n },\n # Heng Kit Too - DFM Reviewer\n {:user_id => @heng_k.id,\n :role_id => @dfm.id,\n :comment => 'This is good enough to waive.',\n :result => 'WAIVED',\n :ignore => false,\n :review_result_id => design_review_results(:mx234a_pre_artwork_dfm).id,\n :role_id_tag => ':role_id_8',\n :expected_results => {\n :comments_count => 6,\n :review_status_id => @in_review.id,\n :mail_subject => mail_subject + ' DFM - WAIVED - See comments'\n }\n },\n # Dave Macioce - Library Reviewer\n {:user_id => @dave_m.id,\n :role_id => @library.id,\n :comment => 'Yankees Suck!!!',\n :result => 'REJECTED',\n :ignore => false,\n :review_result_id => design_review_results(:mx234a_pre_artwork_lib),\n :role_id_tag => ':role_id_15',\n :expected_results => {\n :comments_count => 7,\n :review_status_id => @pending_repost.id,\n :mail_subject => mail_subject + ' Library - REJECTED - See comments'\n }\n },\n # Lee Shaff- HW Reviewer\n {:user_id => @lee_s.id,\n :role_id => @hweng.id,\n :comment => 'No Comment',\n :result => 'APPROVED',\n :ignore => false,\n :review_result_id => design_review_results(:mx234a_pre_artwork_hw).id,\n :role_id_tag => ':role_id_5',\n :expected_results => {\n :comments_count => 8,\n :review_status_id => @in_review.id,\n :mail_subject => mail_subject + ' HWENG - APPROVED - See comments'\n }\n },\n # Dave Macioce - Library Reviewer\n {:user_id => @dave_m.id,\n :role_id => @library.id,\n :comment => '',\n :result => 'APPROVED',\n :ignore => false,\n :review_result_id => design_review_results(:mx234a_pre_artwork_lib).id,\n :role_id_tag => ':role_id_15',\n :expected_results => {\n :comments_count => 8,\n :review_status_id => @in_review.id,\n :mail_subject => mail_subject + ' Library - APPROVED - No comments'\n }\n },\n # Espo - CE-DFT Reviewer\n {:user_id => @espo.id,\n :role_id => @ce_dft.id,\n :comment => 'This is no good!',\n :result => 'REJECTED',\n :ignore => true,\n :review_result_id => design_review_results(:mx234a_pre_artwork_ce_dft).id,\n :role_id_tag => 'role_id_7',\n :expected_results => {\n :comments_count => 9,\n :review_status_id => @in_review.id,\n :mail_subject => mail_subject + '- Comments added'\n }\n },\n # Espo - CE-DFT Reviewer\n {:user_id => @espo.id,\n :role_id => @ce_dft.id,\n :comment => 'Just kidding!',\n :result => 'APPROVED',\n :ignore => false,\n :review_result_id => design_review_results(:mx234a_pre_artwork_ce_dft).id,\n :role_id_tag => 'role_id_7',\n :expected_results => {\n :comments_count => 10,\n :review_status_id => @in_review.id,\n :mail_subject => mail_subject + ' CE-DFT - APPROVED - See comments'\n }\n },\n # Tom Flak - 
Mehanical\n {:user_id => @tom_f.id,\n :role_id => @mechanical.id,\n :comment => 'This is good!',\n :result => 'APPROVED',\n :ignore => false,\n :review_result_id => design_review_results(:mx234a_pre_artwork_mech).id,\n :role_id_tag => 'role_id_10',\n :expected_results => {\n :comments_count => 11,\n :review_status_id => @in_review.id,\n :mail_subject => mail_subject + ' Mechanical - APPROVED - See comments'\n }\n },\n # Anthony Gentile - Mechanical MFG\n {:user_id => @anthony_g.id,\n :role_id => @mechanical_mfg.id,\n :comment => '',\n :result => 'APPROVED',\n :ignore => false,\n :review_result_id => design_review_results(:mx234a_pre_artwork_mech_mfg).id,\n :role_id_tag => 'role_id_11',\n :expected_results => {\n :comments_count => 11,\n :review_status_id => @in_review.id,\n :mail_subject => mail_subject + ' Mechanical-MFG - APPROVED - No comments'\n }\n },\n # Cathy McLaren - PCB Input Gate\n {:user_id => @cathy_m.id,\n :role_id => @pcb_input_gate.id,\n :comment => 'I always have something to say.',\n :result => 'APPROVED',\n :ignore => false,\n :review_result_id => design_review_results(:mx234a_pre_artwork_pcb_ig).id,\n :role_id_tag => 'role_id_14',\n :expected_results => {\n :comments_count => 12,\n :review_status_id => @in_review.id,\n :mail_subject => mail_subject + ' PCB Input Gate - APPROVED - See comments'\n }\n },\n # John Godin - PCB Mehanical\n {:user_id => @john_g.id,\n :role_id => @pcb_mechanical.id,\n :comment => '',\n :result => 'APPROVED',\n :ignore => false,\n :review_result_id => design_review_results(:mx234a_pre_artwork_pcb_mech).id,\n :role_id_tag => 'role_id_16',\n :expected_results => {\n :comments_count => 12,\n :review_status_id => @in_review.id,\n :mail_subject => mail_subject + ' PCB Mechanical - APPROVED - No comments'\n }\n },\n # Matt Disanzo - Planning\n {:user_id => @matt_d.id,\n :role_id => @planning.id,\n :comment => 'Comment before entering result.',\n :result => nil,\n :ignore => false,\n :review_result_id => design_review_results(:mx234a_pre_artwork_plan).id,\n :role_id_tag => 'role_id_13',\n :expected_results => {\n :comments_count => 13,\n :review_status_id => @in_review.id,\n :mail_subject => mail_subject + '- Comments added'\n }\n },\n # Matt Disanzo - Planning\n {:user_id => @matt_d.id,\n :role_id => @planning.id,\n :comment => 'Testing.',\n :result => 'APPROVED',\n :ignore => false,\n :review_result_id => design_review_results(:mx234a_pre_artwork_plan).id,\n :role_id_tag => 'role_id_13',\n :expected_results => {\n :comments_count => 14,\n :review_status_id => @in_review.id,\n :mail_subject => mail_subject + ' Planning - APPROVED - See comments'\n }\n },\n # Matt Disanzo - Planning\n {:user_id => @matt_d.id,\n :role_id => @planning.id,\n :comment => 'Comment after entering result.',\n :result => nil,\n :ignore => false,\n :review_result_id => design_review_results(:mx234a_pre_artwork_plan).id,\n :role_id_tag => 'role_id_13',\n :expected_results => {\n :comments_count => 15,\n :review_status_id => @in_review.id,\n :mail_subject => mail_subject + '- Comments added'\n }\n },\n # Arthur Davis - SLM BOM\n {:user_id => @art_d.id,\n :role_id => @slm_bom.id,\n :comment => '',\n :result => 'APPROVED',\n :ignore => false,\n :review_result_id => design_review_results(:mx234a_pre_artwork_slm_bom).id,\n :role_id_tag => 'role_id_17',\n :expected_results => {\n :comments_count => 15,\n :review_status_id => @in_review.id,\n :mail_subject => mail_subject + ' SLM BOM - APPROVED - No comments'\n }\n },\n # Rich Ahamed - TDE\n {:user_id => @rich_a.id,\n :role_id => 
@tde.id,\n :comment => '',\n :result => 'APPROVED',\n :ignore => false,\n :review_result_id => design_review_results(:mx234a_pre_artwork_tde).id,\n :role_id_tag => 'role_id_9',\n :expected_results => {\n :comments_count => 15,\n :review_status_id => @in_review.id,\n :mail_subject => mail_subject + ' TDE - APPROVED - No comments'\n }\n },\n # Lisa Austin - Valor\n {:user_id => @lisa_a.id,\n :role_id => @valor.id,\n :comment => '',\n :result => 'APPROVED',\n :ignore => false,\n :review_result_id => design_review_results(:mx234a_pre_artwork_valor).id,\n :role_id_tag => 'role_id_6',\n :expected_results => {\n :comments_count => 15,\n :review_status_id => @in_review.id,\n :mail_subject => mail_subject + ' Valor - APPROVED - No comments'\n }\n },\n\n ]\n\n mx234a = design_reviews(:mx234a_pre_artwork)\n\n update_mx234a = DesignReview.find(mx234a.id)\n update_mx234a.review_status_id = @in_review.id\n update_mx234a.save\n\n mx234a_review_results = DesignReviewResult.find_all_by_design_review_id(mx234a.id)\n for mx234a_review_result in mx234a_review_results\n mx234a_review_result.result = 'No Response'\n mx234a_review_result.save\n end\n\n mx234a_review_results = DesignReviewResult.find_all_by_design_review_id(mx234a.id)\n\n assert_equal(14, mx234a_review_results.size)\n assert_equal(4, \n DesignReviewComment.find_all_by_design_review_id(mx234a.id).size)\n for review_result in mx234a_review_results\n assert_equal(\"No Response\", review_result.result)\n end\n\n repost = false\n for reviewer_result in reviewer_result_list\n\n if repost\n update_mx234a = DesignReview.find(mx234a.id)\n update_mx234a.review_status_id = @in_review.id\n update_mx234a.save\n end\n \n rev = User.find(reviewer_result[:user_id]).name\n\n reviewer_session = set_session(reviewer_result[:user_id], Role.find(reviewer_result[:role_id]).name)\n if reviewer_result[:result]\n post(:reviewer_results,\n { :post_comment => { \"comment\" => reviewer_result[:comment] },\n reviewer_result[:role_id_tag] => { reviewer_result[:review_result_id] => reviewer_result[:result] },\n :design_review => { \"id\" => mx234a.id } },\n reviewer_session,\n {:review_results => mx234a_review_results } ) #flash values\n if !reviewer_result[:ignore]\n expected_results[reviewer_result[:role_id].to_s] = reviewer_result[:result]\n end\n else\n post(:reviewer_results,\n { :post_comment => { \"comment\" => reviewer_result[:comment] },\n :design_review => { \"id\" => mx234a.id } },\n reviewer_session,\n {:review_results => mx234a_review_results } ) #flash values\n end\n\n if reviewer_result[:result] != 'REJECTED'\n assert_redirected_to(:action => :post_results)\n else\n if !reviewer_result[:ignore]\n expected_results.each { |k,v| \n expected_results[k] = 'WITHDRAWN' if v == 'APPROVED'\n }\n end\n \n assert_redirected_to(:action => :confirm_rejection)\n #follow_redirect\n # \"follow_redirect\" is part of integration testing and should not be in\n # used in a functional test\n if false #comment out section\n assert_equal(mx234a.id, assigns(:design_review_id))\n end #suppress follow_redirect\n repost = true\n end\n\n if !reviewer_result[:ignore]\n post(:post_results, {}, reviewer_session,\n {:review_results => mx234a_review_results } ) #flash values\n else\n post(:post_results, { :note => 'ignore' }, reviewer_session,\n {:review_results => mx234a_review_results } ) #flash values\n end\n\n email = @emails.pop\n assert_equal(0, @emails.size)\n assert_equal(reviewer_result[:expected_results][:mail_subject],\n email.subject)\n \n design_review_comments = 
DesignReviewComment.find_all_by_design_review_id(mx234a.id)\n assert_equal(reviewer_result[:expected_results][:comments_count], \n design_review_comments.size)\n if reviewer_result[:comment] != ''\n assert_equal(reviewer_result[:comment], design_review_comments.pop.comment)\n end\n\n review_results = DesignReviewResult.find_all_by_design_review_id(mx234a.id)\n\n review_results.each do |review_result|\n assert_equal(expected_results[review_result.role_id.to_s],\n review_result.result)\n end\n\n pre_art_design_review = DesignReview.find(mx234a.id)\n assert_equal(reviewer_result[:expected_results][:review_status_id],\n pre_art_design_review.review_status_id)\n end\n\n #Verify the existing priority and designer.\n mx234a_pre_art_dr = DesignReview.find(mx234a.id)\n mx234a_design = mx234a_pre_art_dr.design\n high = Priority.find_by_name('High')\n low = Priority.find_by_name('Low')\n bob_g = User.find_by_last_name(\"Goldin\")\n scott_g = User.find_by_last_name(\"Glover\")\n patrice_m = User.find_by_last_name(\"Michaels\")\n cathy_m = User.find_by_last_name(\"McLaren\")\n\n assert_equal(high.id, mx234a_design.priority_id)\n assert_equal(5000, mx234a_design.designer_id)\n assert_equal(5001, mx234a_design.peer_id)\n\n release_review = ReviewType.get_release\n pre_art_review = ReviewType.get_pre_artwork\n for mx234a_dr in mx234a_design.design_reviews\n assert_equal(high.id, mx234a_dr.priority_id)\n if release_review.id === mx234a_dr.review_type_id\n assert_equal(patrice_m.name, User.find(mx234a_dr.designer_id).name)\n elsif pre_art_review.id == mx234a_dr.review_type_id\n assert_equal(cathy_m.name, User.find(mx234a_dr.designer_id).name)\n else\n assert_equal(bob_g.name, User.find(mx234a_dr.designer_id).name)\n end\n end\n\n assert_equal(ReviewType.get_pre_artwork.id,\n mx234a_design.phase_id)\n\n # Handle special processing cases\n assert_equal(0, mx234a_design.board.fab_houses.size)\n assert_equal(3, mx234a_design.fab_houses.size)\n fab_houses = mx234a_design.fab_houses.sort_by { |fh| fh.name }\n assert_equal(fab_houses(:ibm).id, fab_houses[0].fab_house_id.to_i)\n assert_equal(fab_houses(:merix).id, fab_houses[1].fab_house_id.to_i)\n assert_equal(fab_houses(:opc).id, fab_houses[2].fab_house_id.to_i)\n \n reviewer_session = dan_slm_vendor_session\n post(:reviewer_results,\n { :post_comment => { \"comment\" => '' },\n :role_id_18 => { 11 => 'APPROVED' },\n :design_review => { \"id\" => mx234a.id },\n :fab_house => { '1' => '0', '2' => '0',\n '3' => '1', '4' => '1',\n '5' => '0', '6' => '0',\n '7' => '0', '8' => '0' } },\n reviewer_session,\n {:review_results => mx234a_review_results } ) #flash values\n assert_redirected_to(:action => :post_results)\n post(:post_results, {}, reviewer_session,\n {:review_results => mx234a_review_results } ) #flash values\n\n email = @emails.pop\n assert_equal(0, @emails.size)\n # Expect comments - the fab houses changed\n assert_equal(mail_subject + ' SLM-Vendor - APPROVED - See comments',\n email.subject)\n \n\n design_update = Design.find(mx234a_design.id)\n assert_equal(2, design_update.board.fab_houses.size)\n assert_equal(2, design_update.fab_houses.size)\n fab_houses = design_update.fab_houses.sort_by { |fh| fh.name }\n assert_equal(fab_houses(:advantech).id, fab_houses[0].fab_house_id.to_i)\n assert_equal(fab_houses(:coretec).id, fab_houses[1].fab_house_id.to_i)\n fab_houses = design_update.board.fab_houses.sort_by { |fh| fh.name }\n assert_equal(fab_houses(:advantech).id, fab_houses[0].fab_house_id.to_i)\n assert_equal(fab_houses(:coretec).id, 
fab_houses[1].fab_house_id.to_i)\n\n comments = DesignReviewComment.find_all_by_design_review_id(mx234a.id)\n assert_equal(16, comments.size)\n assert_equal('Updated the fab houses - Added: AdvantechPWB, Coretec - Removed: OPC, Merix, IBM', \n comments.pop.comment)\n\n expected_results[\"18\"] = 'APPROVED'\n review_results = DesignReviewResult.find_all_by_design_review_id(mx234a.id)\n for review_result in review_results\n assert_equal(expected_results[review_result.role_id.to_s],\n review_result.result)\n end\n\n pre_art_design_review = DesignReview.find(mx234a.id)\n assert_equal(@in_review.id, pre_art_design_review.review_status_id)\n assert_equal('09-May-06',\n pre_art_design_review.completed_on.format_dd_mon_yy)\n\n\n # Handle special proessing for PCB Design Manager\n reviewer_session = jim_pcb_design_session\n post(:reviewer_results,\n { :post_comment => { \"comment\" => 'Absolutely!' },\n :role_id_12 => { '100' => reviewer_result[:result] },\n :design_review => { \"id\" => mx234a.id },\n :designer => { :id => scott_g.id },\n :peer => { :id => bob_g.id },\n :priority => { :id => low.id } },\n reviewer_session)\n post(:post_results, {}, reviewer_session)\n\n email = @emails.shift\n assert_equal(1, @emails.size)\n\n assert_equal(mail_subject + ' PCB Design - APPROVED - See comments',\n email.subject)\n email = @emails.shift\n assert_equal(0, @emails.size)\n\n assert_equal('Catalyst/AC/(pcb252_234_a0_g): The Pre-Artwork design review is complete',\n email.subject)\n\n mx234a_pre_art_dr = DesignReview.find(mx234a.id)\n mx234a_design = Design.find(mx234a_pre_art_dr.design_id)\n\n assert_equal(low.id, mx234a_design.priority_id)\n assert_equal(scott_g.id, mx234a_design.designer_id)\n\n for mx234a_dr in mx234a_design.design_reviews\n assert_equal(low.name, Priority.find(mx234a_dr.priority_id).name)\n case ReviewType.find(mx234a_dr.review_type_id).name\n when 'Pre-Artwork'\n assert_equal(cathy_m.name, User.find(mx234a_dr.designer_id).name)\n when 'Release'\n assert_equal(patrice_m.name, User.find(mx234a_dr.designer_id).name)\n else\n assert_equal(scott_g.name, User.find(mx234a_dr.designer_id).name)\n end\n end\n\n assert_equal(ReviewType.get_placement.id,\n mx234a_design.phase_id)\n assert_equal('Review Completed', mx234a_pre_art_dr.review_status.name)\n assert_equal(Time.now.format_dd_mon_yy,\n mx234a_pre_art_dr.completed_on.format_dd_mon_yy)\n assert_equal(17, \n DesignReviewComment.find_all_by_design_review_id(mx234a.id).size)\n\n\n reviewer_session = dan_slm_vendor_session\n post(:reviewer_results,\n { :post_comment => { \"comment\" => 'This is a test.' 
},\n :design_review => { \"id\" => mx234a.id },\n :fab_house => { '1' => '0', '2' => '0',\n '3' => '0', '4' => '0',\n '5' => '1', '6' => '1',\n '7' => '0', '8' => '0' } },\n reviewer_session,\n {:review_results => mx234a_review_results } ) #flash values\n \n assert_redirected_to(:action => :post_results)\n post(:post_results, {}, reviewer_session,\n {:review_results => mx234a_review_results } ) #flash values\n \n email = @emails.pop\n assert_equal(0, @emails.size)\n # Expect comments - the fab houses changed\n assert_equal(mail_subject + '- Comments added', email.subject)\n \n #\n # THE PLACEMENT REVIEW\n #\n expected_results = { '7' => \"No Response\", '8' => \"No Response\",\n '5' => \"No Response\", '10' => \"No Response\",\n '11' => \"No Response\", '9' => \"No Response\" }\n\n mail_subject = 'Catalyst/AC/(pcb252_234_a0_g): Placement '\n reviewer_result_list= [\n # Espo - CE-DFT Reviewer\n {:user_id => @espo.id,\n :role_id => @ce_dft.id,\n :comment => 'This is good!',\n :result => 'APPROVED',\n :review_result_id => design_review_results(:mx234a_placement_ce_dft).id,\n :role_id_tag => 'role_id_7',\n :expected_results => {\n :comments_count => 2,\n :review_status_id => @in_review.id,\n :mail_count => 1,\n :mail_subject => mail_subject + ' CE-DFT - APPROVED - See comments'\n }\n },\n # Heng Kit Too - DFM Reviewer\n {:user_id => @heng_k.id,\n :role_id => @dfm.id,\n :comment => 'This is good enough to waive.',\n :result => 'WAIVED',\n :review_result_id => design_review_results(:mx234a_placement_dfm).id,\n :role_id_tag => ':role_id_8',\n :expected_results => {\n :comments_count => 3,\n :review_status_id => @in_review.id,\n :mail_count => 1,\n :mail_subject => mail_subject + ' DFM - WAIVED - See comments'\n }\n },\n # Lee Shaff- HW Reviewer\n {:user_id => @lee_s.id,\n :role_id => @hweng.id,\n :comment => 'No Comment',\n :result => 'APPROVED',\n :review_result_id => design_review_results(:mx234a_placement_hw).id,\n :role_id_tag => ':role_id_5',\n :expected_results => {\n :comments_count => 4,\n :review_status_id => @in_review.id,\n :mail_count => 1,\n :mail_subject => mail_subject + ' HWENG - APPROVED - See comments'\n }\n },\n # Tom Flak - Mehanical\n {:user_id => @tom_f.id,\n :role_id => @mechanical.id,\n :comment => 'This is good!',\n :result => 'APPROVED',\n :review_result_id => design_review_results(:mx234a_placement_mech).id,\n :role_id_tag => 'role_id_10',\n :expected_results => {\n :comments_count => 5,\n :review_status_id => @in_review.id,\n :mail_count => 1,\n :mail_subject => mail_subject + ' Mechanical - APPROVED - See comments'\n }\n },\n # Anthony Gentile - Mechanical MFG\n {:user_id => @anthony_g.id,\n :role_id => @mechanical_mfg.id,\n :comment => '',\n :result => 'APPROVED',\n :review_result_id => design_review_results(:mx234a_placement_mech_mfg).id,\n :role_id_tag => 'role_id_11',\n :expected_results => {\n :comments_count => 5,\n :review_status_id => @in_review.id,\n :mail_count => 1,\n :mail_subject => mail_subject + ' Mechanical-MFG - APPROVED - No comments'\n }\n },\n # Rich Ahamed - TDE\n {:user_id => @rich_a.id,\n :role_id => @tde.id,\n :comment => '',\n :result => 'APPROVED',\n :review_result_id => design_review_results(:mx234a_placement_tde).id,\n :role_id_tag => 'role_id_9',\n :expected_results => {\n :comments_count => 5,\n :review_status_id => @review_complete.id,\n :mail_count => 2,\n :mail_subject => mail_subject + ' TDE - APPROVED - No comments'\n }\n }\n ]\n\n mx234a = design_reviews(:mx234a_placement)\n\n update_mx234a = DesignReview.find(mx234a.id)\n 
update_mx234a.review_status_id = @in_review.id\n update_mx234a.save\n\n mx234a_review_results = DesignReviewResult.find_all_by_design_review_id(mx234a.id)\n for mx234a_review_result in mx234a_review_results\n mx234a_review_result.result = 'No Response'\n mx234a_review_result.save\n end\n\n mx234a_review_results = DesignReviewResult.find_all_by_design_review_id(mx234a.id)\n\n assert_equal(reviewer_result_list.size,\n mx234a_review_results.size)\n assert_equal(1, \n DesignReviewComment.find_all_by_design_review_id(mx234a.id).size)\n for review_result in mx234a_review_results\n assert_equal(\"No Response\", review_result.result)\n end\n\n repost = false\n for reviewer_result in reviewer_result_list\n\n if repost\n update_mx234a = DesignReview.find(mx234a.id)\n update_mx234a.review_status_id = @in_review.id\n update_mx234a.update\n end\n \n rev = User.find(reviewer_result[:user_id]).name\n reviewer_session = set_session(reviewer_result[:user_id], Role.find(reviewer_result[:role_id]).name)\n if reviewer_result[:result]\n post(:reviewer_results,\n { :post_comment => { \"comment\" => reviewer_result[:comment] },\n reviewer_result[:role_id_tag] => { reviewer_result[:review_result_id] => reviewer_result[:result] },\n :design_review => { \"id\" => mx234a.id } },\n reviewer_session,\n {:review_results => mx234a_review_results } ) #flash values)\n expected_results[reviewer_result[:role_id].to_s] = reviewer_result[:result]\n else\n post(:reviewer_results,\n { :post_comment => { \"comment\" => reviewer_result[:comment] },\n :design_review => { \"id\" => mx234a.id } },\n reviewer_session,\n {:review_results => mx234a_review_results } )\n end\n\n if reviewer_result[:result] != 'REJECTED'\n assert_redirected_to(:action => :post_results)\n else\n expected_results.each { |k,v| \n expected_results[k] = 'WITHDRAWN' if v == 'APPROVED'\n }\n \n assert_redirected_to(:action => :confirm_rejection)\n #follow_redirect\n # \"follow_redirect\" is part of integration testing and should not be in\n # used in a functional test\n if false #comment out section\n assert_equal(mx234a.id, assigns(:design_review_id))\n end #suppress follow_redirect\n repost = true\n end\n\n post(:post_results, {}, reviewer_session)\n\n assert_equal(reviewer_result[:expected_results][:mail_count], \n @emails.size)\n email = @emails.pop\n\n if @emails.size > 0\n assert_equal(\"Catalyst/AC/(pcb252_234_a0_g): The Placement design review is complete\",\n email.subject)\n email = @emails.pop\n end\n \n assert_equal(reviewer_result[:expected_results][:mail_subject],\n email.subject)\n \n design_review_comments = DesignReviewComment.find_all_by_design_review_id(mx234a.id)\n assert_equal(reviewer_result[:expected_results][:comments_count], \n design_review_comments.size)\n if reviewer_result[:comment] != ''\n assert_equal(reviewer_result[:comment],\n design_review_comments.pop.comment)\n end\n\n review_results = DesignReviewResult.find_all_by_design_review_id(mx234a.id)\n\n for review_result in review_results\n assert_equal(expected_results[review_result.role_id.to_s],\n review_result.result)\n end\n\n placement_design_review = DesignReview.find(mx234a.id)\n assert_equal(reviewer_result[:expected_results][:review_status_id],\n placement_design_review.review_status_id)\n end\n\n mx234a_design.reload\n mx234a_placement_dr = DesignReview.find(mx234a.id)\n assert_equal(ReviewType.get_routing.id,\n mx234a_design.phase_id)\n assert_equal('Review Completed', \n mx234a_placement_dr.review_status.name)\n assert_equal(Time.now.format_dd_mon_yy,\n 
mx234a_placement_dr.completed_on.format_dd_mon_yy)\n\n #\n # THE ROUTING REVIEW\n #\n expected_results = { '7' => \"No Response\", '8' => \"No Response\",\n '5' => \"No Response\", '18' => \"No Response\",\n '11' => \"No Response\" }\n\n mail_subject = 'Catalyst/AC/(pcb252_234_a0_g): Routing '\n reviewer_result_list= [\n # Espo - CE-DFT Reviewer\n {:user_id => @espo.id,\n :role_id => @ce_dft.id,\n :comment => 'This is good!',\n :result => 'APPROVED',\n :review_result_id => design_review_results(:mx234a_route_ce_dft).id,\n :role_id_tag => 'role_id_7',\n :expected_results => {\n :comments_count => 2,\n :review_status_id => @in_review.id,\n :mail_count => 1,\n :mail_subject => mail_subject + ' CE-DFT - APPROVED - See comments'\n }\n },\n # Dan Gough - SLM - Vendor\n {:user_id => @dan_g.id,\n :role_id => @slm_vendor.id,\n :comment => 'I am stressed and I am going to pull a nutty!!!!',\n :result => 'APPROVED',\n :review_result_id => design_review_results(:mx234a_routing_slm_v),\n :role_id_tag => 'role_id_18',\n :expected_results => {\n :comments_count => 3,\n :review_status_id => @in_review.id,\n :mail_count => 1,\n :mail_subject => mail_subject + ' SLM-Vendor - APPROVED - See comments'\n }\n },\n # Heng Kit Too - DFM Reviewer\n {:user_id => @heng_k.id,\n :role_id => @dfm.id,\n :comment => 'This is good enough to waive.',\n :result => 'WAIVED',\n :review_result_id => design_review_results(:mx234a_route_dfm).id,\n :role_id_tag => ':role_id_8',\n :expected_results => {\n :comments_count => 4,\n :review_status_id => @in_review.id,\n :mail_count => 1,\n :mail_subject => mail_subject + ' DFM - WAIVED - See comments'\n }\n },\n # Lee Shaff- HW Reviewer\n {:user_id => @lee_s.id,\n :role_id => @hweng.id,\n :comment => 'No Comment',\n :result => 'APPROVED',\n :review_result_id => design_review_results(:mx234a_route_hw).id,\n :role_id_tag => ':role_id_5',\n :expected_results => {\n :comments_count => 5,\n :review_status_id => @in_review.id,\n :mail_count => 1,\n :mail_subject => mail_subject + ' HWENG - APPROVED - See comments'\n }\n },\n # Anthony Gentile - Mechanical MFG\n {:user_id => @anthony_g.id,\n :role_id => @mechanical_mfg.id,\n :comment => '',\n :result => 'APPROVED',\n :review_result_id => design_review_results(:mx234a_placement_mech_mfg).id,\n :role_id_tag => 'role_id_11',\n :expected_results => {\n :comments_count => 5,\n :review_status_id => @review_complete.id,\n :mail_count => 2,\n :mail_subject => mail_subject + ' Mechanical-MFG - APPROVED - No comments'\n }\n }\n ]\n\n mx234a = design_reviews(:mx234a_routing)\n\n update_mx234a = DesignReview.find(mx234a.id)\n update_mx234a.review_status_id = @in_review.id\n update_mx234a.save\n\n mx234a_review_results = DesignReviewResult.find_all_by_design_review_id(mx234a.id)\n for mx234a_review_result in mx234a_review_results\n mx234a_review_result.result = 'No Response'\n mx234a_review_result.save\n end\n\n mx234a_review_results = DesignReviewResult.find_all_by_design_review_id(mx234a.id)\n\n assert_equal(reviewer_result_list.size,\n mx234a_review_results.size)\n assert_equal(1, \n DesignReviewComment.find_all_by_design_review_id(mx234a.id).size)\n for review_result in mx234a_review_results\n assert_equal(\"No Response\", review_result.result)\n end\n\n repost = false\n for reviewer_result in reviewer_result_list\n\n if repost\n update_mx234a = DesignReview.find(mx234a.id)\n update_mx234a.review_status_id = @in_review.id\n update_mx234a.update\n end\n \n rev = User.find(reviewer_result[:user_id]).name\n reviewer_session = 
set_session(reviewer_result[:user_id], Role.find(reviewer_result[:role_id]).name)\n \n if reviewer_result[:result]\n post(:reviewer_results,\n { :post_comment => { \"comment\" => reviewer_result[:comment] },\n reviewer_result[:role_id_tag] => { reviewer_result[:review_result_id] => reviewer_result[:result] },\n :design_review => { \"id\" => mx234a.id } },\n reviewer_session,\n {:review_results => mx234a_review_results } )\n expected_results[reviewer_result[:role_id].to_s] = reviewer_result[:result]\n else\n post(:reviewer_results,\n { :post_comment => { \"comment\" => reviewer_result[:comment] },\n :design_review => { \"id\" => mx234a.id } },\n reviewer_session,\n {:review_results => mx234a_review_results } )\n end\n\n if reviewer_result[:result] != 'REJECTED'\n assert_redirected_to(:action => :post_results)\n else\n expected_results.each { |k,v| \n expected_results[k] = 'WITHDRAWN' if v == 'APPROVED'\n }\n \n assert_redirected_to(:action => :confirm_rejection)\n #follow_redirect\n # \"follow_redirect\" is part of integration testing and should not be in\n # used in a functional test\n if false #comment out section\n assert_equal(mx234a.id, assigns(:design_review_id))\n end #suppress follow_redirect\n repost = true\n end\n\n post(:post_results, {}, reviewer_session,\n {:review_results => mx234a_review_results })\n assert_equal(reviewer_result[:expected_results][:mail_count], @emails.size)\n email = @emails.pop\n\n if @emails.size > 0\n assert_equal('Catalyst/AC/(pcb252_234_a0_g): The Routing design review is complete',\n email.subject)\n email = @emails.pop\n end\n \n assert_equal(reviewer_result[:expected_results][:mail_subject],\n email.subject)\n \n design_review_comments = DesignReviewComment.find_all_by_design_review_id(mx234a.id)\n assert_equal(reviewer_result[:expected_results][:comments_count], \n design_review_comments.size)\n if reviewer_result[:comment] != ''\n assert_equal(reviewer_result[:comment],\n design_review_comments.pop.comment)\n end\n\n review_results = DesignReviewResult.find_all_by_design_review_id(mx234a.id)\n\n for review_result in review_results\n assert_equal(expected_results[review_result.role_id.to_s],\n review_result.result)\n end\n\n routing_design_review = DesignReview.find(mx234a.id)\n assert_equal(reviewer_result[:expected_results][:review_status_id],\n routing_design_review.review_status_id)\n end\n\n mx234a_design.reload\n mx234a_routing_dr = DesignReview.find(mx234a.id)\n assert_equal(ReviewType.get_final.id,\n mx234a_design.phase_id)\n assert_equal('Review Completed', \n mx234a_routing_dr.review_status.name)\n assert_equal(Time.now.format_dd_mon_yy,\n mx234a_routing_dr.completed_on.format_dd_mon_yy)\n\n #\n # THE FINAL REVIEW\n #\n expected_results = { '7' => \"No Response\", '8' => \"No Response\",\n '5' => \"No Response\", '11' => \"No Response\",\n '10' => \"No Response\", '12' => \"No Response\",\n '13' => \"No Response\", '9' => \"No Response\",\n '6' => \"No Response\" }\n\n mail_subject = 'Catalyst/AC/(pcb252_234_a0_g): Final '\n final_reviewer_result_list = [\n # Espo - CE-DFT Reviewer\n {:user_id => @espo.id,\n :role_id => @ce_dft.id,\n :comment => 'This is good!',\n :result => 'APPROVED',\n :review_result_id => design_review_results(:mx234a_final_ce_dft).id,\n :role_id_tag => 'role_id_7',\n :expected_results => {\n :comments_count => 1,\n :review_status_id => @in_review.id,\n :mail_count => 1,\n :mail_subject => mail_subject + ' CE-DFT - APPROVED - See comments'\n }\n },\n # Heng Kit Too - DFM Reviewer\n {:user_id => @heng_k.id,\n :role_id 
=> @dfm.id,\n :comment => 'This is good enough to waive.',\n :result => 'WAIVED',\n :review_result_id => design_review_results(:mx234a_final_dfm).id,\n :role_id_tag => ':role_id_8',\n :expected_results => {\n :comments_count => 2,\n :review_status_id => @in_review.id,\n :mail_count => 1,\n :mail_subject => mail_subject + ' DFM - WAIVED - See comments'\n }\n },\n # Lee Shaff- HW Reviewer\n {:user_id => @lee_s.id,\n :role_id => @hweng.id,\n :comment => 'No Comment',\n :result => 'APPROVED',\n :review_result_id => design_review_results(:mx234a_final_hw).id,\n :role_id_tag => ':role_id_5',\n :expected_results => {\n :comments_count => 3,\n :review_status_id => @in_review.id,\n :mail_count => 1,\n :mail_subject => mail_subject + ' HWENG - APPROVED - See comments'\n }\n },\n # Anthony Gentile - Mechanical MFG\n {:user_id => @anthony_g.id,\n :role_id => @mechanical_mfg.id,\n :comment => '',\n :result => 'APPROVED',\n :review_result_id => design_review_results(:mx234a_final_mech_mfg).id,\n :role_id_tag => 'role_id_11',\n :expected_results => {\n :comments_count => 3,\n :review_status_id => @in_review.id,\n :mail_count => 1,\n :mail_subject => mail_subject + ' Mechanical-MFG - APPROVED - No comments'\n }\n },\n # Tom Flak - Mehanical\n {:user_id => @tom_f.id,\n :role_id => @mechanical.id,\n :comment => 'This is good!',\n :result => 'APPROVED',\n :review_result_id => design_review_results(:mx234a_final_mech).id,\n :role_id_tag => 'role_id_10',\n :expected_results => {\n :comments_count => 4,\n :review_status_id => @in_review.id,\n :mail_count => 1,\n :mail_subject => mail_subject + ' Mechanical - APPROVED - See comments'\n }\n },\n # Jim Light - PCB Manager\n {:user_id => @jim_l.id,\n :role_id => @pcb_design.id,\n :comment => 'This is good!',\n :result => 'APPROVED',\n :review_result_id => design_review_results(:mx234a_final_pcb_design).id,\n :role_id_tag => 'role_id_12',\n :expected_results => {\n :comments_count => 5,\n :review_status_id => @in_review.id,\n :mail_count => 1,\n :mail_subject => mail_subject + ' PCB Design - APPROVED - See comments'\n }\n },\n # Matt Disanzo - Planner\n {:user_id => @matt_d.id,\n :role_id => @planning.id,\n :comment => 'This is a test.',\n :result => 'APPROVED',\n :review_result_id => design_review_results(:mx234a_final_plan).id,\n :role_id_tag => 'role_id_13',\n :expected_results => {\n :comments_count => 6,\n :review_status_id => @in_review.id,\n :mail_count => 1,\n :mail_subject => mail_subject + ' Planning - APPROVED - See comments'\n }\n },\n # Rich Ahamed - Planner\n {:user_id => @rich_a.id,\n :role_id => @tde.id,\n :comment => 'TDE Rules! 
Planning Drools!',\n :result => 'APPROVED',\n :review_result_id => design_review_results(:mx234a_final_tde).id,\n :role_id_tag => 'role_id_9',\n :expected_results => {\n :comments_count => 7,\n :review_status_id => @in_review.id,\n :mail_count => 1,\n :mail_subject => mail_subject + ' TDE - APPROVED - See comments'\n }\n },\n # Lisa Austin - Valor\n {:user_id => @bob_g.id,\n :role_id => @valor.id,\n :comment => '',\n :result => 'APPROVED',\n :review_result_id => design_review_results(:mx234a_final_valor).id,\n :role_id_tag => 'role_id_6',\n :expected_results => {\n :comments_count => 7,\n :review_status_id => @review_complete.id,\n :mail_count => 2,\n :mail_subject => mail_subject + ' Valor - APPROVED - No comments'\n }\n }\n ]\n\n mx234a = design_reviews(:mx234a_final)\n admin_email = users(:patrice_m).email\n\n\n update_mx234a = DesignReview.find(mx234a.id)\n update_mx234a.review_status_id = @in_review.id\n update_mx234a.save\n\n mx234a_review_results = DesignReviewResult.find_all_by_design_review_id(mx234a.id)\n for mx234a_review_result in mx234a_review_results\n mx234a_review_result.result = 'No Response'\n mx234a_review_result.save\n end\n\n mx234a_review_results = DesignReviewResult.find_all_by_design_review_id(mx234a.id)\n\n assert_equal(final_reviewer_result_list.size, mx234a_review_results.size)\n assert_equal(0, \n DesignReviewComment.find_all_by_design_review_id(mx234a.id).size)\n mx234a_review_results.each do |review_result| \n assert_equal(\"No Response\", review_result.result)\n end\n\n repost = false\n final_reviewer_result_list.each do |reviewer_result|\n\n if repost\n update_mx234a = DesignReview.find(mx234a.id)\n update_mx234a.review_status_id = @in_review.id\n update_mx234a.update\n end\n \n rev = User.find(reviewer_result[:user_id]).name\n reviewer_session = set_session(reviewer_result[:user_id], Role.find(reviewer_result[:role_id]).name)\n\n if reviewer_result[:result]\n post(:reviewer_results,\n { :post_comment => { \"comment\" => reviewer_result[:comment] },\n reviewer_result[:role_id_tag] => { reviewer_result[:review_result_id] => reviewer_result[:result] },\n :design_review => { \"id\" => mx234a.id } },\n reviewer_session,\n {:review_results => mx234a_review_results } )\n expected_results[reviewer_result[:role_id].to_s] = reviewer_result[:result]\n else\n post(:reviewer_results,\n { :post_comment => { \"comment\" => reviewer_result[:comment] },\n :design_review => { \"id\" => mx234a.id } },\n reviewer_session,\n {:review_results => mx234a_review_results } )\n end\n\n if reviewer_result[:result] != 'REJECTED'\n assert_redirected_to(:action => :post_results)\n else\n expected_results.each { |k,v| \n expected_results[k] = 'WITHDRAWN' if v == 'APPROVED'\n }\n \n assert_redirected_to(:action => :confirm_rejection)\n #follow_redirect\n # \"follow_redirect\" is part of integration testing and should not be in\n # used in a functional test\n if false #comment out section\n assert_equal(mx234a.id, assigns(:design_review_id))\n end #suppress follow_redirect\n repost = true\n end\n\n post(:post_results, {}, reviewer_session)\n assert_equal(reviewer_result[:expected_results][:mail_count], \n @emails.size)\n email = @emails.pop\n\n if @emails.size > 0\n assert_equal(\"Catalyst/AC/(pcb252_234_a0_g): The Final design review is complete\",\n email.subject)\n\n found_email = email.cc.detect { |addr| addr == admin_email }\n assert_equal(admin_email, found_email)\n \n email = @emails.pop\n end\n \n assert_equal(reviewer_result[:expected_results][:mail_subject],\n email.subject)\n \n 
design_review_comments = DesignReviewComment.find_all_by_design_review_id(mx234a.id)\n assert_equal(reviewer_result[:expected_results][:comments_count], \n design_review_comments.size)\n if reviewer_result[:comment] != ''\n assert_equal(reviewer_result[:comment],\n design_review_comments.pop.comment)\n end\n\n review_results = DesignReviewResult.find_all_by_design_review_id(mx234a.id)\n\n for review_result in review_results\n assert_equal(expected_results[review_result.role_id.to_s],\n review_result.result)\n end\n\n routing_design_review = DesignReview.find(mx234a.id)\n assert_equal(reviewer_result[:expected_results][:review_status_id],\n routing_design_review.review_status_id)\n end\n\n mx234a_design.reload\n mx234a_final_dr = DesignReview.find(mx234a.id)\n assert_equal(ReviewType.get_release.id,\n mx234a_design.phase_id)\n assert_equal('Review Completed', \n mx234a_final_dr.review_status.name)\n assert_equal(Time.now.format_dd_mon_yy,\n mx234a_final_dr.completed_on.format_dd_mon_yy)\n\n #\n # THE RELEASE REVIEW\n #\n expected_results = { '5' => \"No Response\", '12' => \"No Response\",\n '19' => \"No Response\" }\n\n mail_subject = 'Catalyst/AC/(pcb252_234_a0_g): Release '\n reviewer_result_list= [\n # Lee Shaff- HW Reviewer\n {:user_id => @lee_s.id,\n :role_id => @hweng.id,\n :comment => 'No Comment',\n :result => 'APPROVED',\n :review_result_id => design_review_results(:mx234a_release_hw).id,\n :role_id_tag => ':role_id_5',\n :expected_results => {\n :comments_count => 1,\n :review_status_id => @in_review.id,\n :mail_count => 1,\n :mail_subject => mail_subject + ' HWENG - APPROVED - See comments'\n }\n },\n # Jim Light - PCB Manager\n {:user_id => @jim_l.id,\n :role_id => @pcb_design.id,\n :comment => 'This is good!',\n :result => 'APPROVED',\n :review_result_id => design_review_results(:mx234a_release_pcb_design).id,\n :role_id_tag => 'role_id_12',\n :expected_results => {\n :comments_count => 2,\n :review_status_id => @in_review.id,\n :mail_count => 1,\n :mail_subject => mail_subject + ' PCB Design - APPROVED - See comments'\n }\n },\n # Eileen Corran - Operations Manager\n {:user_id => @eileen_c.id,\n :role_id => roles(:operations_manager).id,\n :comment => '',\n :result => 'APPROVED',\n :review_result_id => design_review_results(:mx234a_release_ops).id,\n :role_id_tag => 'role_id_19',\n :expected_results => {\n :comments_count => 2,\n :review_status_id => @review_complete.id,\n :mail_count => 2,\n :mail_subject => mail_subject + ' Operations Manager - APPROVED - No comments'\n }\n }\n ]\n\n mx234a = design_reviews(:mx234a_release)\n\n update_mx234a = DesignReview.find(mx234a.id)\n update_mx234a.review_status_id = @in_review.id\n update_mx234a.save\n\n mx234a_review_results = DesignReviewResult.find_all_by_design_review_id(mx234a.id)\n for mx234a_review_result in mx234a_review_results\n mx234a_review_result.result = 'No Response'\n mx234a_review_result.save\n end\n\n mx234a_review_results = DesignReviewResult.find_all_by_design_review_id(mx234a.id)\n\n assert_equal(reviewer_result_list.size,\n mx234a_review_results.size)\n assert_equal(0, \n DesignReviewComment.find_all_by_design_review_id(mx234a.id).size)\n for review_result in mx234a_review_results\n assert_equal(\"No Response\", review_result.result)\n end\n\n repost = false\n for reviewer_result in reviewer_result_list\n\n if repost\n update_mx234a = DesignReview.find(mx234a.id)\n update_mx234a.review_status_id = @in_review.id\n update_mx234a.update\n end\n \n rev = User.find(reviewer_result[:user_id]).name\n reviewer_session = 
set_session(reviewer_result[:user_id], Role.find(reviewer_result[:role_id]).name)\n\n if reviewer_result[:result]\n post(:reviewer_results,\n { :post_comment => { \"comment\" => reviewer_result[:comment] },\n reviewer_result[:role_id_tag] => { reviewer_result[:review_result_id] => reviewer_result[:result] },\n :design_review => { \"id\" => mx234a.id } },\n reviewer_session,\n {:review_results => mx234a_review_results } )\n expected_results[reviewer_result[:role_id].to_s] = reviewer_result[:result]\n else\n post(:reviewer_results,\n { :post_comment => { \"comment\" => reviewer_result[:comment] },\n :design_review => { \"id\" => mx234a.id } },\n reviewer_session,\n {:review_results => mx234a_review_results } )\n end\n\n if reviewer_result[:result] != 'REJECTED'\n assert_redirected_to(:action => :post_results)\n else\n expected_results.each { |k,v| \n expected_results[k] = 'WITHDRAWN' if v == 'APPROVED'\n }\n \n assert_redirected_to(:action => :confirm_rejection)\n follow_redirect\n # \"follow_redirect\" is part of integration testing and should not be in\n # used in a functional test\n if false #comment out section\n assert_equal(mx234a.id, assigns(:design_review_id))\n end #suppress follow_redirect\n repost = true\n end\n\n post(:post_results, {}, reviewer_session,\n {:review_results => mx234a_review_results } )\n assert_equal(reviewer_result[:expected_results][:mail_count], \n @emails.size)\n email = @emails.pop\n\n if @emails.size > 0\n assert_equal(\"Catalyst/AC/(pcb252_234_a0_g): The Release design review is complete\",\n email.subject)\n email = @emails.pop\n end\n\n assert_equal(reviewer_result[:expected_results][:mail_subject],\n email.subject)\n \n design_review_comments = DesignReviewComment.find_all_by_design_review_id(mx234a.id)\n assert_equal(reviewer_result[:expected_results][:comments_count], \n design_review_comments.size)\n if reviewer_result[:comment] != ''\n assert_equal(reviewer_result[:comment],\n design_review_comments.pop.comment)\n end\n\n review_results = DesignReviewResult.find_all_by_design_review_id(mx234a.id)\n\n for review_result in review_results\n assert_equal(expected_results[review_result.role_id.to_s],\n review_result.result)\n end\n\n release_design_review = DesignReview.find(mx234a.id)\n assert_equal(reviewer_result[:expected_results][:review_status_id],\n release_design_review.review_status_id)\n end\n\n mx234a_design.reload\n mx234a_release_dr = DesignReview.find(mx234a.id)\n assert_equal(Design::COMPLETE, mx234a_design.phase_id)\n assert_equal('Review Completed', \n mx234a_release_dr.review_status.name)\n assert_equal(Time.now.format_dd_mon_yy,\n mx234a_release_dr.completed_on.format_dd_mon_yy)\n\n end",
"def test_brief_author\n @current_test_name = \"Arch - Testing Brief Results Display with scope set to author\"\n for_all_nyu_users do |user|\n each_view_default_searches do |search_term|\n next if views_sans_access.include?(@view)\n login_for_nyu user do\n # Set scope 1 to author\n set_scope1 \"WAU\"\n # Search Arch\n submit_search search_term\n # Make sure the common elements are there\n common_elements?\n # Check that facets are present\n facets?\n # Check that results are present\n results?\n end\n end\n end\n end",
"def test_update_review_assignments\n \n reviewer_session = rich_reviewer_session\n\n hw_review_result = design_review_results(:mx234a_pre_artwork_hw)\n tde_review_result = design_review_results(:mx234a_pre_artwork_tde)\n assert_equal('Lee Schaff', User.find(hw_review_result.reviewer_id).name)\n\n put(:update_review_assignments,\n { :id => design_reviews(:mx234a_pre_artwork).id,\n 'HWENG_5_assign_to_self' => 'yes' },\n reviewer_session)\n email = @emails.pop\n assert_equal(0, @emails.size)\n assert_equal('Catalyst/AC/(pcb252_234_a0_g): The Hardware Engineer (EE) review has been reassigned to Rich Ahamed',\n email.subject)\n\n hw_review_result.reload\n assert_equal('Rich Ahamed', User.find(hw_review_result.reviewer_id).name)\n assert_equal('Rich Ahamed', User.find(tde_review_result.reviewer_id).name)\n\n put(:update_review_assignments,\n { :id => design_reviews(:mx234a_pre_artwork).id,\n :user => { 'TDE' => '7201',\n 'HWENG' => '6000' } },\n reviewer_session)\n email = @emails.pop\n assert_equal(1, @emails.size)\n assert_equal('Catalyst/AC/(pcb252_234_a0_g): You have been assigned to perform the TDE Engineer review',\n email.subject)\n email = @emails.pop\n assert_equal(0, @emails.size)\n assert_equal('Catalyst/AC/(pcb252_234_a0_g): You have been assigned to perform the Hardware Engineer (EE) review',\n email.subject)\n\n hw_review_result.reload\n assert_equal('Ben Bina', User.find(hw_review_result.reviewer_id).name)\n tde_review_result.reload\n assert_equal('Man Chan', User.find(tde_review_result.reviewer_id).name)\n\n end",
"def test01_LI2_view_contributor\n\t\t$browser.goto($patch_login)\n\t\tloginDirectory\n\t\t$browser.goto($patch_directory_listing)\n\t\t$directory_top.fire_event(\"onclick\")\n\t\t\n\t\tsleep 5\n\t\tbegin\n\t\tassert assert $browser.text.include? \"Recent Activity\"\n\t\trescue => e\n\t\t\tputs e\n\t\tputs \"LI2_view_contributor: FAILED! Unable to view top contributor profile.\"\n\t\tend\n\tend",
"def test_review_type_role_assignment\n\n # Verify response when not logged in.\n get :review_type_role_assignment, {},{}\n assert_redirected_to(:controller => 'tracker', :action => 'index')\n assert_equal(Pcbtr::MESSAGES[:admin_only], flash['notice'])\n\n\n # Verify response when logged in as a non-admin\n get :review_type_role_assignment, {}, rich_designer_session\n assert_redirected_to(:controller => 'tracker', :action => 'index')\n #assert_equal(Pcbtr::MESSAGES[:admin_only], flash['notice'])\n\n # Verify response when logged in as an admin\n get :review_type_role_assignment, {}, cathy_admin_session\n assert_response :success\n\n roles = assigns(roles)['roles']\n review_types = assigns(review_types)['review_types']\n\n assert_equal(@expected_values.size, roles.size)\n assert_equal(5, review_types.size)\n\n roles.each_with_index { |role, i|\n expected_role = @expected_values[i]\n assert_equal(expected_role[:name], role.name)\n\n review_types = role.review_types.sort_by { |rt| rt.name }\n review_types.each_with_index { |rt, j|\n expected_name = expected_role[:review_types][j]\n assert_equal(expected_role[:name]+'::'+expected_name.to_s,\n role.name+'::'+rt.name)\n }\n\n }\n\n expected_values = [\n {:name => 'Final',\n :role_names => ['CE-DFT', 'DFM', 'HWENG',\n 'Mechanical', 'Mechanical-MFG', 'PCB Design',\n 'Planning', 'TDE', 'Valor'] },\n {:name => 'Pre-Artwork',\n :role_names => ['CE-DFT', 'DFM', 'HWENG', \n 'Library', 'Mechanical', 'Mechanical-MFG',\n 'PCB Input Gate', 'PCB Mechanical', 'Planning',\n 'SLM BOM', 'SLM-Vendor', 'TDE',\n 'Valor' ]},\n {:name => 'Placement',\n :role_names => ['CE-DFT', 'DFM', 'HWENG',\n 'Mechanical', 'Mechanical-MFG', 'TDE' ]},\n {:name => 'Routing',\n :role_names => ['CE-DFT', 'DFM', 'HWENG',\n 'Library', 'Mechanical-MFG' ]},\n {:name => 'Release',\n :role_names => ['HWENG', 'Operations Manager',\n 'PCB Design' ]},\n ]\n\n review_types.each_with_index do |review_type, i|\n expected_rt = expected_values[i]\n assert_equal(expected_rt[:name], review_type.name)\n\n review_type.roles.sort_by { |role| role.name }.each_with_index do |role, j|\n expected_role = expected_rt[:role_names]\n expected_name = expected_role[j]\n assert_equal(expected_name, role.name)\n end\n end\n\n end",
"def test_design_information\n\n end",
"def test_contributor_search\n search_details = ContributorAdvancedSearchDetails.new\n search_details.known_as=\"\"\n search_details.role_type_id=26\n search_details.apra=\"no\"\n search_details.canz=\"true\"\n \n #Add a role type\n search_details.role_type_id = RoleType.find(40)\n search_details.status_id = Status.find_by_symbol(:pending).status_id\n ids = AdvancedFinderHelper.find_contributors(search_details)\n display_contributors(ids)\nend",
"def test_ID_25863_comment_on_review()\n login_as_user1\n read_all_updates\n share_review(\"outside-in\")\n logout_common\n login_as_user2\n leave_comment_on_share_review_group(\"outside-in\")\n logout_common\n login_as_user1\n verify_updates\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
test_home_page_redirects Description: This method performs functional testing of the home page redirects.
|
def test_home_page_redirects
post('admin_home', {}, {})
assert_redirected_to(:controller => 'tracker', :action => 'index')
post('reviewer_home', {}, {})
assert_redirected_to(:controller => 'tracker', :action => 'index')
post('manager_home', {}, {})
assert_redirected_to(:controller => 'tracker', :action => 'index')
post('pcb_admin_home', {}, {})
assert_redirected_to(:controller => 'tracker', :action => 'index')
post('designer_home', {}, {})
assert_redirected_to(:controller => 'tracker', :action => 'index')
end
|
[
"def test_home\n @current_test_name = \"Arch - Testing Home Page\"\n each_driver do\n each_view_redirects do\n common_elements?\n end\n end\n end",
"def test_my_default\r\n get '/'\r\n follow_redirect!\r\n assert last_response.ok? # Tests if a positive redirect response received\r\n assert_equal \"http://example.org/game\", last_request.url\r\n end",
"def test_route_for_pages_home\n\t\tassert_routing({path: '/', method: :get}, {controller: 'pages', action: 'home'})\n\tend",
"def test_p1_00010_homepage\n @homepage.goto_homepage\n \tassert @browser.body.present?\n @profilepage = CommunityProfilePage.new(@browser)\n assert @profilepage.topnav.present?\n @homepage.newline\n end",
"def test_p1_00030_homepage_search_bar\n if !@homepage.home.present?\n @homepage.goto_homepage\n end\n @homepage.check_homepage_search_bar\n assert @homepage.search_bar.present?\n @homepage.newline\n end",
"def redirect_to_homepage?\n @page.homepage?\n end",
"def process_page_with_home_path(page)\n homepage = Page.current_site.homepage\n if page.is_a?(FileNotFoundPage) && !params[:url].include?(homepage.slug)\n if homepage.slug != \"/\"\n false if redirect_to \"/#{homepage.slug}/#{params[:url]}\"\n else\n process_page_without_home_path(page)\n end\n else\n process_page_without_home_path(page)\n end\n\n end",
"def Home\n if logged_in?\n @agent.page.links[0].click\n if @agent.page.title == 'Facebook'\n return true\n end\n @errors << \"Couldn't access the Facebook homepage.\"\n @agent.history.pop\n end\n false\n end",
"def test_p1_00140_homepage_footer\n if !@homepage.home.present?\n @homepage.goto_homepage\n end\n @homepage.check_homepage_footer\n #puts @homepage.footer.text\n assert @homepage.footer.present?\n @homepage.newline\n end",
"def test_reviewer_home\n\n post('index', {}, {})\n assert_response(:success)\n assert_template('tracker/index')\n \n post('index', {}, lee_hweng_session)\n assert_response(:success)\n assert_template('tracker/reviewer_home')\n\n #follow_redirect\n #assert_no_tag :content => \"POST Placement Review\"\n\n end",
"def clean_home_url\n redirect_to(home_page) and return if request.path == '/'\n end",
"def test_redirect_url_match\n process :redirect_external\n assert @response.redirect?\n assert @response.redirect_url_match?(\"rubyonrails\")\n assert @response.redirect_url_match?(/rubyonrails/)\n assert !@response.redirect_url_match?(\"phpoffrails\")\n assert !@response.redirect_url_match?(/perloffrails/)\n end",
"def go_home\n self.homelink.click\n end",
"def test_default_pages\n get :show, :link => 'HomePage' # Trigger creation of default pages\n page = nil\n ['header'].each do |page_name|\n page = Page.find_by_name(page_name)\n assert(page, \"Page #{page_name} doesn't exist\")\n assert_equal(:html, page.content_type, \"Page '#{page.name}\")\n assert_equal('system', page.author, \"Page '#{page.name}\")\n end\n end",
"def test_nav_between_profiles\r\n\r\n recipient_profile_url = PropertiesReader.get_recipient_profile_url\r\n @place_holder.login_goto_profile(recipient_profile_url)\r\n\r\n # Verify navigation to and back from location page\r\n begin\r\n\r\n # Verify navigation to the location page\r\n assert_fail_message = 'Clicking on the location of this profile takes to the expected URL'\r\n @place_holder.nav_via_hyperlink_assert_url(PropertiesReader.get_address_text_to_click_on, PropertiesReader.get_exp_location_navigated_to, assert_fail_message)\r\n\r\n # Click on the browser back button and verify that the user is navigated back to the original profile page\r\n back_nav_assert_fail_msg = 'Clicking on the back button from the location page does not take to the expected URL'\r\n @place_holder.browser_go_back_assert_url(recipient_profile_url, back_nav_assert_fail_msg)\r\n\r\n end\r\n\r\n # Verify navigation to and back from phone number page\r\n begin\r\n\r\n # Verify navigation to the address page\r\n assert_fail_message = 'Clicking on the phone number of this profile takes to the expected URL'\r\n @place_holder.nav_via_hyperlink_assert_url(PropertiesReader.get_phone_number_text_to_click_on, PropertiesReader.get_exp_phone_navigated_to, assert_fail_message)\r\n\r\n # Click on the browser back button and verify that the user is navigated back to the original profile page (the following navigations rely being on this pag)\r\n back_nav_assert_fail_msg = 'Clicking on the back button from the phone number page does not take to the expected URL'\r\n @place_holder.browser_go_back_assert_url(recipient_profile_url, back_nav_assert_fail_msg)\r\n\r\n end\r\n\r\n # Verify navigation to and back from Tax ID page\r\n begin\r\n\r\n # Verify navigation to the Tax ID page\r\n assert_fail_message = 'Clicking on the Tax ID of this profile takes to the expected URL'\r\n @place_holder.nav_via_hyperlink_assert_url(PropertiesReader.get_tax_id_text_to_click_on, PropertiesReader.get_exp_tax_id_navigated_to, assert_fail_message)\r\n\r\n # Click on the browser back button and verify that the user is navigated back to the original profile page (the following navigations rely being on this pag)\r\n back_nav_assert_fail_msg = 'Clicking on the back button from the Tax ID page does not take to the expected URL'\r\n @place_holder.browser_go_back_assert_url(recipient_profile_url, back_nav_assert_fail_msg)\r\n\r\n end\r\n\r\n # Verify navigation to and back from Provider page\r\n begin\r\n\r\n # Verify navigation to the Provider page\r\n assert_fail_message = 'Clicking on the Provider of this takes to the expected URL'\r\n provider_link_text_to_click_on = PropertiesReader.get_provider_link_text_to_click_on\r\n @place_holder.nav_via_hyperlink_assert_url(provider_link_text_to_click_on, PropertiesReader.get_exp_provider_navigated_to, assert_fail_message)\r\n\r\n # Click on the browser back button and verify that the user is navigated back to the original profile page (the following navigations rely being on this pag)\r\n back_nav_assert_fail_msg = 'Clicking on the back button from the Tax ID page does not take to the expected URL'\r\n @place_holder.browser_go_back_assert_url(recipient_profile_url, back_nav_assert_fail_msg)\r\n\r\n end\r\n\r\n end",
"def follow_redirect!\n assert @response, \"No response made\"\n assert_response :redirect, \"No redirect made\"\n follow_redirect\n end",
"def test_00090_homepage_open_q_conv_link\n @browser.wait_until { @home_page.open_questions_widget.present? }\n @browser.wait_until { @home_page.open_questions_widget.posts.size > 0 }\n\n # verify the title link would redirect to conversation page.\n q_title = @home_page.open_questions_widget.posts[0].title\n @home_page.open_questions_widget.posts[0].click_title_link\n @browser.wait_until { @convdetail_page.conv_detail.present? }\n assert_equal @convdetail_page.conv_title.when_present.text, q_title, \"title doesn't match\"\n\n @home_page.navigate_in\n @browser.wait_until { @home_page.open_questions_widget.posts.size > 0 }\n\n # verify avatar link would redirect to profile page\n @home_page.open_questions_widget.posts[0].click_avatar\n @browser.wait_until { @profile_page.profile_page.present? }\n author_name = @profile_page.profile_page_author_name_betaon.when_present.text\n\n @home_page.navigate_in\n @browser.wait_until { @home_page.open_questions_widget.posts.size > 0 }\n\n # verify author link would redirect to profile page\n @home_page.open_questions_widget.posts[0].click_author_link\n @browser.wait_until { @profile_page.profile_page.present? }\n assert_equal @profile_page.profile_page_author_name_betaon.when_present.text, author_name, \"Author doesn't match\"\n\n @home_page.navigate_in\n @browser.wait_until { @home_page.open_questions_widget.posts.size > 0 }\n\n # verify topic link would redirect to the correct topic page\n post_in_topic = @home_page.open_questions_widget.posts[0].in_topic_link.when_present.text\n\n @home_page.open_questions_widget.posts[0].click_topic_link\n @browser.wait_until { @topicdetail_page.topic_filter.present? }\n assert_equal @topicdetail_page.topic_title.when_present.text, post_in_topic, \"Title doesn't match\"\n end",
"def check_existing_home_page\n\t\tif self.home_page?\n\t\t\t@old_home = Page.get_home_page\n\t\t\tunless @old_home.nil? || @old_home == self\n\t\t\t\t@old_home.home_page = false\n\t\t\t\t@old_home.save\n\t\t\tend\n\t\tend\n\t\t\n\tend",
"def redirect_to_home\n redirect_to session[:superadmin_mode] ? ubiquo.superadmin_home_path : ubiquo.home_path\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
GET /evaluation_builders GET /evaluation_builders.json
|
def index
@evaluation_builders = @event.evaluation_builders
end
|
[
"def index\n @builders = Builder.all\n end",
"def builders\n @builders ||= {}\n end",
"def builders(*values)\n if values.size > 0\n @builders ||= []\n @builders += values.flatten\n else\n @builders || (@project.parent ? @project.parent.eclipse.builders : [])\n end\n end",
"def create\n @evaluation_builder = @event.evaluation_builders.new(evaluation_builder_params)\n @evaluation_builder.organization_id = @event.organization_id\n respond_to do |format|\n if @evaluation_builder.save\n format.html { redirect_to @evaluation_builder, notice: 'Evaluation builder was successfully created.' }\n format.json { render :show, status: :created, location: @evaluation_builder }\n else\n format.html { render :new }\n format.json { render json: @evaluation_builder.errors, status: :unprocessable_entity }\n end\n end\n end",
"def cmdBuildersCountGetList\n params = {\n \"builders_count_get_list\" => 1,\n \"app_version\" => @config[\"version\"],\n }\n response = @client.request(params, @sid, true, false)\n serializer = Serializer.new(response)\n return serializer.parseBuildersList\n end",
"def generate_builders\n @builders = @data.map do |object|\n Builder.new(object, @settings, @options)\n end\n end",
"def builds(filters = {})\n fetch_resources_lazily(\"builds\", filters)\n end",
"def builders\n @builders ||= CompositeBuilder.new(\n basic_metadata_builder,\n spatial_builder,\n date_builder,\n references_builder,\n layer_info_builder,\n slug_builder\n )\n end",
"def to_a\n generate_builders\n\n if template_cache_configured? && Rabl.configuration.use_read_multi\n map_engines_to_builders\n read_cache_results\n replace_engines_with_cache_results\n end\n\n result = @builders.map(&:to_hash)\n result = result.map(&:presence).compact if Rabl.configuration.exclude_empty_values_in_collections\n result\n end",
"def builds\n @builds ||= RequestList.new\n end",
"def new\n @evaluation_criterium = EvaluationCriterium.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @evaluation_criterium }\n end\n end",
"def get_brandings\n request :get, \"/v3/brandings.json\"\n end",
"def get_building\n if params[:customer].present?\n @buildings = Customer.find(params[:customer]).buildings\n else\n @buildings = Customer.all\n end\n if request.xhr?\n respond_to do |format|\n format.json {\n render json: {buildings: @buildings}\n }\n end\n end\n end",
"def get_brandings\n request :get,\n '/v3/brandings.json'\n end",
"def index\n @schedules_builders = SchedulesBuilder.all\n end",
"def index\n @evaluation_buyers = EvaluationBuyer.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @evaluation_buyers }\n end\n end",
"def get_building\n if params[:customer].present?\n @buildings = Customer.find(params[:customer]).buildings\n else\n # @buildings = Customer.all\n puts params[:customer]\n end\n # to tell if the request is sent via ajax\n if request.xhr?\n respond_to do |f|\n f.json {\n render json: {buildings: @buildings}\n }\n end\n end\n end",
"def show\n @evaluation_criterium = EvaluationCriterium.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @evaluation_criterium }\n end\n end",
"def index\n @evaluation_types = EvaluationType.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @evaluation_types }\n end\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
POST /evaluation_builders POST /evaluation_builders.json
|
def create
@evaluation_builder = @event.evaluation_builders.new(evaluation_builder_params)
@evaluation_builder.organization_id = @event.organization_id
respond_to do |format|
if @evaluation_builder.save
format.html { redirect_to @evaluation_builder, notice: 'Evaluation builder was successfully created.' }
format.json { render :show, status: :created, location: @evaluation_builder }
else
format.html { render :new }
format.json { render json: @evaluation_builder.errors, status: :unprocessable_entity }
end
end
end
|
[
"def index\n @evaluation_builders = @event.evaluation_builders\n end",
"def create\n @evaluation = Evaluation.new(evaluation_params)\n\n if @evaluation.save\n render :show, status: :created\n else\n render json: @evaluation.errors, status: :unprocessable_entity\n end\n end",
"def create\n @evaluation = current_user.evaluations.build(evaluation_params)\n\n if @evaluation.save\n # render :show, status: :created\n render json: {evaluation: @evaluation}, status: :created\n else\n render json: {errors: @evaluation.errors.full_messages.join(',')}, status: :unprocessable_entity\n end\n end",
"def create\n @evaluations = Evaluation.new(evaluation_params)\n\n respond_to do |format|\n if @evaluation.save\n format.html { redirect_to @evaluation, notice: 'evaluation was successfully created.' }\n format.json { render :show, status: :created, location: @evaluation }\n else\n format.html { render :new }\n format.json { render json: @evaluation.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @evaluation_criterium = EvaluationCriterium.new(params[:evaluation_criterium])\n\n respond_to do |format|\n if @evaluation_criterium.save\n format.html { redirect_to @evaluation_criterium, notice: 'Evaluation criterium was successfully created.' }\n format.json { render json: @evaluation_criterium, status: :created, location: @evaluation_criterium }\n else\n format.html { render action: \"new\" }\n format.json { render json: @evaluation_criterium.errors, status: :unprocessable_entity }\n end\n end\n end",
"def builders\n @builders ||= {}\n end",
"def create\r\n @evaluation = Evaluation.new(params[:evaluation])\r\n @answers = @evaluation.responses\r\n\r\n respond_to do |format|\r\n if @evaluation.save && @answers.each{|a| a.save }.all?\r\n format.html { redirect_to(@evaluation, :notice => 'Evaluation was successfully created.') }\r\n format.xml { render :xml => @evaluation, :status => :created, :location => @evaluation }\r\n else\r\n format.html { render :action => \"new\" }\r\n format.xml { render :xml => @evaluation.errors, :status => :unprocessable_entity }\r\n end\r\n end\r\n end",
"def create\n @qc_evaluation = QcEvaluation.new(params[:qc_evaluation])\n\n respond_to do |format|\n if @qc_evaluation.save\n format.html { redirect_to @qc_evaluation, notice: 'Qc evaluation was successfully created.' }\n format.json { render json: @qc_evaluation, status: :created, location: @qc_evaluation }\n else\n format.html { render action: \"new\" }\n format.json { render json: @qc_evaluation.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n # Create a new evaluation\n @eval = Eval.new(eval_params)\n\n respond_to do |format|\n if @eval.save\n format.html { redirect_to @eval, notice: 'Eval was successfully created.' }\n format.json { render :show, status: :created, location: @eval }\n else\n format.html { render :new }\n format.json { render json: @eval.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @evaluation_builder.update(evaluation_builder_params)\n format.html { redirect_to @evaluation_builder, notice: 'Evaluation builder was successfully updated.' }\n format.json { render :show, status: :ok, location: @evaluation_builder }\n else\n format.html { render :edit }\n format.json { render json: @evaluation_builder.errors, status: :unprocessable_entity }\n end\n end\n end",
"def node_set_builders(node, builders)\n @client.request_session(\n {\n 'node_set_builders' => 1,\n 'id_node' => node,\n 'builders' => builders,\n 'app_version' => @version\n },\n @sid\n )\n end",
"def builders(*values)\n if values.size > 0\n @builders ||= []\n @builders += values.flatten\n else\n @builders || (@project.parent ? @project.parent.eclipse.builders : [])\n end\n end",
"def generate_builders\n @builders = @data.map do |object|\n Builder.new(object, @settings, @options)\n end\n end",
"def new\n @evaluation_criterium = EvaluationCriterium.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @evaluation_criterium }\n end\n end",
"def create\n @evaluation = @exam.evaluations.build(params[:evaluation])\n\n respond_to do |format|\n if @evaluation.save\n format.html { redirect_to(signature_part_exam_url(@signature, @part, @exam), :notice => 'La nota se creo correctamente.') }\n format.xml { render :xml => @evaluation, :status => :created, :location => @evaluation }\n else\n format.html { render 'exams/show' }\n format.xml { render :xml => @evaluation.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def cmdNodeSetBuilders(id, builders)\n params = {\n \"node_set_builders\" => 1,\n \"id_node\" => id,\n \"builders\" => builders,\n \"app_version\" => @config[\"version\"],\n }\n response = @client.request(params, @sid)\n return response\n end",
"def create\n @evaluation_detail = EvaluationDetail.new(params[:evaluation_detail])\n\n respond_to do |format|\n if @evaluation_detail.save\n format.html { redirect_to @evaluation_detail, notice: 'Evaluation detail was successfully created.' }\n format.json { render json: @evaluation_detail, status: :created, location: @evaluation_detail }\n else\n format.html { render action: \"new\" }\n format.json { render json: @evaluation_detail.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n puts \"groups_eval_params: #{groups_eval_params.inspect}\"\n @groups_eval = GroupsEval.new(groups_eval_params)\n puts \"@groups_eval: #{@groups_eval.inspect}\"\n\n if @groups_eval.save\n render json: { data: {groups_eval: @groups_eval}, errors: @errors }.to_json, status: :created\n else\n @errors += @groups_eval.errors.full_messages\n render json: { data: {groups_eval: @groups_eval.errors}, errors: @errors}.to_json, status: :ok\n end\n end",
"def create\n @item_evaluation = ItemEvaluation.new(item_evaluation_params)\n\n respond_to do |format|\n if @item_evaluation.save\n format.html { redirect_to @item_evaluation, notice: 'Item evaluation was successfully created.' }\n format.json { render :show, status: :created, location: @item_evaluation }\n else\n format.html { render :new }\n format.json { render json: @item_evaluation.errors, status: :unprocessable_entity }\n end\n end\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
PATCH/PUT /evaluation_builders/1 PATCH/PUT /evaluation_builders/1.json
|
def update
respond_to do |format|
if @evaluation_builder.update(evaluation_builder_params)
format.html { redirect_to @evaluation_builder, notice: 'Evaluation builder was successfully updated.' }
format.json { render :show, status: :ok, location: @evaluation_builder }
else
format.html { render :edit }
format.json { render json: @evaluation_builder.errors, status: :unprocessable_entity }
end
end
end
|
[
"def create\n @evaluation_builder = @event.evaluation_builders.new(evaluation_builder_params)\n @evaluation_builder.organization_id = @event.organization_id\n respond_to do |format|\n if @evaluation_builder.save\n format.html { redirect_to @evaluation_builder, notice: 'Evaluation builder was successfully created.' }\n format.json { render :show, status: :created, location: @evaluation_builder }\n else\n format.html { render :new }\n format.json { render json: @evaluation_builder.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @builder.update(builder_params)\n format.html { redirect_to @builder, notice: 'Builder was successfully updated.' }\n format.json { render :show, status: :ok, location: @builder }\n else\n format.html { render :edit }\n format.json { render json: @builder.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n render json: @building.errors unless @building.update(building_params)\n end",
"def update\n retval = @survey.update_quality_control(params[:quality_control_questions_type], params[:quality_control_questions_ids] || [])\n render_json_auto retval and return\n end",
"def update\n @evaluation_criterium = EvaluationCriterium.find(params[:id])\n\n respond_to do |format|\n if @evaluation_criterium.update_attributes(params[:evaluation_criterium])\n format.html { redirect_to @evaluation_criterium, notice: 'Evaluation criterium was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @evaluation_criterium.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n render json: @building_level.errors unless @building_level.update(building_level_params)\n end",
"def update\n request_body_Data= '{ \"widget\":\n {\n \"name\" : \"'+params[:name]+'\",\n \"description\" : \"'+params[:description]+'\"\n }}'\n response = RestClient::Request.new({\n method: :put,\n url: ENV['API_URL'] + '/widgets/' + params[:id],\n payload: request_body_Data,\n headers: { Authorization: session[:access_token], content_type: 'application/json'}\n }).execute do |response, request, result|\n case response.code\n when 400\n [ :error, JSON.parse(response) ]\n when 200\n [ :success, JSON.parse(response) ]\n json=JSON.parse(response)\n @widget= Widget.new do |widget|\n widget.id=json[\"data\"][\"widget\"][\"id\"]\n widget.name=json[\"data\"][\"widget\"][\"name\"]\n widget.description=json[\"data\"][\"widget\"][\"description\"]\n widget.kind=json[\"data\"][\"widget\"][\"kind\"]\n widget.userid=json[\"data\"][\"widget\"][\"user\"][\"id\"]\n widget.username=json[\"data\"][\"widget\"][\"user\"][\"name\"]\n widget.owner=json[\"data\"][\"widget\"][\"owner\"]\n end\n else\n fail \"Invalid response #{response.to_str} received.\"\n end\n end\n respond_to do |format|\n if @widget\n format.html { redirect_to @widget, notice: 'Widget was successfully updated.' }\n format.json { render :show, status: :ok, location: @widget }\n else\n format.html { render :edit }\n format.json { render json: @widget.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @survey_choice = Survey::Choice.find(params[:id])\n\n respond_to do |format|\n if @survey_choice.update_attributes(params[:survey_choice])\n format.html { redirect_to @survey_choice, notice: 'Choice was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @survey_choice.errors, status: :unprocessable_entity }\n5 end\n end\n end",
"def update_cell_maker\n setting = Setting.last || Setting.new\n setting.base_url = cell_maker_params[:base_url]\n setting.master_wizard_id = cell_maker_params[:master_wizard_id]\n setting.save\n\n render json: { status: true }\n end",
"def update\n @evaluation = Evaluation.find(params[:id])\n\n respond_to do |format|\n if @evaluation.update_attributes(params[:evaluation])\n format.html { redirect_to @evaluation, notice: 'Evaluation was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @evaluation.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @qc_evaluation = QcEvaluation.find(params[:id])\n\n respond_to do |format|\n if @qc_evaluation.update_attributes(params[:qc_evaluation])\n format.html { redirect_to @qc_evaluation, notice: 'Qc evaluation was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @qc_evaluation.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @change_evaluation.update(change_evaluation_params)\n format.html { redirect_to @change_evaluation, notice: 'Change evaluation was successfully updated.' }\n format.json { render :show, status: :ok, location: @change_evaluation }\n else\n format.html { render :edit }\n format.json { render json: @change_evaluation.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @eval_type = EvalType.find(params[:id])\n\n respond_to do |format|\n if @eval_type.update_attributes(params[:eval_type].permit(:name, :organization_id))\n format.html { redirect_to eval_types_url, notice: I18n.t(:general_update_success) }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @eval_type.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @evaluation.update(evaluation_params)\n format.html { redirect_to @evaluation, notice: 'Evaluation was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @evaluation.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @built_with.update(built_with_params)\n format.html { redirect_to @built_with, notice: 'Built with was successfully updated.' }\n format.json { render :show, status: :ok, location: @built_with }\n else\n format.html { render :edit }\n format.json { render json: @built_with.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @evaluation_buyer = EvaluationBuyer.find(params[:id])\n\n respond_to do |format|\n if @evaluation_buyer.update_attributes(params[:evaluation_buyer])\n format.html { redirect_to @evaluation_buyer, notice: 'Evaluation buyer was successfully updated.' }\n format.json { head :ok }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @evaluation_buyer.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @specific_requirement = SpecificRequirement.find(params[:id])\n\n respond_to do |format|\n if @specific_requirement.update_attributes(params[:specific_requirement])\n format.html { redirect_to @specific_requirement, notice: 'Specific requirement was successfully updated.' }\n format.json { head :ok }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @specific_requirement.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update \n @user_requirement = UserRequirement.find(params[:id])\n if @user_requirement.update_attributes(user_requirement_params) \n render json: @user_requirement, status: :updated\n else \n render json: @user_requirement.errors, status: :unprocessable_entity \n end \n end",
"def update\n render_json_auto @survey.update_logic_control_rule(params[:id].to_i, params[:logic]) and return\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
DELETE /evaluation_builders/1 DELETE /evaluation_builders/1.json
|
def destroy
@evaluation_builder.destroy
respond_to do |format|
format.html { redirect_to event_evaluation_builders_url(@event), notice: 'Evaluation builder was successfully destroyed.' }
format.json { head :no_content }
end
end
|
[
"def destroy\n @builder.destroy\n respond_to do |format|\n format.html { redirect_to builders_url, notice: 'Builder was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def delete_floor_plan(args = {}) \n delete(\"/files.json/floorplan/images\", args)\nend",
"def destroy\n @url_builder.destroy\n respond_to do |format|\n format.html { redirect_to url_builders_url, notice: 'Url builder was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @evaluation_criterium = EvaluationCriterium.find(params[:id])\n @evaluation_criterium.destroy\n\n respond_to do |format|\n format.html { redirect_to evaluation_criteria_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @evaluation.destroy\n respond_to do |format|\n format.html { redirect_to evaluations_url }\n format.json { head :no_content }\n end\n end",
"def delete\n Client.delete(\"/kits/#{@id}\")\n end",
"def delete_aos_version(args = {}) \n delete(\"/aosversions.json/#{args[:aosVersionId]}\", args)\nend",
"def delete_aos_version_box(args = {}) \n delete(\"/aosversions.json/aosversionbox/#{args[:aosVersionBoxId]}\", args)\nend",
"def destroy\n @qc_evaluation = QcEvaluation.find(params[:id])\n @qc_evaluation.destroy\n\n respond_to do |format|\n format.html { redirect_to qc_evaluations_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @evaluation_detail = EvaluationDetail.find(params[:id])\n @evaluation_detail.destroy\n\n respond_to do |format|\n format.html { redirect_to evaluation_details_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @evaluation_type = EvaluationType.find(params[:id])\n @evaluation_type.destroy\n\n respond_to do |format|\n format.html { redirect_to evaluation_types_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @schedules_builder.destroy\n respond_to do |format|\n format.html { redirect_to schedules_builders_url, notice: 'Schedules builder was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def delete(req)\n @building.delete(req)\n save\n end",
"def destroy\n @xml_builder.destroy\n respond_to do |format|\n format.html { redirect_to xml_builders_url, notice: 'Xml builder was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n render_json_auto @survey.delete_logic_control_rule(params[:id].to_i) and return\n end",
"def destroy\n @kernel_build.destroy\n respond_to do |format|\n format.html { redirect_to kernel_builds_url, notice: \"Kernel build was successfully destroyed.\" }\n format.json { head :no_content }\n end\n end",
"def destroy\n @built_with.destroy\n respond_to do |format|\n format.html { redirect_to built_withs_url, notice: 'Built with was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def deleteExecution(execution_id)\n uri = URI(RUNDECKSERVER + ':' + RUNDECKPORT + '/api/12/execution/' + execution_id)\n http = Net::HTTP.new(uri.host, uri.port)\n headers = {'Content-Type'=> 'application/jsonr','X-RunDeck-Auth-Token'=> API_KEY }\n r = http.delete(uri.path, headers) \n return r\nend",
"def destroy\n @building_block_substep.destroy\n respond_to do |format|\n format.html { redirect_to building_block_substeps_path, notice: 'Building block substep was successfully destroyed.' }\n format.json { head :no_content }\n end\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Calculate and set the corrective_change parameter, based on the old_system_value of the property.
|
def calculate_corrective_change(old_system_value)
# Only idempotent properties, and cases where we have an old system_value
# are corrective_changes.
if @property_instance.idempotent? &&
!@property_instance.sensitive &&
!old_system_value.nil?
# If the values aren't insync, we have confirmed a corrective_change
insync = @property_instance.insync_values?(old_system_value, previous_value)
# Preserve the nil state, but flip true/false
@corrective_change = insync.nil? ? nil : !insync
else
@corrective_change = false
end
end
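
To make the flip explicit, here is how the insync? result maps onto corrective_change; this is just a restatement of the branch above, not additional behaviour.

# Given an idempotent, non-sensitive property with a known old system value:
#   insync_values?(old, previous) == true   =>  @corrective_change = false  (value already matched)
#   insync_values?(old, previous) == false  =>  @corrective_change = true   (drift was corrected)
#   insync_values?(old, previous) == nil    =>  @corrective_change = nil    (unknown state preserved)
# In every other case (non-idempotent, sensitive, or no old value) it is simply false.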
|
[
"def new_system_value(property, event, old_system_value)\n if event && event.status != \"success\"\n # For non-success events, we persist the old_system_value if it is defined,\n # or use the event previous_value.\n # If we're using the event previous_value, we ensure that it's\n # an array. This is needed because properties assume that their\n # `should` value is an array, and we will use this value later\n # on in property insync? logic.\n event_value = [event.previous_value] unless event.previous_value.is_a?(Array)\n old_system_value.nil? ? event_value : old_system_value\n else\n # For non events, or for success cases, we just want to store\n # the parameters agent value.\n # We use instance_variable_get here because we want this process to bypass any\n # munging/unmunging or validation that the property might try to do, since those\n # operations may not be correctly implemented for custom types.\n property.instance_variable_get(:@should)\n end\n end",
"def old_value; end",
"def old=(value); end",
"def set_change_values\n # get the previous survey\n prev_verdict = Verdict.previous_survey(self.time_period)\n compute_change_values(self, prev_verdict)\n\n return true\n end",
"def set_change_values\n # get the previous survey\n prev_rs = ReformSurvey.previous_survey(self.reform_id, self.time_period)\n compute_change_values(self, prev_rs)\n\n return true\n end",
"def updated_changed_properties(new_values, current_values)\n new_values.each_pair do |property_name, new_value|\n old_value = current_values[property_name]\n\n next unless !new_value.nil? && (old_value != new_value)\n Chef::Log.debug(\"#{self} property '#{property_name}' has changed to '#{new_value}'\")\n\n if managed_resource.properties.include?(property_name)\n # junos-ez-stdlib prefers some values as symbols\n managed_resource[property_name] = if VALUES_TO_SYMBOLIZE.include?(new_value)\n new_value.to_sym\n else\n new_value\n end\n else\n error_message = \"#{self} don't know how to manage property :#{property_name}.\"\n error_message << \" Known properties include: :#{managed_resource.properties.join(', :')}\"\n raise ArgumentError, error_message\n end\n end\n # return Hash of updated properties\n managed_resource.should\n end",
"def set_change_values\n if self.update_change_values == true\n if self.time_periods.length == 1\n # there is only one record, so all change values should be nil\n self.time_periods.each do |tp|\n tp.overall_change = nil\n tp.data.each do |datum|\n datum.change = nil\n end\n end\n else\n # compare each time period with the previous time period and compute the change value\n # start with the most recent and go backwards\n (self.time_periods.length-1).downto(1).each do |index|\n current = self.time_periods[index]\n previous = self.time_periods[index-1]\n\n # if this is composite indicator, then update the overall_change in time period\n if self.is_composite?\n current.overall_change = compute_change(current.overall_value, previous.overall_value)\n end\n\n # compute change for the data values\n current.data.each_with_index do |current_datum, current_index|\n if self.is_country?\n # find country in previous\n previous_datum = previous.data.select{|x| x.country_id == current_datum.country_id}.first\n if previous_datum.present?\n current_datum.change = compute_change(current_datum.value, previous_datum.value)\n else\n # could not find the previous matching record, so reset to nil\n current_datum.change = nil\n end\n\n elsif self.is_composite?\n # find index in previous\n previous_datum = previous.data.select{|x| x.index_id == current_datum.index_id}.first\n if previous_datum.present?\n # get change_multiplier value for this index\n index = self.indices.select{|x| x.id == current_datum.index_id}.first\n current_datum.change = compute_change(current_datum.value, previous_datum.value, index.present? ? index.change_multiplier : 1)\n else\n # could not find the previous matching record, so reset to nil\n current_datum.change = nil\n end\n\n else # basic\n previous_datum = previous.data[current_index]\n if previous_datum.present?\n current_datum.change = compute_change(current_datum.value, previous_datum.value)\n else\n # could not find the previous matching record, so reset to nil\n current_datum.change = nil\n end\n end\n end\n end\n end\n end\n\n return true\n end",
"def calculate_report_corrective_change\n @corrective_change = resource_statuses.any? do |name, status|\n status.corrective_change\n end\n end",
"def change_to(new_value); self.value=new_value; end",
"def compute_government_change(current_value, previous_value)\n diff = current_value - previous_value\n change = nil\n if diff < 0\n change = -1\n elsif diff > 0\n change = 1\n else\n change = 0\n end\n\n return change\n end",
"def change_rate\n return nil if @value_1.nil? || @value_2.nil?\n\n if @value_1 == 0\n if @value_2 > 0\n return 1\n elsif @value_2 < 0\n return -1\n else\n return 0\n end\n end\n\n (@value_2 - @value_1) / @value_1.to_f\n end",
"def changeImpellerEfficiency(impeller_eff)\n \n # Get the existing motor efficiency\n existing_motor_eff = self.motorEfficiency\n\n # Calculate the new total efficiency\n new_total_eff = existing_motor_eff * impeller_eff\n \n # Set the revised motor and total fan efficiencies\n self.setFanEfficiency(new_total_eff)\n \n end",
"def actual_value=(value)\n @actual_value = value\n end",
"def update_primary_property\n return unless primary_property\n\n primary_property.update(property_params(primary_property_params))\n # primary_property.update_mortgage_payment_amount\n update_liabilities(primary_property, primary_property_params)\n end",
"def changeMotorEfficiency(motor_eff)\n \n # Calculate the existing impeller efficiency\n existing_motor_eff = self.motorEfficiency\n existing_total_eff = self.fanEfficiency\n existing_impeller_eff = existing_total_eff / existing_motor_eff\n \n # Calculate the new total efficiency\n new_total_eff = motor_eff * existing_impeller_eff\n \n # Set the revised motor and total fan efficiencies\n self.setMotorEfficiency(motor_eff)\n self.setFanEfficiency(new_total_eff)\n \n end",
"def propagate_changes=(value)\n @propagate_changes = value\n end",
"def rate_of_change\n return @_rate_of_change if defined?(@_rate_of_change)\n\n @_rate_of_change = (first_half_slope + last_half_slope) / 2.0\n end",
"def change_system(system)\n system = System[system] unless system.kind_of?(System)\n target_oom = Math.log10(self.unit.factor)\n bu = self.class.best_fit(target_oom, system)\n convert(bu)\n end",
"def update_property_value_for_app_comp\n app_comp_id = application_component.id\n new_property_value = current_property_values.map(&:value)\n property_val_objs = PropertyValue.values_for_app_comp(app_comp_id)\n property_val_objs.each do |property_val_obj|\n property_val_obj.update_attribute( \"value\", new_property_value.to_s )\n end\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
If it's a failure, use 'err', else use either the resource's log level (if available) or 'notice'.
|
def log_level
status == "failure" ? :err : (@default_log_level || :notice)
end
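
Concretely, the one-liner resolves to one of three outcomes; these are just the branches of the expression spelled out, not new behaviour.

# status == "failure"                                  => :err
# any other status, @default_log_level set (e.g. :info) => :info    (resource's own level wins)
# any other status, @default_log_level nil              => :notice  (fallback)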
|
[
"def rollbar_level(severity)\n [:debug, :info, :warning, :error, :critical, :error][severity] || :error\n end",
"def resolve_log_level\n if auto_log_level?\n if using_output_formatter?\n :warn\n else\n :info\n end\n else\n Chef::Config[:log_level]\n end\n end",
"def severity_str\n case severity\n when 3 then \"Low\"\n when 2 then \"Medium\"\n when 1 then \"High\"\n end\n end",
"def log_level\n @log_level ||= :warn\n end",
"def severity_str\n return \"\" if severity.nil?\n case severity\n when 3 then \"Low\"\n when 2 then \"Medium\"\n when 1 then \"High\"\n end\n end",
"def result_message_severity(xccdf_status)\n case xccdf_status\n when 'fail'\n 'error'\n when 'notapplicable'\n 'warning'\n else\n 'info'\n end\n end",
"def sarif_level(severity)\n case severity\n when \"LOW\"\n SARIF_WARNINGS[:warning]\n when \"MEDIUM\"\n SARIF_WARNINGS[:error]\n when \"HIGH\"\n SARIF_WARNINGS[:error]\n else\n SARIF_WARNINGS[:note]\n end\n end",
"def severity()\n @logger.level\n end",
"def severity_lookup(severity, fallback = :unknown)\n LOG_LEVELS[severity.to_sym] || LOG_LEVELS[fallback]\n end",
"def log_level\n RAILS_DEFAULT_LOGGER.level\n end",
"def convert_level(val)\n if val.is_a? String\n begin\n val = Logger.const_get(val.upcase)\n rescue NameError\n raise \"Invalid log level '#{val}' specified.\"\n end\n end\n\n return val\n end",
"def failure_notice(message)\n { type: 'error', message: message }\n end",
"def format_error(err) Ripl::Runner.format_error(err) end",
"def severity_to_s\n Severity[self.severity].to_s.upcase\n end",
"def log_level\n return unless options[:log_level]\n return @log_level if @log_level\n\n level = options[:log_level].downcase.to_sym\n unless valid_log_level?(level)\n level = :info\n banner \"WARNING - invalid log level specified: \" \\\n \"\\\"#{options[:log_level]}\\\" - reverting to :info log level.\"\n end\n\n @log_level = level\n end",
"def severity_string\n s = severity_symbol\n if s then\n s.to_s.upcase\n else\n \"UNKNOWN\"\n end\n end",
"def severity\n @severity || SEVERITY_LEVELS.first\n end",
"def severity_string severity\n\n\t\t::Pantheios::Core.severity_string severity\n\tend",
"def severity= severity\n @logger.level = severity\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Remove the node if it is in a list. You should be careful to only remove nodes that are part of this list.
|
def remove?(node)
if node.head
return remove!(node)
end
return nil
end
|
[
"def remove_node\n # Interface method\n end",
"def remove_node(node)\n\t\tif is_node(node)\n\t\t\tindexing.remove_element(node)\n\t\t\ttrue\n\t\telse\n\t\t\t#raise ArgumentError, 'The node is invalid (non Node object or node isn\\'t in the graph)'\n\t\t\tfalse\n\t\tend\n\tend",
"def remove\n return node_set.delete(self) if node_set\n end",
"def remove_node(node)\n has_node = HAS_NODE_ROLE.find_by_node_id_and_role_instance_id node.id, self.id\n HAS_NODE_ROLE.delete has_node\n end",
"def remove(x)\n if x == head\n self.head = head.next\n x.next = nil\n else\n\n el = head\n while el and el != x\n prev = el \n el = el.next\n end\n\n raise ListError, \"Element not found\" unless el\n\n prev.next = el.next\n el.next = nil\n end\n x\n end",
"def remove_node(node)\n cspsearchpath.delete(node)\n end",
"def delete\n @parent_list.node_will_be_removed(self) if @parent_list\n @next.prev = @prev if @next\n @prev.next = @next if @prev\n @prev = nil\n @next = nil\n data = @data\n @data = nil\n data\n end",
"def node_remove(node)\n return unless node_present? node\n nodes.delete prepare_key(node)\n end",
"def delete(node)\n examined_node = @head\n after_examined_node = @head.next\n\n if examined_node == node #to be deleted item is first on the list\n remove_front()\n elsif node == @tail\n remove_tail()\n else\n\n while examined_node != node\n before_examined_node = examined_node\n\n if examined_node.next == nil\n\n else\n\n examined_node = examined_node.next\n end\n after_examined_node = examined_node.next\n\n if examined_node == node\n temp = before_examined_node\n before_examined_node.next = after_examined_node\n end\n end\n end\n end",
"def remove(node)\n # Traverse looking for the node\n sets = []\n prev_link = nil\n each_link do |link|\n if link[:ref_id] == node.id\n sets << [prev_link, link]\n next # in case adjacent removal node links\n end\n prev_link = link\n end\n # Now we can just do the join and we're out\n sets.each do |prev_link, the_link|\n if prev_link\n prev_link[:next_id] = the_link[:next_id]\n else\n self[:head_id] = the_link[:next_id]\n end\n # Mark removal\n element_removed!\n end\n # Return the node if any were removed\n node unless sets.empty?\n end",
"def remove_node(node)\n\t\t\tif has_node?(node)\n\t\t\t\tpull_edges_of(node)\n\t\t\t\t@nodes.delete(node)\n\t\t\t\t@order -= 1\n\t\t\tend\n\t\t\tself\n\t\tend",
"def remove (data)\n if @head.data == data\n @head = @head.next # Move head to the next node\n else\n current_node = @head.next # Go the next element\n prev_node = @head\n while current_node\n if current_node.data == data\n prev_node.next = current_node.next\n return @head # Head didnt change\n end\n prev_node = current_node\n current_node = current_node.next\n end\n\n return @head\n end\n end",
"def remove_node(node)\n @nodes_being_worked_on.delete(node)\n @nodes.delete(node)\n # the last edge keeps getting ignored when you remove this, so handling the final case\n assign_node(@edges[0][1]) if @edges.size == 1\n @edges.reject! { |edge| edge.include?(node) }\n end",
"def remove\n if @children.empty?\n @parent.remove_child(self)\n @prev.next = @next if @prev\n @next.prev = @prev if @next\n true\n else\n false\n end\n end",
"def deleteLinkedlistNode(node)\n nextnode = node.next\n if node && nextnode\n node.next = nextnode.next\n node.value = nextnode.value\n end\nend",
"def remove_from(list, opts={})\n list = validate_list(list)\n list.socket.listUnsubscribe({:id => list.id, :email_address => self.email}.merge opts) == \"true\"\n end",
"def remove_child_from_list_positions(entry)\n if children.include?(entry)\n decrement_child_positions(entry.list_position)\n end\n children\n end",
"def remove_node\n cspsearchpath.delete(@label)\n end",
"def remove_from_back\n return \"Empty list...\" if @head.nil?\n\n node = @head\n while node != @tail\n if node.next.next.nil?\n node.next = nil\n @tail = node\n break\n else\n node = node.next\n end\n end\n\n if @head == @tail\n @head = nil\n @tail = nil\n end\n\n reduce_list_length\n self.display\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
update survey_answer, journal_entry.answered_at, csv_survey_answers.age, csv_score_rapports.age | created_at, csv_answers.age
|
def update_date
entry = JournalEntry.find(params[:journal_entry][:id])
entry.follow_up = params[:journal_entry][:follow_up]
date_param = params[:journal_entry][:created]
if date_param.blank?
entry.save
flash[:notice] = "Opfølgning er rettet"
    redirect_to journal_path(entry.journal) and return
end
sep = date_param.include?("/") && "/" || date_param.include?("-") && "-"
d = date_param.split(sep).map {|p| p.to_i }
date = [d[2],d[1],d[0]]
created = Date.new(*date)
entry.update_date(created)
# age = ((created - entry.journal.birthdate).to_i / 365.25).floor
# entry.survey_answer.age = age
# entry.answered_at = created
# entry.save
# entry.survey_answer.created_at = created
# entry.survey_answer.save
# csv_score_rapport = CsvScoreRapport.find_by_survey_answer_id(entry.survey_answer_id)
# if csv_score_rapport
# csv_score_rapport.age = age if csv_score_rapport
# csv_score_rapport.created_at = created
# csv_score_rapport.save
# end
# csv_survey_answer = CsvSurveyAnswer.find_by_journal_entry_id(entry.id)
# if csv_survey_answer
# csv_survey_answer.created_at = created
# csv_survey_answer.age = age if csv_survey_answer
# csv_survey_answer.save
# end
# score_rapport = ScoreRapport.find_by_survey_answer_id(entry.survey_answer_id)
# if score_rapport
# score_rapport.age = age
# score_rapport.created_at = created
# score_rapport.save
# end
flash[:notice] = "Besvarelsesdato og opfølgning er rettet"
redirect_to journal_path(entry.journal)
end
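
A quick worked example of the date handling above, using a hypothetical input value; the parser accepts either "/" or "-" as separator and expects day-month-year order.

# params[:journal_entry][:created] = "24-12-2010"
# sep     => "-"
# d       => [24, 12, 2010]
# date    => [2010, 12, 24]          # reordered to year, month, day
# created => Date.new(2010, 12, 24)  # 24 December 2010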
|
[
"def update\n # TODO: try to retrieve the survey_sheet then update, avoid hacking by faking a id of non-self survey_sheet\n \n if (has_survey_responses(params))\n #render :text => \"in update, params has responses, so we can parse and update!\"\n # * parse and update the survey_sheet and responses, see if we can DRY by doing sth \n # in helper or rails multiple model updating\n sheet_id = params[\"id\"]\n survey_id = params[\"survey_id\"]\n \n# @survey_sheet = SurveySheet.find(:first, :include =>:responses ,:conditions => [\"user_id = :user_id and survey_id = :survey_id\", \n# { :user_id => self.current_user.id, :survey_id => survey_id } ] )\n \n @survey_sheet = SurveySheet.find(:first, :include =>:responses ,:conditions => [\"id = :sheet_id\", \n { :sheet_id => sheet_id } ] )\n \n logger.debug \"===============================================================\"\n logger.debug \"@survey_sheet info: #{@survey_sheet.id}\"\n logger.debug \"@survey_sheet has responses size: #{@survey_sheet.responses.size}\"\n \n # TODO: make it a transaction to avoid dirty records\n ts = Time.now\n \n # esp, delete method will rerturn nil instead of processed array\n prev_vers = @survey_sheet.sheet_histories.collect(&:version_num)\n prev_vers.delete(nil)\n if prev_vers and not prev_vers.empty? # select the max and then plus one \n current_ver = prev_vers.max + 1\n else # assign 1 to the first\n current_ver = 1\n end\n \n # create a sheet version entry\n @a_version = SheetHistory.new\n @a_version.version_num = current_ver\n @a_version.user_id = current_user.id\n @a_version.when_submit = ts\n @survey_sheet.sheet_histories << @a_version\n @survey_sheet.responses.each do | response | \n response.update_attribute(:updated_at ,ts )\n response.update_attribute(:answer_text, params[\"response#{response.question_id}\"] )\n response.save!\n end\n @survey_sheet.save!\n @survey_sheet = SurveySheet.find_by_id(@survey_sheet.id)\n render :action => \"edit\"\n else\n render :text => \"There is no response for this survey sheet!\"\n end\n end",
"def answer_attributes=(answer_attributes) \n answer_attributes.each_pair do |answer_id, params_hash|\n answers.find(answer_id.to_i).update_attributes(params_hash)\n end \n end",
"def UpdateExam(chartno,accessionno,age,orderdate,ordertime,examtype,examdate,examtime,status,drorderidname,dronidname,drreportidname,examdetail,drfrom,chargeby,modality,divisionon,zone,isdanger)\n @chartno=chartno\n @accessionno=accessionno\n @age=age\n @orderdate=orderdate\n @ordertime=ordertime\n @examtype=examtype\n @examdate=examdate\n @examtime=examtime\n @status=status\n @drfrom=drfrom\n @drorderidname=drorderidname\n @dronidname=dronidname\n @drreportidname=drreportidname\n @examdetail=examdetail\n @chargeby=chargeby\n @modality=modality\n @divisionon=divisionon\n @zone=zone\n @isdanger=isdanger\n \n sql=\"update \"\n sql+=\"cris_exam_online \"\n sql+=\"set \"\n sql+=\"system='HIS_IN', \"\n sql+=\"chartno='#{@chartno}', \"\n sql+=\"age='#{@age}', \"\n sql+=\"orderdate='#{@orderdate}', \" \n sql+=\"ordertime='#{@ordertime}', \"\n sql+=\"type='#{@examtype}', \"\n sql+=\"examdate='#{@examdate}', \"\n sql+=\"examtime='#{@examtime}', \"\n sql+=\"status='#{@status}', \"\n sql+=\"accessionnumber='#{@accessionno}', \"\n sql+=\"dr_from='#{@drfrom}', \"\n sql+=\"dr_order='#{@drorderidname}', \"\n sql+=\"dr_on='#{@dronidname}', \"\n sql+=\"dr_report='#{@drreportidname}', \"\n sql+=\"uploadcode='10', \"\n sql+=\"examdetail='#{@examdetail}', \"\n sql+=\"his_reqno='#{@accessionno}', \"\n sql+=\"chargeby='#{@chargeby}', \"\n sql+=\"division_on='#{@divisionon}', \"\n sql+=\"modality='#{@modality}', \"\n sql+=\"zone='#{@zone}', \"\n sql+=\"isdanger='#{@isdanger}' \"\n sql+=\"where \"\n sql+=\"uni_key='#{@accessionno}' \"\n @logger.info(sql)\n \n @db.dbh.do(sql)\n if @db::dbtype==\"ORA\"\n @db.dbh.commit()\n end\n end",
"def update\n @claim_audit_entry.delete_prev_detail_records\n parsing_answers\n respond_to do |format|\n if @claim_audit_entry.update(claim_audit_entry_params)\n @claim_audit_entry.claim_awaiting_audit.update(:last_reviewed_date=>Date.today, :new_upload => true)\n format.html { redirect_to root_path, notice: 'Claim audit entry was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { redirect_to action: 'edit',:id=>@claim_audit_entry.id,notice: \"#{@claim_audit_entry.errors.full_messages.first}\" }\n format.json { render json: @claim_audit_entry.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n # Using first or create allows users to update their answer (or create it obviously)\n answer = Answer.where(:question_id => params[:question_id], :user_id => current_user.id).first_or_create! do |answer|\n answer.user_id = current_user.id\n answer.survey_id = params[:survey_id]\n answer.group_id = current_user.group.id\n answer.question_id = params[:question_id]\n answer.answer = params[:answer]\n\n @created = true\n\n if answer.save\n q = IQuestion.find_by_id(params[:question_id])\n\n if q.present?\n qdes = q.description\n else\n qdes = \"Orphaned question\"\n end\n\n s = ISurvey.find_by_id(params[:survey_id])\n\n if s.present?\n sdes = s.title\n else\n sdes = \"Orphaned survey\"\n end\n #sendCable(\"#{current_user.name} <#{current_user.email}>\", \"[#{sdes}] #{qdes}:\", params[:answer])\n\n render json:{\"continue\" => true}\n else\n render json:{\"continue\" => false}\n end\n end\n if !@created && answer\n answer.answer = params[:answer]\n if answer.save\n #sendCable(\"#{current_user.name} <#{current_user.email}>\", \"Updated Answer: \", params[:answer])\n render json:{\"continue\" => true}\n else\n render json:{\"continue\" => false}\n end\n end\n end",
"def update_answers\n last_user_answers.each do |lua|\n lua.correct= (lua.response.to_f == self.response.to_f)\n lua.save!\n end\n end",
"def update()\n sql = \"UPDATE members SET (name, age) = ($1, $2) WHERE id = $3\"\n values = [@name, @age, @id]\n SqlRunner.run(sql, values)\n end",
"def update_award! \n increment_award\n update_expiration\n end",
"def update()\n sql = \"UPDATE bakes SET (bake_date, score, starter_time, leaven_time,\n autolyse_time, add_salt_time, bulk_time, shape_time, prove_time,\n bake_time, cool_time)\n = ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11)\n WHERE id = $12\"\n values = [@bake_date, @score, @starter_time, @leaven_time,\n @autolyse_time, @add_salt_time, @bulk_time, @shape_time,\n @prove_time, @bake_time, @cool_time, @id]\n SqlRunner.run(sql,values)\n end",
"def audit\n if self.c_audit.nil?\n answers = Answer.where(:participant_id => self.id, :page => [5, 6]).order(\"id ASC\")\n self.c_audit = answers.reduce(0) do |sum, a|\n sum + audit_score(a.question_id, a.value)\n end\n self.save\n end\n self.c_audit\n end",
"def update_result_caches\n award_counts = Result.where(:team_id=>id).group(:award).count()\n self.count_gold = award_counts[\"gold\"] || 0\n self.count_silver = award_counts[\"silver\"] || 0\n self.count_bronze = award_counts[\"bronze\"] || 0\n self.count_total = self.count_gold + self.count_silver + self.count_bronze\n\n award_points = Result.where(:team_id=>id).group(:award).sum(:points_team)\n self.points_gold = award_points[\"gold\"] || 0\n self.points_silver = award_points[\"silver\"] || 0\n self.points_bronze = award_points[\"bronze\"] || 0\n self.points_total = self.points_gold + self.points_silver + self.points_bronze\n\n save()\n end",
"def export_survey_answers(csv_survey_answers, survey_id)\n survey = Survey.find(survey_id)\n header = journal_csv_header.keys + ['follow_up'] + survey.variables.map {|v| v.var}\n \n csv_rows = csv_survey_answers.inject([]) do |rows,csa|\n puts \"csa.journal.nil? #{csa.inspect} #{csa.journal.inspect} sa: #{csa.survey_answer.inspect}\" if csa.journal.nil?\n\n journal_entry = JournalEntry.where(\n survey_answer_id: csa.survey_answer_id,\n center_id: csa.center_id, \n group_id: csa.team_id).first\n info = \n if !journal_entry.nil? && journal_entry.answer_info\n journal_entry.answer_info.split(\";\")\n elsif csa.journal_info\n csa.journal_info.split(\";;\")\n elsif csa.survey_answer\n csa.survey_answer.info.values\n else\n puts \"no answer_info found in journal_entry or survey_answer: #{csa.inspect} je: #{csa.journal_entry.inspect}\"\n [\"ingen info: sa_id: #{csa.survey_answer_id} csa: #{csa.inspect} \"]\n end\n\n if !csa || !csa.answer\n puts \"No csa: #{info.inspect}\"\n end\n rows << info + [FollowUp.to_value(csa.follow_up)] + (csa && csa.answer && csa.answer.split(';;') || [] )\n rows\n end\n\n output = CSV.generate(:col_sep => \";\", :row_sep => :auto, :encoding => 'utf-8') do |csv_output|\n csv_output << header\n csv_rows.each { |line| csv_output << line }\n end\n end",
"def update_resource_flags\n \n if @resource.helpful_avg >= 2.5 && \n Interaction.where(resource_id: @resource.id).where(\"helpful_q >= ?\", 3).count >= 5\n \n @resource.tentative = false \n @resource.approved = true \n @resource.save \n elsif @resource.helpful_avg < 2.0 && \n Interaction.where(resource_id: @resource.id).where(\"helpful_q <= ?\", 2).count >= 5\n \n @resource.flagged = true \n @resource.save \n end \n \n end",
"def update_racer_info(racer_ids)\n racers = Racer.find(racer_ids)\n for r in racers\n results = Result.where(racer_id: r.id)\n r.update_attribute(:race_count, results.count)\n r.update_attribute(:fav_bib, results.includes(:race).order(\"races.date DESC\").pluck(:bib).first)\n end\n\n update_streak_calendar(racer_ids)\n end",
"def update_result_caches\n award_counts = Result.where(:registration_id=>id).group(:award).count()\n self.count_gold = award_counts[\"gold\"] || 0\n self.count_silver = award_counts[\"silver\"] || 0\n self.count_bronze = award_counts[\"bronze\"] || 0\n self.count_total = self.count_gold + self.count_silver + self.count_bronze\n\n award_points = Result.where(:registration_id=>id).group(:award).sum(:points_athlete)\n self.points_gold = award_points[\"gold\"] || 0\n self.points_silver = award_points[\"silver\"] || 0\n self.points_bronze = award_points[\"bronze\"] || 0\n self.points_total = self.points_gold + self.points_silver + self.points_bronze\n\n save()\n end",
"def update_csv\n\tCSV.open(@csvname, \"wb\") do |csv|\n\t\tcsv << ['seqno', 'date']\n\t\tcsv << [@currentSeqno, @currentDate]\n\tend\nend",
"def updateFoodStoreFields(prams , average)\n food = FoodStore.find(prams[:food_store_id])\n\t if food.curr_sum_ambience == nil\n\t food.update(curr_sum_ambience:0,curr_sum_price:0,curr_sum_service:0,curr_sum_food:0)\n\t end\n food.update(curr_sum:food.curr_sum + average , num_of_rating:food.num_of_rating + 1)\n \n\t food.update(curr_sum_ambience:food.curr_sum_ambience + prams[:ambience])\n food.update(curr_sum_food:food.curr_sum_food + prams[:foodquality] )\n food.update(curr_sum_price:food.curr_sum_price + prams[:pricing] )\n food.update(curr_sum_service:food.curr_sum_service + prams[:service] )\n\n food.update(ambience_average:((food.curr_sum_ambience/food.num_of_rating)*100).floor / 100.0)\n food.update(service_average:((food.curr_sum_service/food.num_of_rating)*100).floor / 100.0)\n food.update(foodquality_average:((food.curr_sum_food/food.num_of_rating)*100).floor / 100.0)\n food.update(pricing_average:((food.curr_sum_price/food.num_of_rating)*100).floor / 100.0)\n\n food.update(sarapp_rating:((food.curr_sum/food.num_of_rating)*100).floor / 100.0)\n end",
"def update_audit_log\n \t\taudit_log = AuditLog.where(user_id: self.user_id, start_date: (self.date - 7.days..self.date)).last \n# \t\taudit_log = AuditLog.where(user_id: Post.last.user_id, start_date: (date - 7.days..date)).last \n# \t\taudit_log = AuditLog.where(user_id: user_id, start_date: (date - 7.days..date)).last \n#\t\taudit_log = Post.last.user.audit_logs.last\n\t\tif audit_log.user.posts.last.rejected?\n\t\t\taudit_log.pending! if audit_log\n\t\telse\n\t\t\taudit_log.confirmed! if audit_log\n\t\tend \t\t\n \tend",
"def update_rate_for_philhealth(admission_type)\n @path = \"../csv/inpatient_ordered_items.csv\" if admission_type == 'inpatient'\n @path = \"../csv/or_ordered_items.csv\" if admission_type == 'outpatient'\n\n my_file = CSV.read(@path)\n count = my_file.count\n w = []\n x = 1\n count.times do\n w << my_file[x][0]\n if x + 1 == my_file.count\n else\n x += 1\n end\n end\n\n Database.connect\n @info = []\n if admission_type == 'inpatient'\n w.each do |o|\n @info << get_item_rate(:inpatient => true, :item_code => o)\n end\n elsif admission_type == 'outpatient'\n w.each do |o|\n @info << get_item_rate(:outpatient => true, :item_code => o)\n end\n end\n\n line_arr = File.readlines(@path)\n File.open(@path, \"w\") do |f|\n line_arr = \"MSERVICE_CODE,RATE,MRP_TAG,PH_CODE,ORDER_TYPE,DESCRIPTION\"\n line_arr.each{|line| f.puts(line)}\n end\n\n @x = 0\n @info.each do |s|\n add_line_to_csv(@path, s)\n if (@x + 1) == @info.count\n else\n @x += 1\n end\n end\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
GET /post302s/new GET /post302s/new.xml
|
def new
@post302 = Post302.new
respond_to do |format|
format.html # new.html.erb
format.xml { render :xml => @post302 }
end
end
|
[
"def create\n @post302 = Post302.new(params[:post302])\n\n respond_to do |format|\n if @post302.save\n format.html { redirect_to(@post302, :notice => 'Post302 was successfully created.') }\n format.xml { render :xml => @post302, :status => :created, :location => @post302 }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @post302.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def new\n @post301 = Post301.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @post301 }\n end\n end",
"def new\n @redirect_url = RedirectUrl.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @redirect_url }\n end\n end",
"def new\n @redirect = Shorturl::Redirect.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @redirect }\n end\n end",
"def new\n @post307 = Post307.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @post307 }\n end\n end",
"def new\n @post306 = Post306.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @post306 }\n end\n end",
"def new\n @post200 = Post200.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @post200 }\n end\n end",
"def new\n @post443 = Post443.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @post443 }\n end\n end",
"def new\n @post303 = Post303.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @post303 }\n end\n end",
"def new\n @post304 = Post304.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @post304 }\n end\n end",
"def new\n @post203 = Post203.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @post203 }\n end\n end",
"def new\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => new_vurl }\n end\n end",
"def new\n @post300 = Post300.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @post300 }\n end\n end",
"def new\n @post308 = Post308.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @post308 }\n end\n end",
"def new\n @post377 = Post377.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @post377 }\n end\n end",
"def new\n @post404 = Post404.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @post404 }\n end\n end",
"def new\n @post225 = Post225.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @post225 }\n end\n end",
"def new\n @post310 = Post310.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @post310 }\n end\n end",
"def new\n @post220 = Post220.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @post220 }\n end\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
POST /post302s POST /post302s.xml
|
def create
@post302 = Post302.new(params[:post302])
respond_to do |format|
if @post302.save
format.html { redirect_to(@post302, :notice => 'Post302 was successfully created.') }
format.xml { render :xml => @post302, :status => :created, :location => @post302 }
else
format.html { render :action => "new" }
format.xml { render :xml => @post302.errors, :status => :unprocessable_entity }
end
end
end
|
[
"def r302(*args)\n add_rule :r302, *args\n end",
"def new\n @post302 = Post302.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @post302 }\n end\n end",
"def post_xml_64(xml=:example_response)\n post \"/auth/saml/callback\", {'SAMLResponse' => load_xml_64(xml)}\nend",
"def follow_redirects\n while last_response.status == 302\n follow_redirect!\n end\n end",
"def update\n @post302 = Post302.find(params[:id])\n\n respond_to do |format|\n if @post302.update_attributes(params[:post302])\n format.html { redirect_to(@post302, :notice => 'Post302 was successfully updated.') }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @post302.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def destroy\n @post302 = Post302.find(params[:id])\n @post302.destroy\n\n respond_to do |format|\n format.html { redirect_to(post302s_url) }\n format.xml { head :ok }\n end\n end",
"def redirect(location, status = '302'); request.redirect(location, status); end",
"def create\n @post301 = Post301.new(params[:post301])\n\n respond_to do |format|\n if @post301.save\n format.html { redirect_to(@post301, :notice => 'Post301 was successfully created.') }\n format.xml { render :xml => @post301, :status => :created, :location => @post301 }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @post301.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def post_form(params, url, wait_for_next_state=nil, wait_before_post=nil)\n if wait_before_post != nil and wait_before_post.is_a? Integer\n 0.upto wait_before_post do |value|\n sleep(1)\n print \".\"\n end\n end\n validate_params(params, url)\n uri = URI.parse(url)\n req = Net::HTTP::Post.new(uri.request_uri)\n if @atl_token != \"\"\n params[\"atl_token\"] = @atl_token\n end\n req.set_form_data(params)\n req['Cookie'] = get_cookie\n use_ssl = false\n if uri.instance_of? URI::HTTPS\n use_ssl = true\n end\n if @host_url \n req.add_field(\"Host\", @host_url)\n end\n Net::HTTP.start(uri.hostname, uri.port, use_ssl: use_ssl, verify_mode: OpenSSL::SSL::VERIFY_NONE, :read_timeout => 1000) do |http|\n response = http.request(req)\n puts \"Response: #{response.inspect}\".yellow\n puts \"Location: #{response['location'].to_s}\".yellow\n puts \"Current : #{@url}#{get_current_url}\".yellow\n # puts \"BODY: #{response.body}\"\n redirected = true\n if response['location'] and !\"#{@url}/#{get_current_url}\".include? response['location']\n redirection_url = compose_redirection_url(response['location'])\n @current_url = URI.parse(redirection_url).request_uri\n else\n puts \"Was not redirected, staying on page...\".yellow\n redirected = false\n end\n puts \"Redirected to: #{@current_url.to_s}\".yellow\n # future REST requests might use different response codes\n # For example a 201 might be returned where there is no content, but still a success\n # In case 200 is here there may be an error in the form, maybe add some checking\n puts \"Returned response code: #{response.code}, #{response.code.to_i != 302 and response.code.to_i != 200}\"\n if response.code.to_i != 302 and response.code.to_i != 200\n puts response.inspect.red\n puts response.body.inspect.red\n raise \"There is a problem while calling #{url} with params #{params}\"\n end\n # follow redirects, if redirected\n if redirected and redirected == true\n puts \"Doing the redirection... #{redirected}\".yellow\n get_stage_and_fetch_cookie(\"#{@url}#{get_current_url}\")\n else\n # in case the app is waiting for an event\n if wait_for_next_state != nil\n if wait_for_next_state.is_a? Integer\n actual_url = @current_url\n while wait_for_next_state > 0 and @current_url == actual_url\n puts \"Sleeping for 5, #{wait_for_next_state}\".yellow\n sleep(5)\n # this will change current_url in case of redirection\n get_stage_and_fetch_cookie(\"#{@url}\")\n wait_for_next_state = wait_for_next_state-1\n if wait_for_next_state < 0\n abort \"Waited too long, check the config...\"\n end\n end\n end\n end\n end\n puts \"Done posting #{url}\".yellow\n end\n end",
"def redirect(uri)\n [ 303, {'Content-Length'=>'0', 'Content-Type'=>'text/plain',\n 'Location' => uri},\n [] ]\n end",
"def do_post_redirect(post_redirect)\n uri = post_redirect.uri\n\n session[:post_redirect_token] = post_redirect.token\n\n # XXX what is the built in Ruby URI munging function that can do this\n # choice of & vs. ? more elegantly than this dumb if statement?\n if uri.include?(\"?\")\n if uri.include?(\"#\")\n uri.sub!(\"#\", \"&post_redirect=1#\")\n else\n uri += \"&post_redirect=1\"\n end\n else\n if uri.include?(\"#\")\n uri.sub!(\"#\", \"?post_redirect=1#\")\n else\n uri += \"?post_redirect=1\"\n end\n end\n redirect_to uri\n end",
"def post(opts={})\n opts = merge_referer_into_opts(opts)\n http = @request.post opts\n http.callback { munge_output }\n http\n end",
"def follow_redirect!\n assert @response, \"No response made\"\n assert_response :redirect, \"No redirect made\"\n follow_redirect\n end",
"def follow_redirect?; end",
"def save_webpage_redirects\n redirections.each do |redirection|\n redirect_url = redirection.headers[:location]\n webpage_redirect = WebpageRedirect.new(url: redirect_url)\n @webpage_response.webpage_redirects << webpage_redirect\n end\n end",
"def forward(data)\n timestamp = Time.now\n http = Net::HTTP.new(url.host, url.port)\n \n http.read_timeout = http_read_timeout\n http.open_timeout = http_open_timeout\n \n \n request = Net::HTTP::Post.new(url.request_uri)\n request['Authorization'] = Authorization.new(data, timestamp).header\n request['Content-Type'] = 'application/octet-stream'\n request.body = data\n \n if secure\n http.use_ssl = true\n http.ca_file = OpenSSL::X509::DEFAULT_CERT_FILE if File.exist?(OpenSSL::X509::DEFAULT_CERT_FILE)\n http.verify_mode = OpenSSL::SSL::VERIFY_PEER\n else\n http.use_ssl = false\n end\n \n response = nil\n \n begin\n response = http.request(request)\n rescue Exception => e\n puts \"Timeout while connecting to the Sentry server\"\n end\n end",
"def test_post_numbers_multiple_values_redirect\n # post 'post_numbers', n: 'JCV', a: '41', num_1: '10', num_2: '20', num_3: '30' # num_1, etc correspond directly to name=\"num_1\" in input\n post '/post_numbers?un=JCV&ua=41', num_1: '10', num_2: '20', num_3: '30' # variation of former with browser address field value\n # un/ua trail: get_numbers.erb (action=\"post_numbers?un=<%= usn %>&ua=<%= usa %>\")\n # > app.rb (backend_name_5 = params[:un], backend_age_3 = params[:ua])\n follow_redirect! # need to include this line to trace the values through the routes - see notes in lines 26 - 29\n assert(last_response.ok?) # verify that the the subsequent get route is accessible, no values required to pass\n assert(last_response.body.include?('JCV')) # two ways to pass assertion - see notes in lines 31- 36\n assert(last_response.body.include?('41')) # two ways to pass assertion - see notes in lines 31- 36\n assert(last_response.body.include?('10')) # two ways to pass assertion - see notes in lines 31- 36\n assert(last_response.body.include?('20')) # two ways to pass assertion - see notes in lines 31- 36\n assert(last_response.body.include?('30')) # two ways to pass assertion - see notes in lines 31- 36\n end",
"def new\n @post301 = Post301.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @post301 }\n end\n end",
"def follow_redirect opts={}, &block\n return if !redirect?\n new_opts = @request ? @request.to_hash : {}\n\n if @code == \"303\" || @code == \"302\"\n new_opts[:http_method] = \"GET\"\n new_opts.delete(:form)\n new_opts.delete(:data)\n end\n\n new_opts.delete(:headers)\n new_opts.delete(:host)\n new_opts.delete(:path)\n\n new_opts.delete(:auth) if !opts[:trust_location] &&\n (!@request || self.location.host != self.uri.host)\n\n new_opts.merge!(opts)\n\n Request.new(self.location, new_opts).stream(new_opts, &block)\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
PUT /post302s/1 PUT /post302s/1.xml
|
def update
@post302 = Post302.find(params[:id])
respond_to do |format|
if @post302.update_attributes(params[:post302])
format.html { redirect_to(@post302, :notice => 'Post302 was successfully updated.') }
format.xml { head :ok }
else
format.html { render :action => "edit" }
format.xml { render :xml => @post302.errors, :status => :unprocessable_entity }
end
end
end
|
[
"def update\n @post301 = Post301.find(params[:id])\n\n respond_to do |format|\n if @post301.update_attributes(params[:post301])\n format.html { redirect_to(@post301, :notice => 'Post301 was successfully updated.') }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @post301.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def update opts = {}\n opts[:headers] ||= {}\n opts[:headers]['Content-Type'] ||= 'text/xml'\n post opts.fetch(:path, update_path), opts\n end",
"def update opts = {}\n opts[:headers] ||= {}\n opts[:headers]['Content-Type'] ||= 'text/xml'\n post 'update', opts\n end",
"def r302(*args)\n add_rule :r302, *args\n end",
"def update\n @post307 = Post307.find(params[:id])\n\n respond_to do |format|\n if @post307.update_attributes(params[:post307])\n format.html { redirect_to(@post307, :notice => 'Post307 was successfully updated.') }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @post307.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def destroy\n @post302 = Post302.find(params[:id])\n @post302.destroy\n\n respond_to do |format|\n format.html { redirect_to(post302s_url) }\n format.xml { head :ok }\n end\n end",
"def update\n @post303 = Post303.find(params[:id])\n\n respond_to do |format|\n if @post303.update_attributes(params[:post303])\n format.html { redirect_to(@post303, :notice => 'Post303 was successfully updated.') }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @post303.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def create\n @post302 = Post302.new(params[:post302])\n\n respond_to do |format|\n if @post302.save\n format.html { redirect_to(@post302, :notice => 'Post302 was successfully created.') }\n format.xml { render :xml => @post302, :status => :created, :location => @post302 }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @post302.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def update\n @post443 = Post443.find(params[:id])\n\n respond_to do |format|\n if @post443.update_attributes(params[:post443])\n format.html { redirect_to(@post443, :notice => 'Post443 was successfully updated.') }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @post443.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def put(*a) route 'PUT', *a end",
"def test_put_existing\n request = Http::Request.new('PUT', '/file1', {}, 'bar')\n\n response = self.request(request)\n\n assert_equal(204, response.status)\n\n assert_equal(\n 'bar',\n @server.tree.node_for_path('file1').get\n )\n\n assert_equal(\n {\n 'X-Sabre-Version' => [Version::VERSION],\n 'Content-Length' => ['0'],\n 'ETag' => [\"\\\"#{Digest::MD5.hexdigest('bar')}\\\"\"]\n },\n response.headers\n )\n end",
"def put_datastream(pid, dsID, xml)\n uri = URI.parse(@fedora + '/objects/' + pid + '/datastreams/' + dsID ) \n RestClient.put(uri.to_s, xml, :content_type => \"application/xml\")\n rescue => e\n e.response \n end",
"def update\n @post203 = Post203.find(params[:id])\n\n respond_to do |format|\n if @post203.update_attributes(params[:post203])\n format.html { redirect_to(@post203, :notice => 'Post203 was successfully updated.') }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @post203.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def update\n @post225 = Post225.find(params[:id])\n\n respond_to do |format|\n if @post225.update_attributes(params[:post225])\n format.html { redirect_to(@post225, :notice => 'Post225 was successfully updated.') }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @post225.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def update\n @post326 = Post326.find(params[:id])\n\n respond_to do |format|\n if @post326.update_attributes(params[:post326])\n format.html { redirect_to(@post326, :notice => 'Post326 was successfully updated.') }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @post326.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def update\n @post270 = Post270.find(params[:id])\n\n respond_to do |format|\n if @post270.update_attributes(params[:post270])\n format.html { redirect_to(@post270, :notice => 'Post270 was successfully updated.') }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @post270.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def put *args\n make_request :put, *args\n end",
"def update\n @post304 = Post304.find(params[:id])\n\n respond_to do |format|\n if @post304.update_attributes(params[:post304])\n format.html { redirect_to(@post304, :notice => 'Post304 was successfully updated.') }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @post304.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def put(uri:, payload:, headers: {}, limit: redirect_limit)\n do_post_or_put(method: :put, uri: uri, payload: payload, headers: headers, limit: limit, timeout: timeout)\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
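As a usage note for the scaffolded update action above: the XML variant of this route can be exercised directly with Ruby's standard library. This is only a minimal sketch; the host and port (localhost:3000), the record id (1), and the <title> attribute in the payload are all assumptions, since none of them appear in the source.

require 'net/http'
require 'uri'

# Hypothetical endpoint and payload - adjust to the real app and model attributes.
uri = URI.parse('http://localhost:3000/post302s/1.xml')

request = Net::HTTP::Put.new(uri)
request['Content-Type'] = 'application/xml'
request.body = '<post302><title>Updated title</title></post302>'

response = Net::HTTP.start(uri.host, uri.port) { |http| http.request(request) }
puts response.code  # "200" when update_attributes succeeds (head :ok), "422" on validation errors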
DELETE /post302s/1 DELETE /post302s/1.xml
|
def destroy
@post302 = Post302.find(params[:id])
@post302.destroy
respond_to do |format|
format.html { redirect_to(post302s_url) }
format.xml { head :ok }
end
end
|
[
"def destroy\n @post301 = Post301.find(params[:id])\n @post301.destroy\n\n respond_to do |format|\n format.html { redirect_to(post301s_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @post443 = Post443.find(params[:id])\n @post443.destroy\n\n respond_to do |format|\n format.html { redirect_to(post443s_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @post307 = Post307.find(params[:id])\n @post307.destroy\n\n respond_to do |format|\n format.html { redirect_to(post307s_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @post465 = Post465.find(params[:id])\n @post465.destroy\n\n respond_to do |format|\n format.html { redirect_to(post465s_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @post306 = Post306.find(params[:id])\n @post306.destroy\n\n respond_to do |format|\n format.html { redirect_to(post306s_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @post200 = Post200.find(params[:id])\n @post200.destroy\n\n respond_to do |format|\n format.html { redirect_to(post200s_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @post414 = Post414.find(params[:id])\n @post414.destroy\n\n respond_to do |format|\n format.html { redirect_to(post414s_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @post137 = Post137.find(params[:id])\n @post137.destroy\n\n respond_to do |format|\n format.html { redirect_to(post137s_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @post160 = Post160.find(params[:id])\n @post160.destroy\n\n respond_to do |format|\n format.html { redirect_to(post160s_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @post341 = Post341.find(params[:id])\n @post341.destroy\n\n respond_to do |format|\n format.html { redirect_to(post341s_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @post150 = Post150.find(params[:id])\n @post150.destroy\n\n respond_to do |format|\n format.html { redirect_to(post150s_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @post300 = Post300.find(params[:id])\n @post300.destroy\n\n respond_to do |format|\n format.html { redirect_to(post300s_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @post135 = Post135.find(params[:id])\n @post135.destroy\n\n respond_to do |format|\n format.html { redirect_to(post135s_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @post445 = Post445.find(params[:id])\n @post445.destroy\n\n respond_to do |format|\n format.html { redirect_to(post445s_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @post136 = Post136.find(params[:id])\n @post136.destroy\n\n respond_to do |format|\n format.html { redirect_to(post136s_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @post373 = Post373.find(params[:id])\n @post373.destroy\n\n respond_to do |format|\n format.html { redirect_to(post373s_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @post111 = Post111.find(params[:id])\n @post111.destroy\n\n respond_to do |format|\n format.html { redirect_to(post111s_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @post185 = Post185.find(params[:id])\n @post185.destroy\n\n respond_to do |format|\n format.html { redirect_to(post185s_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @post182 = Post182.find(params[:id])\n @post182.destroy\n\n respond_to do |format|\n format.html { redirect_to(post182s_url) }\n format.xml { head :ok }\n end\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Sets the number of columns defined in TotalSpaces2. This does not change the actual number of desktops present; you should call add_desktops_on_display or remove_desktops_on_display as appropriate after changing the number of columns. display_id = TotalSpaces2.main_display[:display_id] TotalSpaces2.set_grid_columns_on_display(3, display_id)
|
def set_grid_columns_on_display(columns, display_id)
TSApi.tsapi_setDefinedColumnsOnDisplay(columns, display_id)
end
|
[
"def num_cols=(num_c)\n (num_c - num_cols).times do\n add_col\n end\n end",
"def columns=(integer); end",
"def start_columns(size = 2, gutter = 10)\n # Start from the current y-position; make the set number of columns.\n return false if @columns_on\n\n @columns = {\n :current => 1,\n :bot_y => @y\n }\n @columns_on = true\n # store the current margins\n @columns[:left] = @left_margin\n @columns[:right] = @right_margin\n @columns[:top] = @top_margin\n @columns[:bottom] = @bottom_margin\n # Reset the margins to suit the new columns. Safe enough to assume the\n # first column here, but start from the current y-position.\n @top_margin = @page_height - @y\n @columns[:size] = size || 2\n @columns[:gutter] = gutter || 10\n w = absolute_right_margin - absolute_left_margin\n @columns[:width] = (w - ((size - 1) * gutter)) / size.to_f\n @right_margin = @page_width - (@left_margin + @columns[:width])\n end",
"def add_desktops(number_to_add)\n TSApi.tsapi_addDesktopsOnDisplay(number_to_add, 0)\n end",
"def define_grid_page(columns = 4, rows = 20)\n define_grid(columns: columns, rows: rows)\n end",
"def add_desktops_on_display(number_to_add, display_id)\n TSApi.tsapi_addDesktopsOnDisplay(number_to_add, display_id)\n end",
"def set_grid_dimensions(row_count, col_count)\n \n # Forbid resetting grid dimensions\n if @grid_dimensions_set\n Vizkit.warn(\"Error: You may set grid dimensions before configuration only.\")\n return\n else\n @grid_dimensions_set = true\n end\n \n # Remove all parent widgets from grid.\n child = nil\n while(child = @grid.take_at(0)) \n widget = child.widget\n widget.set_parent(nil)\n widget = nil\n child = nil\n end\n \n # Generate container widgets with label if not yet existent\n counter = 0\n for row in 0..row_count-1 do\n for col in 0..col_count-1 do\n container = nil\n if not @container_hash[counter]\n widget_pos = (row * col_count) + col\n container = ContainerWidget.new(widget_pos)\n @container_hash[counter] = container\n else\n container = @container_hash[counter]\n end\n \n # Add parent widget to grid\n @grid.add_widget(container, row, col) # TODO does this make @grid the parent of the widget?\n container.show\n counter = counter + 1\n end\n end\n \n # Delete useless container widgets if any\n @container_hash.delete_if {|pos, conatiner| pos >= counter}\n \n end",
"def increaseGrid\n @world.updateSize(1)\n updateSizeLabel\n end",
"def add_cols(num)\n @image.each do |row|\n num.times { row << nil }\n end\n @col_count += num\n end",
"def define_columns(columns_props_array)\n view.define_columns(columns_props_array)\n\n # Sync again after adding the columns.\n sync_ui_from_model\n end",
"def setNbGrids(nbGrids)\n\t\t@nbGames = nbGrids\n\t\treturn self\n\tend",
"def fill_screens\n @screen_model.clear\n\n if @current_display\n\tn_screens = @current_display.n_screens\n\n\tn_screens.times do |i|\n\t iter = @screen_model.append\n\t iter.set_value(SCREEN_COLUMN_NUMBER, i)\n\t iter.set_value(SCREEN_COLUMN_SCREEN, @current_display.get_screen(i))\n\n\t if i == 0\n\t @screen_selection.select_iter(iter)\n\t end\n\tend\n end\n end",
"def setup_table_columns\n\t\twhile !(columns = @tableContentView.tableColumns).empty?\n\t\t\t@tableContentView.removeTableColumn(columns.first)\n\t\tend\n\t\t\n\t\t@columns.each_with_index do |name, i|\n\t\t\tcolumn = NSTableColumn.alloc.initWithIdentifier(i)\n\t\t\tcolumn.headerCell.setStringValue(name)\n\t\t\tcolumn.setEditable(false)\n\t\t\tsort = NSSortDescriptor.alloc.initWithKey_ascending_selector(name, true, \"localizedCaseInsensitiveCompare:\")\n\t\t\tcolumn.setSortDescriptorPrototype(sort)\n\n\t\t\t@tableContentView.addTableColumn(column)\n\t\tend\n\tend",
"def draw_table_six_columns(table_info, width_columns = [100, 100, 100, 90, 90, 40])\n table (table_info) do\n columns(0..5).border_width = 1\n columns(0..5).size = 7\n self.column_widths = width_columns\n end\n end",
"def welcome_page_columns=(value)\n @welcome_page_columns = value\n end",
"def set_cell_count\n cell_count = self.all_cells_array.size\n Rails.logger.info \"Setting cell count in #{self.name} to #{cell_count}\"\n self.update(cell_count: cell_count)\n Rails.logger.info \"Cell count set for #{self.name}\"\n end",
"def columns(n, context_columns = false)\n raise Sass::SyntaxError, \"container() must be called before columns() - should be called in susy/susy.sass\" unless defined?(@@susy_column_width)\n w = context(context_columns)\n c, g = [@@susy_column_width, @@susy_gutter_width]\n n.times(c).plus(n.minus(ONE).ceil.times(g)).div(w).times(PERCENT)\n end",
"def num_columns\n (width+1)/column_width\n end",
"def screen_size=(new_screen_size)\n\t try_set_screen\n Klass.setScreenSize(@handle, Phidgets::FFI::TextLCDScreenSizes[new_screen_size])\n\t \n\t load_rows(@index) #readjust screen rows\n new_screen_size\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
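Since setting the column count leaves the desktop count unchanged, a typical sequence pairs it with add_desktops_on_display (or remove_desktops_on_display). A minimal sketch using only the calls shown in this file; the numbers (3 columns, 2 extra desktops) are illustrative, not derived from any real layout:

display_id = TotalSpaces2.main_display[:display_id]

# Switch the main display to a 3-column grid...
TotalSpaces2.set_grid_columns_on_display(3, display_id)

# ...then top up the desktop count so the new grid is filled. Work the number
# out from how many desktops the display currently has; 2 is just an example.
TotalSpaces2.add_desktops_on_display(2, display_id)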
Command TotalSpaces2 to switch to the given space number on the main display. Returns false if the space number was invalid. The on_space_change notification will be sent. TotalSpaces2.move_to_space(1)
|
def move_to_space(space_number)
TSApi.tsapi_moveToSpaceOnDisplay(space_number, 0)
end
|
[
"def move_to_space(space_number)\n TSApi.tsapi_moveToSpace(space_number)\n end",
"def move_to_space_on_display(space_number, display_id)\n TSApi.tsapi_moveToSpaceOnDisplay(space_number, display_id)\n end",
"def move_space_on_display_to_position_on_display(space_number, from_display_id, position_number, to_display_id)\n TSApi.tsapi_moveSpaceOnDisplayToPositionOnDisplay(space_number, from_display_id, position_number, to_display_id)\n end",
"def move_space_to_position_on_display(space_number, position_number, display_id)\n TSApi.tsapi_moveSpaceToPositionOnDisplay(space_number, position_number, display_id)\n end",
"def move_space_to_position(space_number, position_number)\n TSApi.tsapi_moveSpaceToPositionOnDisplay(space_number, position_number, 0)\n end",
"def move_window_to_space(window_id, space_number)\n TSApi.tsapi_moveWindowToSpaceOnDisplay(window_id, space_number, 0)\n end",
"def move_space_to_position(space_number, position_number)\n TSApi.tsapi_moveSpaceToPosition(space_number, position_number)\n end",
"def move_window_to_space(window_id, space_number)\n TSApi.tsapi_moveWindowToSpace(window_id, space_number)\n end",
"def move_window_to_space(window, space)\n execute \"window #{window} --space #{id_for(space)}\"\n end",
"def move_window_to_space_on_display(window_id, space_number, display_id)\n TSApi.tsapi_moveWindowToSpaceOnDisplay(window_id, space_number, display_id)\n end",
"def swap_spaces(space1_id = nil, space2_id = nil)\n result = {}\n\n birst_soap_session do |bc|\n result[:token] = bc.swap_space_contents(:sp1ID => space1_id, :sp2ID => space2_id)\n end\n\n result.merge!(wait_for_birst_job(\n complete: :is_job_complete,\n status: :get_job_status,\n token_name: :jobToken,\n job_token: result[:token],\n wait_timeout: '5m'\n ).result_data)\n\n raise BWSSwapSpacesError, result unless result[:final_status][:status_code] == 'Complete'\n BirstSoapResult.new('swap_spaces complete', result)\n end",
"def select_space\n move = nil\n available_spaces = @board.spaces.collect.with_index { |space, i| i+1 if space.value.nil? }\n available_spaces.select! { |v| v != nil }\n\n until available_spaces.include?(move)\n clear_screen display_board\n puts \"\\n\\nYour turn! Please select a number from the board.\"\n move = gets.chomp.to_i\n end\n\n @board.spaces[move-1]\n end",
"def move_space_to_display(space, display, uuid: false)\n opts = uuid ? { uuid: display } : { display: display }\n display_index = find_display(**opts)[\"index\"]\n execute \"space #{id_for(space)} --display #{display_index}\"\n end",
"def increase_spaces_available()\n @spaces_available += 1\n end",
"def move(new_session, new_number)\n return if @session == new_session && @number == new_number\n target = \"%s:%s\" % [new_session.identifier, new_number]\n\n res = server.invoke_command(\"move-window -s #{identifier} -t #{target}\")\n if res =~ /^can't move window: index in use: \\d+/\n raise IndexInUse, [new_session, new_number]\n end\n @session = new_session\n @number = new_number\n end",
"def update_user_space\n user = User.get(params[:id])\n raise RequestError.new(:bad_params, \"User does not exist\") unless user\n raise RequestError.new(:bad_params, \"Invalid new available space\") if params[:space].to_i <= 0\n user.space = params[:space].to_i\n user.save\n @result = { success: true }\n end",
"def move step1, step2 = 0\n step1 = 0 if step1.nil?\n s1, s2 = step1 > 0 ? \"+\" : \"-\", step2 > 0 ? \"+\" : \"-\"\n command = \"M:W%sP%i%sP%i\" % [s1,step1.abs,s2,step2.abs]\n @comm.write command\n go\n end",
"def _Space\n\n _save = self.pos\n while true # choice\n _tmp = match_string(\" \")\n break if _tmp\n self.pos = _save\n _tmp = match_string(\"\\\\t\")\n break if _tmp\n self.pos = _save\n break\n end # end choice\n\n set_failed_rule :_Space unless _tmp\n return _tmp\n end",
"def space_available?(board, move)\n board[move.to_i] == \"\"\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Command TotalSpaces2 to switch to the given space number on the given display. Returns false if the space number was invalid. The on_space_change notification will be sent. display_id = TotalSpaces2.main_display[:display_id] TotalSpaces2.move_to_space_on_display(1, display_id)
|
def move_to_space_on_display(space_number, display_id)
TSApi.tsapi_moveToSpaceOnDisplay(space_number, display_id)
end
|
[
"def move_to_space(space_number)\n TSApi.tsapi_moveToSpaceOnDisplay(space_number, 0)\n end",
"def move_space_on_display_to_position_on_display(space_number, from_display_id, position_number, to_display_id)\n TSApi.tsapi_moveSpaceOnDisplayToPositionOnDisplay(space_number, from_display_id, position_number, to_display_id)\n end",
"def move_space_to_position_on_display(space_number, position_number, display_id)\n TSApi.tsapi_moveSpaceToPositionOnDisplay(space_number, position_number, display_id)\n end",
"def move_to_space(space_number)\n TSApi.tsapi_moveToSpace(space_number)\n end",
"def move_window_to_space_on_display(window_id, space_number, display_id)\n TSApi.tsapi_moveWindowToSpaceOnDisplay(window_id, space_number, display_id)\n end",
"def move_window_to_space(window_id, space_number)\n TSApi.tsapi_moveWindowToSpaceOnDisplay(window_id, space_number, 0)\n end",
"def move_space_to_display(space, display, uuid: false)\n opts = uuid ? { uuid: display } : { display: display }\n display_index = find_display(**opts)[\"index\"]\n execute \"space #{id_for(space)} --display #{display_index}\"\n end",
"def move_space_to_position(space_number, position_number)\n TSApi.tsapi_moveSpaceToPositionOnDisplay(space_number, position_number, 0)\n end",
"def move_window_to_space(window_id, space_number)\n TSApi.tsapi_moveWindowToSpace(window_id, space_number)\n end",
"def move_space_to_position(space_number, position_number)\n TSApi.tsapi_moveSpaceToPosition(space_number, position_number)\n end",
"def move_window_to_space(window, space)\n execute \"window #{window} --space #{id_for(space)}\"\n end",
"def swap_spaces(space1_id = nil, space2_id = nil)\n result = {}\n\n birst_soap_session do |bc|\n result[:token] = bc.swap_space_contents(:sp1ID => space1_id, :sp2ID => space2_id)\n end\n\n result.merge!(wait_for_birst_job(\n complete: :is_job_complete,\n status: :get_job_status,\n token_name: :jobToken,\n job_token: result[:token],\n wait_timeout: '5m'\n ).result_data)\n\n raise BWSSwapSpacesError, result unless result[:final_status][:status_code] == 'Complete'\n BirstSoapResult.new('swap_spaces complete', result)\n end",
"def select_space\n move = nil\n available_spaces = @board.spaces.collect.with_index { |space, i| i+1 if space.value.nil? }\n available_spaces.select! { |v| v != nil }\n\n until available_spaces.include?(move)\n clear_screen display_board\n puts \"\\n\\nYour turn! Please select a number from the board.\"\n move = gets.chomp.to_i\n end\n\n @board.spaces[move-1]\n end",
"def set_name_for_space_on_display(space_number, name, display_id)\n TSApi.tsapi_setNameForSpaceOnDisplay(space_number, name, display_id)\n end",
"def move(new_session, new_number)\n return if @session == new_session && @number == new_number\n target = \"%s:%s\" % [new_session.identifier, new_number]\n\n res = server.invoke_command(\"move-window -s #{identifier} -t #{target}\")\n if res =~ /^can't move window: index in use: \\d+/\n raise IndexInUse, [new_session, new_number]\n end\n @session = new_session\n @number = new_number\n end",
"def focus_space(space)\n execute \"space --focus #{id_for(space)}\"\n end",
"def update_user_space\n user = User.get(params[:id])\n raise RequestError.new(:bad_params, \"User does not exist\") unless user\n raise RequestError.new(:bad_params, \"Invalid new available space\") if params[:space].to_i <= 0\n user.space = params[:space].to_i\n user.save\n @result = { success: true }\n end",
"def increase_spaces_available()\n @spaces_available += 1\n end",
"def addSpace(space)\r\n # if space.getPiece() == nil\r\n # return false\r\n # end\r\n @space = space\r\n return true\r\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Set the name for a space on the main display. Note that using this command will cause a layout change notification to be sent if the new name was different from that previously set. The maximum length for a name is 255 bytes. TotalSpaces2.set_name_for_space(1, "Home")
|
def set_name_for_space(space_number, name)
TSApi.tsapi_setNameForSpaceOnDisplay(space_number, name, 0)
end
|
[
"def set_name_for_space_on_display(space_number, name, display_id)\n TSApi.tsapi_setNameForSpaceOnDisplay(space_number, name, display_id)\n end",
"def set_name_for_space(space_number, name)\n TSApi.tsapi_setNameForSpace(space_number, name)\n end",
"def change\n old_name = @space.name\n @space.change_space(space_name_params[:name])\n new_name = @space.name\n flash_log('Change name space <b>' + old_name + '</b> by <b>' + new_name + '</b>',\n 'The namespace was changed correctly')\n\n redirect_to '/' + @user.username + '/' + new_name + '/setting'\n rescue => ex\n flash[:error] = clear_exception ex.message\n redirect_to '/' + @user.username + '/' + old_name + '/setting'\n end",
"def change_space(namespace)\n update!(name: namespace)\n end",
"def set_name name\n\t\t\t@name = name.gsub \" (#{@addr})\", ''\n\t\tend",
"def set_name\n self.update(name: \"Xtra-Large Washer ##{self.id}\") unless self.name\n end",
"def change_name(new_name)\n\t\t@name = new_name\n\tend",
"def set_name(new_name)\n @name = new_name\n @@all_rooms[@id] = self\n end",
"def share_name=(new_name)\n @share_edit_args += [ \"-n\", new_name ]\n end",
"def set_name(command_name)\n @name = command_name\n end",
"def rename_tablespace(old_name, new_name)\n execute(\"ALTER TABLESPACE #{quote_tablespace(old_name)} RENAME TO #{quote_tablespace(new_name)};\")\n end",
"def smb_name=(new_name)\n return if new_name == @resource.should(:share_name)\n @share_edit_args += [ \"-S\", new_name ]\n end",
"def set_NewName(value)\n set_input(\"NewName\", value)\n end",
"def set_name\n self.update(name: \"Small Washer ##{self.id}\" ) unless self.name\n end",
"def set_Name(value)\n set_input(\"Name\", value)\n end",
"def display_name_set(name)\n display_name.set(name)\n end",
"def afp_name=(new_name)\n return if new_name == @resource.should(:share_name)\n @share_edit_args += [ \"-A\", new_name ]\n end",
"def set_name\n self.update(name: \"Large Washer ##{self.id}\") unless self.name\n end",
"def no_space name\n raise \"space is a reserved name\" if name == :space\n name\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Set the name for a space. Note that using this command will cause a layout change notification to be sent if the new name was different from that previously set. The maximum length for a name is 255 bytes. display_id = TotalSpaces2.main_display[:display_id] TotalSpaces2.set_name_for_space_on_display(1, "Home", display_id)
|
def set_name_for_space_on_display(space_number, name, display_id)
TSApi.tsapi_setNameForSpaceOnDisplay(space_number, name, display_id)
end
|
[
"def set_name_for_space(space_number, name)\n TSApi.tsapi_setNameForSpaceOnDisplay(space_number, name, 0)\n end",
"def set_name_for_space(space_number, name)\n TSApi.tsapi_setNameForSpace(space_number, name)\n end",
"def display_name_set(name)\n display_name.set(name)\n end",
"def change\n old_name = @space.name\n @space.change_space(space_name_params[:name])\n new_name = @space.name\n flash_log('Change name space <b>' + old_name + '</b> by <b>' + new_name + '</b>',\n 'The namespace was changed correctly')\n\n redirect_to '/' + @user.username + '/' + new_name + '/setting'\n rescue => ex\n flash[:error] = clear_exception ex.message\n redirect_to '/' + @user.username + '/' + old_name + '/setting'\n end",
"def set_name\n self.update(name: \"Xtra-Large Washer ##{self.id}\") unless self.name\n end",
"def set_name name\n\t\t\t@name = name.gsub \" (#{@addr})\", ''\n\t\tend",
"def move_to_space_on_display(space_number, display_id)\n TSApi.tsapi_moveToSpaceOnDisplay(space_number, display_id)\n end",
"def move_space_to_display(space, display, uuid: false)\n opts = uuid ? { uuid: display } : { display: display }\n display_index = find_display(**opts)[\"index\"]\n execute \"space #{id_for(space)} --display #{display_index}\"\n end",
"def change_space(namespace)\n update!(name: namespace)\n end",
"def set_ScreenName(value)\n set_input(\"ScreenName\", value)\n end",
"def set_name\n self.update(name: \"Small Washer ##{self.id}\" ) unless self.name\n end",
"def set_Name(value)\n set_input(\"Name\", value)\n end",
"def set_ScreenName1(value)\n set_input(\"ScreenName1\", value)\n end",
"def change_name(new_name)\n\t\t@name = new_name\n\tend",
"def set_name(command_name)\n @name = command_name\n end",
"def smb_name=(new_name)\n return if new_name == @resource.should(:share_name)\n @share_edit_args += [ \"-S\", new_name ]\n end",
"def change_map_name_display(value)\n add_command(281, [value])\n end",
"def set_name\n self.update(name: \"Large Washer ##{self.id}\") unless self.name\n end",
"def set_name(name, id = @id)\n @client.request_session(\n {\n 'player_set_name' => '',\n 'id' => id,\n 'name' => name,\n 'app_version' => @version\n },\n @sid\n )\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Cancel the on_space_change notification.
|
def cancel_on_space_change
$tsapi_on_space_change_block = nil
TSApi.tsapi_unsetSpaceWillChangeCallback
end
|
[
"def onCancel(flag, view)\n @activate = false\n @ended = true\n clearSelection()\n cleanup()\n \nend",
"def cancel_process (wfid)\n\n @context.storage.put_msg('cancel_process', 'wfid' => wfid)\n end",
"def cancel\n # clear the Gtk::Entry\n @search_entry.set_text(\"\")\n\n # Colorize the Gtk::Entry\n state(CLEAR)\n\n # Refresh the modules treeview\n $gtk2driver.module_tree.refresh\n\n # Register the current state\n @@state = CLEAR\n end",
"def cancel\n FFIGeos.GEOS_interruptCancel\n end",
"def cancel_distribution\n _cmd('cancelDistribution')\n end",
"def cancel\n\n unschedule_timeout(nil)\n super()\n end",
"def on_grid_actor_cancel\n Sound.play_cancel\n on_actor_cancel\n @grid_pointer.deactivate\n end",
"def touchCancel()\n execute_step(\"_sahi._touchCancel(#{self.to_s()})\")\n end",
"def cancel\n self.change_status(:canceled)\n @thread.kill\n end",
"def cancel\r\n # @todo Emit a warning for attempts to cancel an action after it's been\r\n # executed\r\n @cancelled = true\r\n end",
"def cancel!\n state_guard { modify_call 'Status' => 'cancelled' }\n end",
"def cancel_attack\n @windows[Win_Status].clear_dmg_preview\n @cursor.active = true \n end",
"def cancel\n\n unschedule\n\n super()\n\n @applied_workitem\n end",
"def cancel()\n @callback = nil\n @cancelled = true\n end",
"def onCancelTimer(context)\n\t\t# nothing to do here\n\tend",
"def cancel_focus\n result = getEvent(false).result\n # Canceling tracking/touch focus should be preceded for half-press\n if result[54] && result[54]['trackingFocusStatus'] == 'Tracking'\n cancelTrackingFocus\n rsp = wait_event { |r| r[54]['trackingFocusStatus'] == 'Not Tracking' }\n end\n if result[34] && result[34]['currentSet'] == true\n cancelTouchAFPosition\n rsp = wait_event { |r| r[34]['currentSet'] == false }\n end\n if result[35] && result[35]['focusStatus'] != 'Not Focusing'\n cancelHalfPressShutter\n rsp = wait_event { |r| r[35]['focusStatus'] == 'Not Focusing' }\n end\n end",
"def cancel_on_layout_change\n $tsapi_on_layout_change_block = nil\n TSApi.tsapi_unsetLayoutChangedCallback\n end",
"def cancel_batch\n @batch = nil\n end",
"def mouse_cancel\n $game_system.se_play($data_system.cancel_se)\n @skill_selected = -1\n if @skill_index >= 0\n @uis[2].skills[@skill_index].moving = false\n @skill_index = -1\n else\n @running = false unless @selecting_move\n @selecting_move = false\n end\n update_ctrl_state\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
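Cancelling only makes sense once a callback has been registered. A minimal sketch of the round trip, assuming the gem exposes an on_space_change method that takes a block; that registration call is not shown anywhere in this file, so treat it as an assumption to verify against the gem:

# Assumed registration API (not shown above): TotalSpaces2.on_space_change { ... }
TotalSpaces2.on_space_change do |from, to|
  puts "Space changed: #{from} -> #{to}"
end

# ...later, when change notifications are no longer wanted:
TotalSpaces2.cancel_on_space_change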
Cancel the on_layout_change notification.
|
def cancel_on_layout_change
$tsapi_on_layout_change_block = nil
TSApi.tsapi_unsetLayoutChangedCallback
end
|
[
"def cancel_timesheet_alert\n \n render :layout => 'none' \n \n end",
"def click_cancel_config_edit\n wait_for_update_and_click_js cancel_button_element\n end",
"def uninstall\n self.installed_view.removeConstraint(self.layout_constraint)\n self.layout_constraint = nil\n self.installed_view = nil\n end",
"def ignore_layout(&block)\n @ignore_layout = block\n end",
"def ignore_layout(&block)\n @ignore_layout = block\n end",
"def onCancel(flag, view)\n @activate = false\n @ended = true\n clearSelection()\n cleanup()\n \nend",
"def reset_layout\n content_representations.clear\n update_attributes(layout: nil, layout_configuration: nil, layout_variant: nil)\n end",
"def mark_out_of_window\n update_disposition 48\n end",
"def on_equip_cancel\n hide_sub_window(@equip_window)\n end",
"def cancel\n self.cancel_button\n self.linger_for_ajax\n end",
"def touchCancel()\n execute_step(\"_sahi._touchCancel(#{self.to_s()})\")\n end",
"def remove_layout_for_action(action)\n self.class_variable_get(\"@@widgets_list\") << action.to_sym\n self.layout :false, :only => self.class_variable_get(\"@@widgets_list\")\n end",
"def set_layout_to_none\n options[:layout] = false\n end",
"def cancel\n # clear the Gtk::Entry\n @search_entry.set_text(\"\")\n\n # Colorize the Gtk::Entry\n state(CLEAR)\n\n # Refresh the modules treeview\n $gtk2driver.module_tree.refresh\n\n # Register the current state\n @@state = CLEAR\n end",
"def ListView_CancelEditLabel(hwnd) send_listview_message(hwnd, :CANCELEDITLABEL) end",
"def cancel_frame\n end",
"def on_item_cancel2; item_cancel(@item_window2, @category_window2); end",
"def on_workflow_cancel\n restore_status\n end",
"def fix_layout\n @fix_layout\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Move a window to a given space. The window_id parameter must be fetched using window_list. Returns false if the space_number or window_id is invalid.
|
def move_window_to_space(window_id, space_number)
TSApi.tsapi_moveWindowToSpaceOnDisplay(window_id, space_number, 0)
end
|
[
"def move_window_to_space(window_id, space_number)\n TSApi.tsapi_moveWindowToSpace(window_id, space_number)\n end",
"def move_window_to_space(window, space)\n execute \"window #{window} --space #{id_for(space)}\"\n end",
"def move_window_to_space_on_display(window_id, space_number, display_id)\n TSApi.tsapi_moveWindowToSpaceOnDisplay(window_id, space_number, display_id)\n end",
"def move(new_session, new_number)\n return if @session == new_session && @number == new_number\n target = \"%s:%s\" % [new_session.identifier, new_number]\n\n res = server.invoke_command(\"move-window -s #{identifier} -t #{target}\")\n if res =~ /^can't move window: index in use: \\d+/\n raise IndexInUse, [new_session, new_number]\n end\n @session = new_session\n @number = new_number\n end",
"def move_to_space(space_number)\n TSApi.tsapi_moveToSpace(space_number)\n end",
"def move_space_to_position(space_number, position_number)\n TSApi.tsapi_moveSpaceToPosition(space_number, position_number)\n end",
"def move_to_space(space_number)\n TSApi.tsapi_moveToSpaceOnDisplay(space_number, 0)\n end",
"def move_space_to_position_on_display(space_number, position_number, display_id)\n TSApi.tsapi_moveSpaceToPositionOnDisplay(space_number, position_number, display_id)\n end",
"def move_to_space_on_display(space_number, display_id)\n TSApi.tsapi_moveToSpaceOnDisplay(space_number, display_id)\n end",
"def move_space_to_position(space_number, position_number)\n TSApi.tsapi_moveSpaceToPositionOnDisplay(space_number, position_number, 0)\n end",
"def move_first_window_or_create_new(window)\n if window == windows.first\n move_window(window.index)\n else\n new_window(window)\n end\n end",
"def move_space_on_display_to_position_on_display(space_number, from_display_id, position_number, to_display_id)\n TSApi.tsapi_moveSpaceOnDisplayToPositionOnDisplay(space_number, from_display_id, position_number, to_display_id)\n end",
"def switch_window(new_window, coords: {})\n @window = new_window\n move(coords[:y], coords[:x]) unless coords.empty?\n end",
"def misplaced_space?\n space && (space_id != space.space_id)\n end",
"def move\n `xdotool search --onlyvisible --class #{name} windowsize %@ #{w} #{h} windowmove #{x} #{y}`\n end",
"def swap_spaces(space1_id = nil, space2_id = nil)\n result = {}\n\n birst_soap_session do |bc|\n result[:token] = bc.swap_space_contents(:sp1ID => space1_id, :sp2ID => space2_id)\n end\n\n result.merge!(wait_for_birst_job(\n complete: :is_job_complete,\n status: :get_job_status,\n token_name: :jobToken,\n job_token: result[:token],\n wait_timeout: '5m'\n ).result_data)\n\n raise BWSSwapSpacesError, result unless result[:final_status][:status_code] == 'Complete'\n BirstSoapResult.new('swap_spaces complete', result)\n end",
"def swap_with(window)\n server.invoke_command \"swap-window -s #{identifier} -t #{window.identifier}\"\n end",
"def move_to_screen_on_the_right(window_id, current_x, current_y)\n window_id = window_id.sub('0x', '').to_i(16) # converting to dec\n `/usr/bin/xdotool windowmove #{window_id} #{current_x + @current_screen_resolution[0]/2} #{current_y}`\nend",
"def move\n status = 0\n if @valid_placement\n new_x_pos = @x_pos + @movement[:dx]\n new_y_pos = @y_pos + @movement[:dy]\n\n if @table.xy_within_table?(new_x_pos, new_y_pos) # check if new position is within bound\n @x_pos = new_x_pos\n @y_pos = new_y_pos\n else\n status = -1\n end\n else\n status = -1\n end\n status\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
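The description says window_id must come from window_list, whose return shape is not shown here. A minimal sketch, assuming window_list returns per-space hashes with a :windows array whose entries carry :window_id and :title keys; those key names are assumptions and should be checked against the gem's documentation:

# Find a window by (assumed) :title, then move it to space 3 on the main display.
target = nil
TotalSpaces2.window_list.each do |space|
  target = (space[:windows] || []).find { |w| w[:title].to_s.include?('Terminal') }
  break if target
end

if target
  moved = TotalSpaces2.move_window_to_space(target[:window_id], 3)
  warn 'Move failed - invalid window_id or space_number' unless moved
end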
Move a window to a given space on a given display. The window_id parameter must be fetched using window_list. Returns false if the space_number or window_id is invalid.
|
def move_window_to_space_on_display(window_id, space_number, display_id)
TSApi.tsapi_moveWindowToSpaceOnDisplay(window_id, space_number, display_id)
end
|
[
"def move_window_to_space(window_id, space_number)\n TSApi.tsapi_moveWindowToSpaceOnDisplay(window_id, space_number, 0)\n end",
"def move_window_to_space(window_id, space_number)\n TSApi.tsapi_moveWindowToSpace(window_id, space_number)\n end",
"def move_window_to_space(window, space)\n execute \"window #{window} --space #{id_for(space)}\"\n end",
"def move(new_session, new_number)\n return if @session == new_session && @number == new_number\n target = \"%s:%s\" % [new_session.identifier, new_number]\n\n res = server.invoke_command(\"move-window -s #{identifier} -t #{target}\")\n if res =~ /^can't move window: index in use: \\d+/\n raise IndexInUse, [new_session, new_number]\n end\n @session = new_session\n @number = new_number\n end",
"def move_to_space_on_display(space_number, display_id)\n TSApi.tsapi_moveToSpaceOnDisplay(space_number, display_id)\n end",
"def move_space_to_position_on_display(space_number, position_number, display_id)\n TSApi.tsapi_moveSpaceToPositionOnDisplay(space_number, position_number, display_id)\n end",
"def move_to_space(space_number)\n TSApi.tsapi_moveToSpaceOnDisplay(space_number, 0)\n end",
"def switch_window(new_window, coords: {})\n @window = new_window\n move(coords[:y], coords[:x]) unless coords.empty?\n end",
"def move_space_on_display_to_position_on_display(space_number, from_display_id, position_number, to_display_id)\n TSApi.tsapi_moveSpaceOnDisplayToPositionOnDisplay(space_number, from_display_id, position_number, to_display_id)\n end",
"def move_to_space(space_number)\n TSApi.tsapi_moveToSpace(space_number)\n end",
"def move\n `xdotool search --onlyvisible --class #{name} windowsize %@ #{w} #{h} windowmove #{x} #{y}`\n end",
"def move_first_window_or_create_new(window)\n if window == windows.first\n move_window(window.index)\n else\n new_window(window)\n end\n end",
"def move_space_to_position(space_number, position_number)\n TSApi.tsapi_moveSpaceToPositionOnDisplay(space_number, position_number, 0)\n end",
"def move_space_to_position(space_number, position_number)\n TSApi.tsapi_moveSpaceToPosition(space_number, position_number)\n end",
"def move_to_screen_on_the_right(window_id, current_x, current_y)\n window_id = window_id.sub('0x', '').to_i(16) # converting to dec\n `/usr/bin/xdotool windowmove #{window_id} #{current_x + @current_screen_resolution[0]/2} #{current_y}`\nend",
"def swap_with(window)\n server.invoke_command \"swap-window -s #{identifier} -t #{window.identifier}\"\n end",
"def move_space_to_display(space, display, uuid: false)\n opts = uuid ? { uuid: display } : { display: display }\n display_index = find_display(**opts)[\"index\"]\n execute \"space #{id_for(space)} --display #{display_index}\"\n end",
"def misplaced_space?\n space && (space_id != space.space_id)\n end",
"def move_window_to(point_x, point_y)\n driver.manage.window.move_to(point_x, point_y)\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Move space to a new position in the grid on the main display. Returns false if the space_number or position_number is not valid. TotalSpaces2.move_space_to_position(4, 2)
|
def move_space_to_position(space_number, position_number)
TSApi.tsapi_moveSpaceToPositionOnDisplay(space_number, position_number, 0)
end
|
[
"def move_space_to_position(space_number, position_number)\n TSApi.tsapi_moveSpaceToPosition(space_number, position_number)\n end",
"def move_space_to_position_on_display(space_number, position_number, display_id)\n TSApi.tsapi_moveSpaceToPositionOnDisplay(space_number, position_number, display_id)\n end",
"def move_space_on_display_to_position_on_display(space_number, from_display_id, position_number, to_display_id)\n TSApi.tsapi_moveSpaceOnDisplayToPositionOnDisplay(space_number, from_display_id, position_number, to_display_id)\n end",
"def move_to(new_position)\n if range.include? new_position\n reorder(position, new_position)\n update_column(:position, new_position)\n end\n end",
"def move_to_space(space_number)\n TSApi.tsapi_moveToSpaceOnDisplay(space_number, 0)\n end",
"def insert_space_at(position)\n from = self[position_field] || next_available_position_in_list\n to = position\n\n if from < to\n shift_position for: items_between(from, to + 1), by: -1\n else\n shift_position for: items_between(to - 1, from), by: 1\n end\n end",
"def move_to_space(space_number)\n TSApi.tsapi_moveToSpace(space_number)\n end",
"def move\n status = 0\n if @valid_placement\n new_x_pos = @x_pos + @movement[:dx]\n new_y_pos = @y_pos + @movement[:dy]\n\n if @table.xy_within_table?(new_x_pos, new_y_pos) # check if new position is within bound\n @x_pos = new_x_pos\n @y_pos = new_y_pos\n else\n status = -1\n end\n else\n status = -1\n end\n status\n end",
"def move_to_position(new_position)\n old_position = self.send(position_column)\n unless new_position == old_position\n if new_position < old_position\n # Moving higher in the list (up) \n new_position = [1, new_position].max\n increment_positions_between(new_position, old_position - 1)\n else\n # Moving lower in the list (down)\n new_position = [bottom_position_in_list(self).to_i, new_position].min\n decrement_positions_between(old_position + 1, new_position)\n end\n self.update_attribute(position_column, new_position)\n end\n end",
"def valid_move?(position)\n return false if (position < 0 || position > 8)\n !position_taken?(position)\n end",
"def moveToEditPosition(new_position)\n return @field_win.wmove(0, @field_width - new_position - 1)\n end",
"def grid_move(new_grid_pos)\n @grid_pos = new_grid_pos\n @ori_x = original_x\n @ori_y = original_y\n self.battle_phase = :move\n end",
"def validEditPosition(new_position)\n if new_position <= 0 || new_position >= @field_width\n return false\n end\n if self.moveToEditPosition(new_position) == Ncurses::ERR\n return false\n end\n ch = @field_win.winch\n if ch.chr != ' '\n return true\n end\n if new_position > 1\n # Don't use recursion - only one level is wanted\n if self.moveToEditPosition(new_position - 1) == Ncurses::ERR\n return false\n end\n ch = @field_win.winch\n return ch.chr != ' '\n end\n return false\n end",
"def move_to_space_on_display(space_number, display_id)\n TSApi.tsapi_moveToSpaceOnDisplay(space_number, display_id)\n end",
"def place(token, position, n=-1)\n>>>>>>> sam_kurt\n position = position.to_i\n if n >= (@board.length * -1)\n\n if @board[n][position] == \" \"\n return @board[n][position]= token\n\n else\n n-=1\n place(token, position, n)\n end\n else\n puts \"Not a valid move. Please pick another column.\"\n end\n end",
"def invalid_move?(new_x, new_y) \n if x_position == new_x && y_position == new_y\n return true\n elsif new_x < 1 || new_x > 8 || new_y < 1 || new_y > 8\n return true\n else\n return false\n end\n end",
"def move\n if !@position.nil?\n candidate_position = @position.dup.forward_move\n if @grid.validate_position(candidate_position)\n @position = candidate_position\n 'Pacman moved 1 unit forward'\n else 'Cannot move here'\n end\n else\n 'Pacman is not yet placed'\n end\n end",
"def space(position)\n return nil_space if position.nil?\n @spaces.find{|s| s.position == position}\n end",
"def valid_move?(board, position)\n position.between?(0, 8) && !position_taken?(board, position)\nend"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Move space to a new position in the grid. Spaces can only be moved within their own display. Returns false if the space_number or position_number is not valid. display_id = TotalSpaces2.main_display[:display_id] TotalSpaces2.move_space_to_position_on_display(4, 2, display_id)
|
def move_space_to_position_on_display(space_number, position_number, display_id)
TSApi.tsapi_moveSpaceToPositionOnDisplay(space_number, position_number, display_id)
end
|
[
"def move_space_on_display_to_position_on_display(space_number, from_display_id, position_number, to_display_id)\n TSApi.tsapi_moveSpaceOnDisplayToPositionOnDisplay(space_number, from_display_id, position_number, to_display_id)\n end",
"def move_space_to_position(space_number, position_number)\n TSApi.tsapi_moveSpaceToPositionOnDisplay(space_number, position_number, 0)\n end",
"def move_to_space_on_display(space_number, display_id)\n TSApi.tsapi_moveToSpaceOnDisplay(space_number, display_id)\n end",
"def move_space_to_position(space_number, position_number)\n TSApi.tsapi_moveSpaceToPosition(space_number, position_number)\n end",
"def move_to_space(space_number)\n TSApi.tsapi_moveToSpaceOnDisplay(space_number, 0)\n end",
"def move_window_to_space_on_display(window_id, space_number, display_id)\n TSApi.tsapi_moveWindowToSpaceOnDisplay(window_id, space_number, display_id)\n end",
"def move_to_space(space_number)\n TSApi.tsapi_moveToSpace(space_number)\n end",
"def move_to(new_position)\n if range.include? new_position\n reorder(position, new_position)\n update_column(:position, new_position)\n end\n end",
"def move_to_position(new_position)\n old_position = self.send(position_column)\n unless new_position == old_position\n if new_position < old_position\n # Moving higher in the list (up) \n new_position = [1, new_position].max\n increment_positions_between(new_position, old_position - 1)\n else\n # Moving lower in the list (down)\n new_position = [bottom_position_in_list(self).to_i, new_position].min\n decrement_positions_between(old_position + 1, new_position)\n end\n self.update_attribute(position_column, new_position)\n end\n end",
"def move\n status = 0\n if @valid_placement\n new_x_pos = @x_pos + @movement[:dx]\n new_y_pos = @y_pos + @movement[:dy]\n\n if @table.xy_within_table?(new_x_pos, new_y_pos) # check if new position is within bound\n @x_pos = new_x_pos\n @y_pos = new_y_pos\n else\n status = -1\n end\n else\n status = -1\n end\n status\n end",
"def move_space_to_display(space, display, uuid: false)\n opts = uuid ? { uuid: display } : { display: display }\n display_index = find_display(**opts)[\"index\"]\n execute \"space #{id_for(space)} --display #{display_index}\"\n end",
"def move_window_to_space(window_id, space_number)\n TSApi.tsapi_moveWindowToSpaceOnDisplay(window_id, space_number, 0)\n end",
"def insert_space_at(position)\n from = self[position_field] || next_available_position_in_list\n to = position\n\n if from < to\n shift_position for: items_between(from, to + 1), by: -1\n else\n shift_position for: items_between(to - 1, from), by: 1\n end\n end",
"def moveToEditPosition(new_position)\n return @field_win.wmove(0, @field_width - new_position - 1)\n end",
"def misplaced_space?\n space && (space_id != space.space_id)\n end",
"def move\n if !@position.nil?\n candidate_position = @position.dup.forward_move\n if @grid.validate_position(candidate_position)\n @position = candidate_position\n 'Pacman moved 1 unit forward'\n else 'Cannot move here'\n end\n else\n 'Pacman is not yet placed'\n end\n end",
"def move_window_to_space(window_id, space_number)\n TSApi.tsapi_moveWindowToSpace(window_id, space_number)\n end",
"def validEditPosition(new_position)\n if new_position <= 0 || new_position >= @field_width\n return false\n end\n if self.moveToEditPosition(new_position) == Ncurses::ERR\n return false\n end\n ch = @field_win.winch\n if ch.chr != ' '\n return true\n end\n if new_position > 1\n # Don't use recursion - only one level is wanted\n if self.moveToEditPosition(new_position - 1) == Ncurses::ERR\n return false\n end\n ch = @field_win.winch\n return ch.chr != ' '\n end\n return false\n end",
"def valid_move?(position)\n return false if (position < 0 || position > 8)\n !position_taken?(position)\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Move a space to a new position on another screen. This won't work unless the 'Displays have separate Spaces' option is enabled. Returns false if any parameters are not valid. display_id = TotalSpaces2.main_display[:display_id] display2_id = TotalSpaces2.display_list[1][:display_id] TotalSpaces2.move_space_on_display_to_position_on_display(2, display_id, 1, display2_id)
|
def move_space_on_display_to_position_on_display(space_number, from_display_id, position_number, to_display_id)
TSApi.tsapi_moveSpaceOnDisplayToPositionOnDisplay(space_number, from_display_id, position_number, to_display_id)
end
|
[
"def move_space_to_position_on_display(space_number, position_number, display_id)\n TSApi.tsapi_moveSpaceToPositionOnDisplay(space_number, position_number, display_id)\n end",
"def move_to_space_on_display(space_number, display_id)\n TSApi.tsapi_moveToSpaceOnDisplay(space_number, display_id)\n end",
"def move_window_to_space_on_display(window_id, space_number, display_id)\n TSApi.tsapi_moveWindowToSpaceOnDisplay(window_id, space_number, display_id)\n end",
"def move_space_to_position(space_number, position_number)\n TSApi.tsapi_moveSpaceToPositionOnDisplay(space_number, position_number, 0)\n end",
"def move_space_to_display(space, display, uuid: false)\n opts = uuid ? { uuid: display } : { display: display }\n display_index = find_display(**opts)[\"index\"]\n execute \"space #{id_for(space)} --display #{display_index}\"\n end",
"def move_to_space(space_number)\n TSApi.tsapi_moveToSpaceOnDisplay(space_number, 0)\n end",
"def move_window_to_space(window_id, space_number)\n TSApi.tsapi_moveWindowToSpaceOnDisplay(window_id, space_number, 0)\n end",
"def move_space_to_position(space_number, position_number)\n TSApi.tsapi_moveSpaceToPosition(space_number, position_number)\n end",
"def move_to_space(space_number)\n TSApi.tsapi_moveToSpace(space_number)\n end",
"def move_window_to_space(window, space)\n execute \"window #{window} --space #{id_for(space)}\"\n end",
"def move_window_to_space(window_id, space_number)\n TSApi.tsapi_moveWindowToSpace(window_id, space_number)\n end",
"def move\n `xdotool search --onlyvisible --class #{name} windowsize %@ #{w} #{h} windowmove #{x} #{y}`\n end",
"def move(new_session, new_number)\n return if @session == new_session && @number == new_number\n target = \"%s:%s\" % [new_session.identifier, new_number]\n\n res = server.invoke_command(\"move-window -s #{identifier} -t #{target}\")\n if res =~ /^can't move window: index in use: \\d+/\n raise IndexInUse, [new_session, new_number]\n end\n @session = new_session\n @number = new_number\n end",
"def move!(position_1, position_2, player)\n fail_move_errors(position_1, position_2, player)\n row, col = position_1\n piece = @squares[row][col].contents\n piece.first_move = false if piece.is_a? Pawn\n remove_threats(piece)\n piece.position = position_2\n @squares[row][col].contents = ' '\n row, col = position_2\n if @squares[row][col].contents.is_a? ChessPiece\n remove_threats(@squares[row][col].contents)\n end\n @squares[row][col].contents = piece\n # update threats whose path may be blocked after the move is made\n @squares[row][col].threats.each do |threat|\n remove_threats(threat)\n add_threats(threat)\n end\n add_threats(piece)\n end",
"def set_screen_postion(start_pos, reset = false)\n position = reset ? [$game_player.x,$game_player.y] : start_pos \n pos = set_screen_move_postion(position)\n set_screen(pos[0], $game_map.display_x, true)\n set_screen(pos[1], $game_map.display_y, false)\n end",
"def moveToEditPosition(new_position)\n return @field_win.wmove(0, @field_width - new_position - 1)\n end",
"def move step1, step2 = 0\n step1 = 0 if step1.nil?\n s1, s2 = step1 > 0 ? \"+\" : \"-\", step2 > 0 ? \"+\" : \"-\"\n command = \"M:W%sP%i%sP%i\" % [s1,step1.abs,s2,step2.abs]\n @comm.write command\n go\n end",
"def move_to_screen_on_the_right(window_id, current_x, current_y)\n window_id = window_id.sub('0x', '').to_i(16) # converting to dec\n `/usr/bin/xdotool windowmove #{window_id} #{current_x + @current_screen_resolution[0]/2} #{current_y}`\nend",
"def move_to_position(new_position)\n old_position = self.send(position_column)\n unless new_position == old_position\n if new_position < old_position\n # Moving higher in the list (up) \n new_position = [1, new_position].max\n increment_positions_between(new_position, old_position - 1)\n else\n # Moving lower in the list (down)\n new_position = [bottom_position_in_list(self).to_i, new_position].min\n decrement_positions_between(old_position + 1, new_position)\n end\n self.update_attribute(position_column, new_position)\n end\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
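Building on the example in the description, it is worth guarding against the single-display case before attempting a cross-display move. A minimal sketch using only calls that appear in this file; the space and position numbers are illustrative:

displays = TotalSpaces2.display_list

# Only attempt the move when a second display is actually attached.
if displays.length > 1
  from_id = TotalSpaces2.main_display[:display_id]
  to_id   = displays[1][:display_id]

  # Move space 2 of the main display to position 1 on the second display.
  ok = TotalSpaces2.move_space_on_display_to_position_on_display(2, from_id, 1, to_id)
  warn 'Move rejected - check the space and position numbers' unless ok
end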
Add desktops. There can be at most 16 desktops unless the display has collected some when a secondary display has been unplugged. Returns true on success, false if number_to_add was zero or would result in more than 16 desktops. The on_layout_change notification will be sent if a change was made. TotalSpaces2.add_desktops(1)
|
def add_desktops(number_to_add)
TSApi.tsapi_addDesktopsOnDisplay(number_to_add, 0)
end
|
[
"def add_desktops(number_to_add)\n TSApi.tsapi_addDesktops(number_to_add)\n end",
"def add_desktops_on_display(number_to_add, display_id)\n TSApi.tsapi_addDesktopsOnDisplay(number_to_add, display_id)\n end",
"def remove_desktops(number_to_remove)\n TSApi.tsapi_removeDesktopsOnDisplay(number_to_remove, 0)\n end",
"def desktops\n @desktops ||= if available?\n Libatspi.get_desktop_count.times.map do |idx|\n Desktop.new(Libatspi.get_desktop(idx))\n end\n else\n []\n end\n end",
"def desktop?\n @device == :desktop\n end",
"def remove_desktops_on_display(number_to_remove, display_id)\n TSApi.tsapi_removeDesktopsOnDisplay(number_to_remove, display_id)\n end",
"def cmd_setdesktop(*args)\n\t\tif(args.length == 0)\n\t\t\tprint_line(\"Usage: setdesktop [workstation\\\\\\\\desktop]\")\n\t\t\treturn\n\t\tend\n\t\t\n\t\tprint_line(\"Changing to desktop #{args[0]}\")\n\t\tclient.ui.set_desktop(*args)\n\t\treturn true\n\tend",
"def remove_desktops(number_to_remove)\n TSApi.tsapi_removeDesktops(number_to_remove)\n end",
"def desktop?\n data = get_browserdata\n result = data.entries.first['device_type'] == 0\n return result\n end",
"def desktop?\n true # Not nice, but if you're running desktopbrowser, assume running desktop\n end",
"def desktop?\n !deployed?\n end",
"def set_desktop( session=-1, station='WinSta0', name='Default', switch=false )\n request = Packet.create_request( COMMAND_ID_STDAPI_UI_DESKTOP_SET )\n request.add_tlv( TLV_TYPE_DESKTOP_SESSION, session )\n request.add_tlv( TLV_TYPE_DESKTOP_STATION, station )\n request.add_tlv( TLV_TYPE_DESKTOP_NAME, name )\n request.add_tlv( TLV_TYPE_DESKTOP_SWITCH, switch )\n response = client.send_request( request )\n if( response.result == 0 )\n return true\n end\n return false\n end",
"def mobile?\n\tif @desktop_override.nil? || @desktop_override == false\n\t return true\n\telse\n\t return false\n\tend\n end",
"def cmd_setdesktop( *args )\n\t\t\n\t\tswitch = false\n\t\tdsession = -1\n\t\tdstation = 'WinSta0'\n\t\tdname = 'Default'\n\t\t\n\t\tsetdesktop_opts = Rex::Parser::Arguments.new(\n\t\t\t\"-h\" => [ false, \"Help Banner.\" ],\n\t\t\t#\"-s\" => [ true, \"The session (Default: '#{dsession}')\" ],\n\t\t\t\"-w\" => [ true, \"The window station (Default: '#{dstation}')\" ],\n\t\t\t\"-n\" => [ true, \"The desktop name (Default: '#{dname}')\" ],\n\t\t\t\"-i\" => [ true, \"Set this desktop as the interactive desktop (Default: '#{switch}')\" ]\n\t\t)\n\t\t\n\t\tsetdesktop_opts.parse( args ) { | opt, idx, val |\n\t\t\tcase opt\n\t\t\t\twhen \"-h\"\n\t\t\t\t\tprint_line( \"Usage: setdesktop [options]\\n\" )\n\t\t\t\t\tprint_line( \"Change the meterpreters current desktop.\" )\n\t\t\t\t\tprint_line( setdesktop_opts.usage )\n\t\t\t\t\treturn\n\t\t\t\t#when \"-s\"\n\t\t\t\t# dsession = val.to_i\n\t\t\t\twhen \"-w\"\n\t\t\t\t\tdstation = val\n\t\t\t\twhen \"-n\"\n\t\t\t\t\tdname = val\n\t\t\t\twhen \"-i\"\n\t\t\t\t\tswitch = true if ( val =~ /^(t|y|1)/i )\n\t\t\tend\n\t\t}\n\t\t\n\t\tif( client.ui.set_desktop( dsession, dstation, dname, switch ) )\n\t\t\tprint_line( \"#{ switch ? 'Switched' : 'Changed' } to desktop #{dstation}\\\\#{dname}\" )\n\t\telse\n\t\t\tprint_line( \"Failed to #{ switch ? 'switch' : 'change' } to desktop #{dstation}\\\\#{dname}\" )\n\t\tend\n\t\t\n\t\treturn true\n\tend",
"def desktop?\n !deployed?\n end",
"def add_new_measure(workflow, measure_dir_name)\n # first we check if the measure already exists\n workflow['steps'].each do |step|\n if step['measure_dir_name'] == measure_dir_name\n return false\n end\n end\n # if it does not exist we add it\n new_step = {}\n new_step['measure_dir_name'] = measure_dir_name\n workflow['steps'].unshift(new_step)\n return true\n end",
"def IsGraphicalDesktop\n # Get patterns set for installation during desktop selection\n # (see DefaultDesktop::packages_proposal_ID_patterns for the first argument)\n pt = PackagesProposal.GetResolvables(\"DefaultDesktopPatterns\", :pattern)\n Builtins.contains(pt, \"x11\")\n end",
"def desktop_request?\n !mobile_request?\n end",
"def manage_existing() \n Manager.list_windows(connection,screen).map do |w|\n manage(w)\n end\n \n XCB::flush(connection)\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
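A minimal usage sketch, not part of the dataset rows above: per the description, add_desktops reports success via its boolean return value, so a caller can branch on it. The count of 1 is purely illustrative.

if TotalSpaces2.add_desktops(1)
  puts "Added a desktop"
else
  puts "Could not add a desktop (count was zero or the 16-desktop limit would be exceeded)"
end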
Add desktops There can be at most 16 desktops unless the display has collected some when a secondary display has been unplugged. Returns true on success, false if number_to_add was zero or would result in more than 16 desktops. The on_layout_change notification will be sent if a change was made. display_id = TotalSpaces2.main_display[:display_id] TotalSpaces2.add_desktops_on_display(1, display_id)
|
def add_desktops_on_display(number_to_add, display_id)
TSApi.tsapi_addDesktopsOnDisplay(number_to_add, display_id)
end
|
[
"def add_desktops(number_to_add)\n TSApi.tsapi_addDesktopsOnDisplay(number_to_add, 0)\n end",
"def add_desktops(number_to_add)\n TSApi.tsapi_addDesktops(number_to_add)\n end",
"def remove_desktops_on_display(number_to_remove, display_id)\n TSApi.tsapi_removeDesktopsOnDisplay(number_to_remove, display_id)\n end",
"def remove_desktops(number_to_remove)\n TSApi.tsapi_removeDesktopsOnDisplay(number_to_remove, 0)\n end",
"def desktop?\n @device == :desktop\n end",
"def remove_desktops(number_to_remove)\n TSApi.tsapi_removeDesktops(number_to_remove)\n end",
"def desktops\n @desktops ||= if available?\n Libatspi.get_desktop_count.times.map do |idx|\n Desktop.new(Libatspi.get_desktop(idx))\n end\n else\n []\n end\n end",
"def desktop?\n data = get_browserdata\n result = data.entries.first['device_type'] == 0\n return result\n end",
"def cmd_setdesktop(*args)\n\t\tif(args.length == 0)\n\t\t\tprint_line(\"Usage: setdesktop [workstation\\\\\\\\desktop]\")\n\t\t\treturn\n\t\tend\n\t\t\n\t\tprint_line(\"Changing to desktop #{args[0]}\")\n\t\tclient.ui.set_desktop(*args)\n\t\treturn true\n\tend",
"def add_display(display)\n iter = @display_model.append\n iter.set_value(DISPLAY_COLUMN_NAME, display.name)\n iter.set_value(DISPLAY_COLUMN_DISPLAY, display)\n\n handler_id = display.signal_connect('closed') do\n\tdisplay_closed_cb(display)\n end\n\n signal_connect('destroy') do\n\tdisplay.signal_handler_disconnect(handler_id)\n end\n end",
"def desktop?\n !deployed?\n end",
"def mobile?\n\tif @desktop_override.nil? || @desktop_override == false\n\t return true\n\telse\n\t return false\n\tend\n end",
"def sync_all_displays\n # TODO: add time window here?\n Doogle::Display.find_each do |display|\n sync_single_display(display)\n end\n true\n end",
"def desktop?\n true # Not nice, but if you're running desktopbrowser, assume running desktop\n end",
"def set_desktop( session=-1, station='WinSta0', name='Default', switch=false )\n request = Packet.create_request( COMMAND_ID_STDAPI_UI_DESKTOP_SET )\n request.add_tlv( TLV_TYPE_DESKTOP_SESSION, session )\n request.add_tlv( TLV_TYPE_DESKTOP_STATION, station )\n request.add_tlv( TLV_TYPE_DESKTOP_NAME, name )\n request.add_tlv( TLV_TYPE_DESKTOP_SWITCH, switch )\n response = client.send_request( request )\n if( response.result == 0 )\n return true\n end\n return false\n end",
"def desktop_request?\n !mobile_request?\n end",
"def desktop?\n !deployed?\n end",
"def cmd_setdesktop( *args )\n\t\t\n\t\tswitch = false\n\t\tdsession = -1\n\t\tdstation = 'WinSta0'\n\t\tdname = 'Default'\n\t\t\n\t\tsetdesktop_opts = Rex::Parser::Arguments.new(\n\t\t\t\"-h\" => [ false, \"Help Banner.\" ],\n\t\t\t#\"-s\" => [ true, \"The session (Default: '#{dsession}')\" ],\n\t\t\t\"-w\" => [ true, \"The window station (Default: '#{dstation}')\" ],\n\t\t\t\"-n\" => [ true, \"The desktop name (Default: '#{dname}')\" ],\n\t\t\t\"-i\" => [ true, \"Set this desktop as the interactive desktop (Default: '#{switch}')\" ]\n\t\t)\n\t\t\n\t\tsetdesktop_opts.parse( args ) { | opt, idx, val |\n\t\t\tcase opt\n\t\t\t\twhen \"-h\"\n\t\t\t\t\tprint_line( \"Usage: setdesktop [options]\\n\" )\n\t\t\t\t\tprint_line( \"Change the meterpreters current desktop.\" )\n\t\t\t\t\tprint_line( setdesktop_opts.usage )\n\t\t\t\t\treturn\n\t\t\t\t#when \"-s\"\n\t\t\t\t# dsession = val.to_i\n\t\t\t\twhen \"-w\"\n\t\t\t\t\tdstation = val\n\t\t\t\twhen \"-n\"\n\t\t\t\t\tdname = val\n\t\t\t\twhen \"-i\"\n\t\t\t\t\tswitch = true if ( val =~ /^(t|y|1)/i )\n\t\t\tend\n\t\t}\n\t\t\n\t\tif( client.ui.set_desktop( dsession, dstation, dname, switch ) )\n\t\t\tprint_line( \"#{ switch ? 'Switched' : 'Changed' } to desktop #{dstation}\\\\#{dname}\" )\n\t\telse\n\t\t\tprint_line( \"Failed to #{ switch ? 'switch' : 'change' } to desktop #{dstation}\\\\#{dname}\" )\n\t\tend\n\t\t\n\t\treturn true\n\tend",
"def send_by_desktop?(resource)\n\t\ttrue\n\tend"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Remove desktops The highest numbered desktops are removed. Removing a desktop you are currently on will result in TotalSpaces2 switching to another desktop. Any windows present on a desktop being removed will be moved to one of the remaining desktops. Returns true on success, false if number_to_remove was zero or would result in fewer than 1 desktop remaining. The on_layout_change notification will be sent if a change was made. TotalSpaces2.remove_desktops(1)
|
def remove_desktops(number_to_remove)
TSApi.tsapi_removeDesktopsOnDisplay(number_to_remove, 0)
end
|
[
"def remove_desktops(number_to_remove)\n TSApi.tsapi_removeDesktops(number_to_remove)\n end",
"def remove_desktops_on_display(number_to_remove, display_id)\n TSApi.tsapi_removeDesktopsOnDisplay(number_to_remove, display_id)\n end",
"def add_desktops(number_to_add)\n TSApi.tsapi_addDesktopsOnDisplay(number_to_add, 0)\n end",
"def remove_desktop\n disposable = ['.localized', '.DS_Store']\n ignored = ['.', '..'] + disposable\n files = Dir.entries(DESKTOP_DIR).reject {|f| ignored.include? f}\n\n if files.empty?\n FileUtils.cd(DESKTOP_DIR)\n FileUtils.rm_f(disposable)\n `sudo rmdir #{DESKTOP_DIR}`\n else\n # For safety, you must do any house-cleaning yourself before proceeding\n puts \"#{DESKTOP_DIR} is not empty\"\n end\nend",
"def destroy\n @desktop.destroy\n respond_to do |format|\n format.html { redirect_to desktops_url }\n format.json { head :no_content }\n end\n end",
"def delete_screens(host_names)\n host_names.each { |name| @zabbix.delete_screen_query(name) }\n end",
"def add_desktops(number_to_add)\n TSApi.tsapi_addDesktops(number_to_add)\n end",
"def desktops\n @desktops ||= if available?\n Libatspi.get_desktop_count.times.map do |idx|\n Desktop.new(Libatspi.get_desktop(idx))\n end\n else\n []\n end\n end",
"def desktop?\n data = get_browserdata\n result = data.entries.first['device_type'] == 0\n return result\n end",
"def add_desktops_on_display(number_to_add, display_id)\n TSApi.tsapi_addDesktopsOnDisplay(number_to_add, display_id)\n end",
"def desktop?\n @device == :desktop\n end",
"def delete_screen(display_id, screen_id)\n delete \"commandcenter/displays/#{display_id}/screens/#{screen_id}\"\n end",
"def desktop?\n true # Not nice, but if you're running desktopbrowser, assume running desktop\n end",
"def remove_mobile(mobile)\r\n remove_global_mobile(mobile)\r\n @mobiles.delete(mobile)\r\n return\r\n end",
"def destroy\n @desktop_computer.destroy\n respond_to do |format|\n format.html { redirect_to desktop_computers_url, notice: 'Desktop computer was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def remove_sliders\n size = @sliders.size\n @sliders.clear\n @dialog.execute_script(\"remove_sliders(); update_size();\") if @dialog\n size\n end",
"def remove_mobile_platform(value)\n @children['mobile-platform'][:value].delete(value)\n end",
"def remove_sliders\n size = @sliders.size\n @sliders.clear\n @dialog.execute_script(\"remove_sliders(); size_changed();\") if @dialog\n size\n end",
"def unlock_desktop(unlock=true)\n request = Packet.create_request(COMMAND_ID_STDAPI_UI_UNLOCK_DESKTOP)\n request.add_tlv(TLV_TYPE_BOOL, unlock)\n client.send_request(request)\n return true\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Remove desktops The highest numbered desktops are removed. Removing a desktop you are currently on will result in TotalSpaces2 switching to another desktop. Any windows present on a desktop being removed will be moved to one of the remaining desktops. Returns true on success, false if number_to_remove was zero or would result in fewer than 1 desktop remaining. The on_layout_change notification will be sent if a change was made. display_id = TotalSpaces2.main_display[:display_id] TotalSpaces2.remove_desktops_on_display(1, display_id)
|
def remove_desktops_on_display(number_to_remove, display_id)
TSApi.tsapi_removeDesktopsOnDisplay(number_to_remove, display_id)
end
|
[
"def remove_desktops(number_to_remove)\n TSApi.tsapi_removeDesktopsOnDisplay(number_to_remove, 0)\n end",
"def remove_desktops(number_to_remove)\n TSApi.tsapi_removeDesktops(number_to_remove)\n end",
"def add_desktops_on_display(number_to_add, display_id)\n TSApi.tsapi_addDesktopsOnDisplay(number_to_add, display_id)\n end",
"def add_desktops(number_to_add)\n TSApi.tsapi_addDesktopsOnDisplay(number_to_add, 0)\n end",
"def delete_screens(host_names)\n host_names.each { |name| @zabbix.delete_screen_query(name) }\n end",
"def remove_desktop\n disposable = ['.localized', '.DS_Store']\n ignored = ['.', '..'] + disposable\n files = Dir.entries(DESKTOP_DIR).reject {|f| ignored.include? f}\n\n if files.empty?\n FileUtils.cd(DESKTOP_DIR)\n FileUtils.rm_f(disposable)\n `sudo rmdir #{DESKTOP_DIR}`\n else\n # For safety, you must do any house-cleaning yourself before proceeding\n puts \"#{DESKTOP_DIR} is not empty\"\n end\nend",
"def delete_screen(display_id, screen_id)\n delete \"commandcenter/displays/#{display_id}/screens/#{screen_id}\"\n end",
"def add_desktops(number_to_add)\n TSApi.tsapi_addDesktops(number_to_add)\n end",
"def destroy\n @desktop.destroy\n respond_to do |format|\n format.html { redirect_to desktops_url }\n format.json { head :no_content }\n end\n end",
"def desktops\n @desktops ||= if available?\n Libatspi.get_desktop_count.times.map do |idx|\n Desktop.new(Libatspi.get_desktop(idx))\n end\n else\n []\n end\n end",
"def delete_display(display_id)\n delete \"commandcenter/displays/#{display_id}\"\n end",
"def desktop?\n data = get_browserdata\n result = data.entries.first['device_type'] == 0\n return result\n end",
"def desktop?\n @device == :desktop\n end",
"def unlock_desktop(unlock=true)\n request = Packet.create_request(COMMAND_ID_STDAPI_UI_UNLOCK_DESKTOP)\n request.add_tlv(TLV_TYPE_BOOL, unlock)\n client.send_request(request)\n return true\n end",
"def check_display\n\t\tif !GroupDisplay.where(:display_id => self.display_id).exists?\n\t\t\tDisplay.destroy(self.display_id)\n\t\tend\n\tend",
"def remove_mobile(mobile)\r\n remove_global_mobile(mobile)\r\n @mobiles.delete(mobile)\r\n return\r\n end",
"def detach\n if @client\n response = @client.send_command( Command.new( \"screen_del #{self.id}\" ) )\n \n if response.successful?\n @client.add_message( \"Screen '#{self.id}' detached from client '#{@client.name}'\" )\n @client = nil\n \n return true\n end\n else\n add_message \"Error: Failed to detach screen '#{self.id}' from '#{@client.name}' (#{response.message})\"\n return false\n end\n end",
"def unlock_desktop(unlock=true)\n\t\trequest = Packet.create_request('stdapi_ui_unlock_desktop')\n\t\trequest.add_tlv(TLV_TYPE_BOOL, unlock)\n\t\tresponse = client.send_request(request)\n\t\treturn true\n\tend",
"def desktop?\n true # Not nice, but if you're running desktopbrowser, assume running desktop\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Move a particular window to the front and activate it. This might be useful after moving windows to other desktops.
|
def set_front_window(window_id)
TSApi.tsapi_setFrontWindow(window_id)
end
|
[
"def bring_to_front\n \t\tautoit.WinActivate title, ''\t\t\n \tend",
"def activate\n @window.activate\n end",
"def activate_process_window( pid )\n pid , hwnd = find_parent_process_with_hwnd(pid)\n wsh.AppActivate(pid) if pid\n end",
"def window_activate(title, text = nil)\n win.WinActivate(title, text).nil?\n end",
"def window_focus()\n do_command(\"windowFocus\", [])\n end",
"def activate_current_window\n if @item_window.index > -1\n @item_window.activate\n else\n @category_window.activate\n end\n end",
"def set_focus\n main_window.set_focus if running?\n end",
"def activate\n Window.functions[__method__] ||= AU3_Function.new(\"WinActivate\", 'SS')\n Window.functions[__method__].call(@title.wide, @text.wide)\n active?\n end",
"def focus_previous_window\n window = get_current_workspace_managed_window\n if window\n previous_window = windawesome.current_workspace.get_previous_window window\n windawesome.switch_to_application previous_window.hWnd if previous_window\n elsif windawesome.current_workspace.get_windows_count > 0\n windawesome.switch_to_application windawesome.current_workspace.get_windows.first.value.hWnd\n end\nend",
"def focus_previous_window\r\n window = get_current_workspace_managed_window\r\n if window\r\n previous_window = windawesome.current_workspace.get_previous_window window\r\n windawesome.switch_to_application previous_window.hWnd if previous_window\r\n elsif windawesome.current_workspace.get_windows_count > 0\r\n windawesome.switch_to_application windawesome.current_workspace.get_windows.first.value.hWnd\r\n end\r\nend",
"def bring_to_top!\n ret=WinUser.BringWindowToTop(hwnd)\n ret != WIN_FALSE\n end",
"def focus_window(win=nil)\n window(win) << '.dialog().dialog(\"focus\");'\n nil\n end",
"def set_focus\n if is_window?\n # if current process was the last to receive input, we can be sure that\n # SetForegroundWindow will be allowed. Send the shift key to whatever has\n # the focus now. This allows IRB to set_focus.\n keystroke(VK_SHIFT)\n ret = SetForegroundWindow(handle)\n logger.warn(\"SetForegroundWindow failed\") if ret == 0\n end\n end",
"def activate_vrc_window!\r\n @vrc_win.activate\r\n end",
"def switch_to_previous_window\n @window_id -= 1\n if @window_id < 0\n # wrap back to the last\n @window_id = @browser.windows.count - 1\n end\n\n @browser.windows[@window_id].use\n end",
"def active_window\n window_pointer = FFI::MemoryPointer.new :ulong, 1\n XDo::FFILib.xdo_window_get_active @_pointer, window_pointer\n XDo::Window.new self, window_pointer.read_ulong\n end",
"def activate_win_color\n @windows[Win_Config].win_color.activate\n @windows[Win_Config].win_color.show\n @windows[Win_Config].win_color.index = 0\n @windows[Win_Config].win_color.y = @windows[Win_Config].cursor_rect.y\n end",
"def activate_terminal_window!\r\n @terminal_win.activate\r\n end",
"def focused_window\n window_pointer = FFI::MemoryPointer.new :ulong, 1\n XDo::FFILib.xdo_window_get_focus @_pointer, window_pointer\n XDo::Window.new self, window_pointer.read_ulong\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
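A brief illustrative sketch, not taken from the dataset row above: window_id is a hypothetical identifier obtained from whatever call enumerated or moved the window beforehand; only set_front_window itself is confirmed by the row.

window_id = 12345  # hypothetical window identifier obtained elsewhere
TotalSpaces2.set_front_window(window_id)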
Bind an app to a space. The bundle_id is normally in the format "com.apple.mail" Setting the space_uuid to AllSpaces will result in an app appearing on every desktop. Setting the space_uuid to nil will delete the setting for the given bundle_id. TotalSpaces2.bind_app_to_space("com.apple.mail", "AllSpaces")
|
def bind_app_to_space(bundle_id, space_uuid)
TSApi.tsapi_bindAppToSpace(bundle_id, space_uuid)
end
|
[
"def bind_app_services(app, service_instance_guid)\n service_instance = @client.service_instance(service_instance_guid)\n service_binding = @client.service_binding\n service_binding.app = app\n service_binding.service_instance = service_instance\n app.bind(service_instance)\n\n end",
"def update(app_name, state, instances, memory, services, urls, binding_object, current_space, apps_list)\n #cloning the app object\n app = nil\n apps_list.each do |app_item|\n app = app_item.dup if app_item.name == app_name\n end\n\n app_guid = app.guid\n application = @client.app(app_guid)\n\n #modify app details\n info_ln(\"Setting up #{instances} instances for the app, each with #{memory}MB ...\")\n\n application.total_instances = instances\n application.memory = memory\n\n #service bindings\n info_ln(\"Updating service bindings ...\")\n parsed_services = JSON.parse(services)\n app_services = app.services\n parsed_services.each do |service|\n service_name = service[\"name\"]\n element = app_services.find{ |serv| serv.name == service_name }\n\n if element == nil\n begin\n info(\" Binding service '#{service_name} ... \")\n bind_app_services(application, service['guid'])\n ok_ln(\"OK\")\n rescue => ex\n error_ln(\"Failed\")\n warning_ln(\" #{ex.message}\")\n end\n end\n end\n\n app_services.each do |service|\n service_name = service.name\n element = parsed_services.find { |serv| serv['name'] == service_name }\n if element == nil\n begin\n info(\" Unbinding service '#{service_name} ... \")\n unbind_app_services(application, service.guid)\n ok_ln(\"OK\")\n rescue => ex\n error_ln(\"Failed\")\n warning_ln(\" #{ex.message}\")\n end\n end\n end\n\n #url bindings\n info_ln(\"Updating URL bindings ...\")\n parsed_urls = JSON.parse(urls)\n app_uris = app.uris\n parsed_urls.each do |url|\n url_host = url[\"host\"]\n element = app_uris.find{ |app_url| app_url.host == url_host && app_url.domain == url['domain'] }\n if element == nil\n begin\n info(\" Binding url '#{url_host}' ...\")\n binding_object.create(app_guid, current_space, url['domain_guid'], url_host)\n ok_ln(\"OK\")\n rescue => ex\n error_ln(\"Failed\")\n warning_ln(\" #{ex.message}\")\n end\n end\n end\n\n\n app_uris.each do |uri|\n uri_host = uri.host\n element = parsed_urls.find { |url| url['host'] == uri_host }\n if element == nil\n begin\n info(\" Unbinding url '#{uri_host}' ...\")\n unbind_app_url(application, uri_host, uri.domain)\n ok_ln(\"OK\")\n rescue => ex\n error_ln(\"Failed\")\n warning_ln(\" #{ex.message}\")\n end\n end\n end\n\n info_ln(\"#{app_name} is now being updated ...\")\n #applying the current state\n\n if state == 'true'\n begin\n info(\"Starting the app ...\")\n application.stop!\n application.start!\n ok_ln(\"OK\")\n rescue => ex\n error_ln(\"Failed\")\n warning_ln(\" #{ex.message}\")\n end\n else\n begin\n info(\"Stopping the app ...\")\n application.stop!\n ok_ln(\"OK\")\n rescue => ex\n error_ln(\"Failed\")\n warning_ln(\" #{ex.message}\")\n end\n end\n\n begin\n info(\"Applying changes ...\")\n application.update!\n ok_ln(\"OK\")\n rescue => ex\n error_ln(\"Failed\")\n warning_ln(\" #{ex.message}\")\n end\n\n end",
"def assign_to(app)\n update_attribute(:application_for_offering_id, app.id) if available?\n end",
"def launch_app_in_namespace(app)\n Log.info(\"Launching app #{app.colorize(:cyan)} in the #{@config.namespace.colorize(:cyan)} namespace\", newline:false)\n pid = spawn(\"ip netns exec #{@config.namespace} sudo -H -u #{User.name} bash -c '#{Net.proxy_export}#{app}' &> /dev/null\")\n Process.detach(pid)\n\n # Check if the app is running\n begin\n Process.getpgid(pid)\n Log.puts(\"...success!\".colorize(:green), stamp:false)\n rescue Errno::ESRCH\n Log.puts(\"...failed!\".colorize(:red), stamp:false)\n raise SystemExit\n end\n\n return pid\n end",
"def application_sid(sid, **keyword_args)\n append(ApplicationSid.new(sid, **keyword_args))\n end",
"def appstore_apps_labels(app_id, label_id, space_id)\n response = connection.put(get_appstore_apps_labels_url(app_id, label_id)) do |request|\n request.params[:adminDeviceSpaceId] = space_id\n end\n\n if response.success?\n AppStoreAppsLabelsResponse.new(response.body)\n end\n end",
"def app_with_pid pid\n AX::Application.new pid\n end",
"def application_for_pid pid\n raise ArgumentError, 'pid must be greater than 0' unless pid > 0\n AXUIElementCreateApplication(pid)\n end",
"def application_with_pid pid\n AX::Element.process AX.application_for_pid pid\n end",
"def bind(namespace)\n # Over-write an empty prefix\n uri = namespace.uri.to_s\n @uri_binding[uri] = namespace unless namespace.prefix.to_s.empty?\n @uri_binding[uri] ||= namespace\n @nsbinding[namespace.prefix.to_s] = namespace\n end",
"def outgoing_application_sid=(_arg0); end",
"def register_application(app_name, namespace, public_hostname)\n # create an A record for the application in the domain\n fqdn = \"#{app_name}-#{namespace}.#{@domain_suffix}\"\n raise DNSException.new unless system %{\nnsupdate <<EOF\nkey #{@keyname} #{@keyvalue}\nserver #{@server} #{@port}\nupdate add #{fqdn} 60 CNAME #{public_hostname}\nsend\nquit\nEOF\n }\n end",
"def update_app_list\n # Differentiate between a null app_nids params and no app_nids params\n return unless params[:organization].key?(:app_nids) && (desired_nids = Array(params[:organization][:app_nids]))\n\n existing_apps = @organization.app_instances.active\n\n existing_apps.each do |app_instance|\n desired_nids.delete(app_instance.app.nid) || app_instance.terminate\n end\n\n desired_nids.each do |nid|\n begin\n @organization.app_instances.create(product: nid)\n rescue => e\n Rails.logger.error { \"#{e.message} #{e.backtrace.join(\"\\n\")}\" }\n end\n\n end\n\n # Force reload\n existing_apps.reload\n end",
"def app=(value)\n @app = value\n end",
"def transfer\n unless target = shift_argument\n error(\"Usage: heroku sharing:transfer EMAIL\\nMust specify EMAIL to transfer an app.\")\n end\n validate_arguments!\n org_from_app!\n\n action(\"Transferring #{app} to #{target}\") do\n if org || !target.include?('@')\n locked = options[:locked]\n\n org_api.transfer_app(target, app, locked)\n display(\"App is locked. Organization members must be invited to access.\") if locked\n\n else\n api.put_app(app, \"transfer_owner\" => target)\n end\n end\n end",
"def bind_meta_app args={}, &block\n arg_string =\n args.values_at(:key, :listen_to, :respond_on, :application).join(\" \")\n arg_string += \"::#{args[:parameters]}\" if args[:parameters]\n\n application \"bind_meta_app\", arg_string, &block\n end",
"def bind_service(name, appname)\n require_login\n raise CloudFoundry::Client::Exception::BadParams, \"Service Name cannot be blank\" if name.nil? || name.empty?\n raise CloudFoundry::Client::Exception::BadParams, \"Application Name cannot be blank\" if appname.nil? || appname.empty?\n service = service_info(name)\n app = app_info(appname)\n services = app[:services] || []\n service_exists = services.index(name)\n raise CloudFoundry::Client::Exception::BadParams, \"Service [#{name}] is already binded to [#{appname}]\" if service_exists\n app[:services] = services << name\n update_app(appname, app)\n true\n end",
"def bind!(spaces)\n dir = @warrior.direction_of(spaces.first)\n @binded_dir << dir\n @warrior.bind!(dir)\n end",
"def associate_addresses_to_application(app)\n add = addresses(app.application_id)\n app.merge!({ :addresses => add })\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
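An illustrative sketch based only on the description above: binding with "AllSpaces" pins an app to every desktop, and passing nil for the space_uuid removes the setting again.

TotalSpaces2.bind_app_to_space("com.apple.mail", "AllSpaces")  # Mail appears on every desktop
TotalSpaces2.bind_app_to_space("com.apple.mail", nil)          # delete the binding for Mail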
returns the current week, from the specified array of weeks, based on the weeks' start times
|
def get_current_week_from_weeks(weeks)
now = DateTime.now
weeks.each { |week|
if now < week.start_time
return (week.number - 1)
end
}
return weeks.last.number
end
|
[
"def current_week\n week_split.select { |c| c.include?(day) }.flatten\n end",
"def week\n [Time.new(year, month, week_start), Time.new(year, month, week_end)]\n end",
"def week(*weeks)\n TExp::Week.new(normalize_weeks(weeks))\n end",
"def setup_weeks(week_start)\n a_date = self.first\n the_last = self.last\n @weeks = []\n\n while (a_date < the_last)\n @weeks << Koyomi::Week.new(a_date, week_start)\n a_date += WEEK_DAYS\n end\n @weeks\n end",
"def build_week_to_start_time_map(weeks)\n week_to_start_time_map = {}\n weeks.each { |week|\n week_to_start_time_map[week.number] = week.start_time\n }\n return week_to_start_time_map\n end",
"def weeks\n return @weeks if @weeks\n\n @weeks = []\n @weeks_to_show.times do |i|\n root = i.weeks.from_now(@start_date)\n @weeks << Week.new(\n root.beginning_of_week(:sunday).to_date,\n root.end_of_week(:sunday).to_date\n )\n end\n\n @weeks\n end",
"def weeks() 7 * days end",
"def week\n first_day_of_week = @date.monday\n \n days_of_week = []\n 7.times do |time|\n days_of_week << day_and_types(first_day_of_week + time.days)\n end \n \n days_of_week\n end",
"def weeks\n @weeks ||= days.slice_when do |day|\n Date::DAYNAMES[day.wday] == LAST_DAY_OF_THE_WEEK\n end.to_a\n end",
"def dispatch_weeks weeks\n if @gamedays.size < weeks.size\n weeks = remove_weeks weeks, @gamedays.size\n elsif @gamedays.size > weeks.size\n weeks = add_weeks weeks, @gamedays.size\n end\n @gamedays.each.with_index { |gd, i| gd.date = weeks[i] }\n end",
"def add_weeks weeks, gamedays\n span_size = gamedays - weeks.size\n span_before = span_size / 2\n span_after = span_size - span_before\n if @start_week - span_before < 1\n span_after += (span_before - @start_week + 1)\n @start_week = 1\n else\n @start_week -= span_before\n end\n @end_week += span_after\n @span = true\n @start_week.upto(@end_week).to_a\n end",
"def start_at_as_current_week_date\n Time.now.beginning_of_week + ((weekday - 1).days) + start_at_hour.to_i\n end",
"def game_week\n now = DateTime.now\n all_games = NflSchedule.where(year: Date.today.year)\n .order(:start_time)\n week_started = 0\n all_games.each { |game|\n if (week_started < game.week) && (now > game.start_time)\n week_started = game.week\n end\n }\n return week_started\n end",
"def weeks\n result = end_week - start_week + 1\n weeks = Date.new(start_date.year, 12, 31).strftime('%W').to_i\n result < 0 ? result + weeks : result\n end",
"def weeks_since(weeks)\n advance(weeks: weeks)\n end",
"def week\n @week ||= (((date-start_of_month)+1)/7.0).ceil\n end",
"def game_week\n now = DateTime.now\n all_games = NflSchedule.where(year: current_season_year)\n .order(:start_time)\n week_started = 0\n all_games.each { |game|\n if (week_started < game.week) && (now > game.start_time)\n week_started = game.week\n end\n }\n return week_started\n end",
"def days_to_week_start(start_day = Date.beginning_of_week)\n start_day_number = DAYS_INTO_WEEK[start_day]\n current_day_number = wday != 0 ? wday - 1 : 6\n (current_day_number - start_day_number) % 7\n end",
"def build_week_to_start_time_map\n week_to_start_time_map = {}\n Week.where(year: Date.today.year).each { |week|\n week_to_start_time_map[week.number] = week.start_time\n }\n return week_to_start_time_map\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
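A hedged usage sketch for the helper shown above: the Week struct here is a hypothetical stand-in for whatever model supplies number and start_time in the real application.

require 'date'

Week = Struct.new(:number, :start_time)  # hypothetical stand-in for the real Week model
weeks = [Week.new(1, DateTime.now - 7), Week.new(2, DateTime.now + 7)]
get_current_week_from_weeks(weeks)  # => 1, because week 2 has not started yet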
returns the week number based on whether any games in that particular week have already started
|
def game_week
now = DateTime.now
all_games = NflSchedule.where(year: Date.today.year)
.order(:start_time)
week_started = 0
all_games.each { |game|
if (week_started < game.week) && (now > game.start_time)
week_started = game.week
end
}
return week_started
end
|
[
"def game_week\n now = DateTime.now\n all_games = NflSchedule.where(year: current_season_year)\n .order(:start_time)\n week_started = 0\n all_games.each { |game|\n if (week_started < game.week) && (now > game.start_time)\n week_started = game.week\n end\n }\n return week_started\n end",
"def weeks_left\n return 0 if Time.now > self.weeks.where(week_number: number_of_weeks).first.latest_match# == week.week_number\n weeks.each do |week|\n return number_of_weeks - week.week_number + 1 if week.latest_match > Time.now\n end\n end",
"def week\n @week ||= (((date-start_of_month)+1)/7.0).ceil\n end",
"def match_week\n @week ? { 'fiscal_week_id' => @week.to_i } : {}\n end",
"def number_of_days_worked_in_a_week\n count = 0\n self.working_days.each do |day|\n count += 1 if day[1] == '1'\n end\n count\n end",
"def assign_week\n self.week ||= Checkin.week_integer_for_time(self.created_at || Time.current, self.startup.present? ? self.startup.checkin_offset : Checkin.default_offset)\n true\n end",
"def get_week(week)\n #check how much cweeks the current year has\n year = @@htwk[\"semester\"].scan /\\d+/\n cweek_count = Date.civil(year[0].to_i,12,31).cweek\n\n week = week.to_i\n\n if week > cweek_count and cweek_count != 1 then\n week -= cweek_count\n end\n week\nend",
"def weeks\n result = end_week - start_week + 1\n weeks = Date.new(start_date.year, 12, 31).strftime('%W').to_i\n result < 0 ? result + weeks : result\n end",
"def valid_week?(week)\n (1..21).include?(week.to_i)\n end",
"def week_start\n\ti = 1\n\twhile i < RC_C.num_rows\n\t\ttmp = RC_C.rows[i][0].split(\" \")[0].split(\"/\")\n\t\tdate = Date.parse(tmp[2] + '-' + tmp[0] + '-' + tmp[1])\n\t\tif (Date.parse(\"wednesday\") - 7) <= date\n\t\t\treturn i\n\t\tend\n\t\ti += 1\n\tend\n\treturn i\nend",
"def at_least_week?(record)\n return calculate_length(record) >= 7\n end",
"def week_start\n\ti = 1\n\twhile i < RC_C.num_rows\n\t\ttmp = RC_C.rows[i][0].split(\" \")[0].split(\"/\")\n\t\tdate = Date.parse(tmp[2] + '-' + tmp[0] + '-' + tmp[1])\n\t\tif (Date.parse(\"monday\") - 7) <= date\n\t\t\treturn i\n\t\tend\n\t\ti += 1\n\tend\n\treturn i\nend",
"def get_bye_week(matchups)\n numOfGames = matchups[0].size\n for y in 0..(numOfGames - 1)\n if (!matchups[1][y]) # if the team doesn't have an id then this is the bye week\n return [1, y]\n elsif (!matchups[0][y])\n return [0, y]\n end\n end\n end",
"def week_number\n\t\tWeek.find(week_id).week_number\n\tend",
"def check_week(target_week)\n $log.info(\"Looking for week #{ target_week } scores, using week #{ @current_week } scores.\")\n raise \"No new scores - nothing to do. Aborting.\" unless target_week == @current_week \n end",
"def weeks() 7 * days end",
"def week_number\n\t\tstart_date.cweek\n\tend",
"def this_week?\n\t\trange = (Time.now .. Time.now.sunday)\n\t\trange = (Time.now .. Time.now.next_week) if(Time.now.sunday?)\n\t\tself.showtimes.detect{ |st| range.cover? st.timestamp }\n\tend",
"def number_of_game_weeks_with_fixtures(no_users)\n if no_users.even?\n # No byes, every one plays each other, so number of rounds is no_users - 1\n NUMBER_OF_GAME_WEEKS - (NUMBER_OF_GAME_WEEKS % (no_users - 1))\n else\n # Each user has a bye per round + plays everyone once, so number of rounds is no_users\n NUMBER_OF_GAME_WEEKS - (NUMBER_OF_GAME_WEEKS % no_users)\n end\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
GET /university_profiles/1 GET /university_profiles/1.json
|
def show
@university_profile = UniversityProfile.find(params[:id])
respond_to do |format|
format.html # show.html.erb
format.json { render json: @university_profile }
end
end
|
[
"def profiles\n hash = {:username => @username}\n @api.request(\"users/profiles/?#{build_query_string(hash)}\")\n end",
"def profile_detail()\n get(\"profile\")\n end",
"def get_default_profile \n get(\"/profiles.json/default\")\nend",
"def get_profiles\n if authenticate\n page = 1\n pages = 0\n total = 0\n begin\n while true\n params = \"?p=#{page}&ps=100\"\n response = @cap_api.get \"#{@cap_profiles}#{params}\"\n if response.status == 200\n data = response.body\n if data['firstPage']\n pages = data['totalPages']\n total = data['totalCount']\n puts \"Retrieved #{page} of #{pages} pages (#{total} profiles).\"\n else\n puts \"Retrieved #{page} of #{pages} pages.\"\n end\n profiles = data['values']\n profiles.each do |profile|\n profile_clean(profile)\n # split out the publication data to accommodate size limit on mongo\n id = profile['profileId']\n profile['_id'] = id # use 'profileId' as the mongo _id\n pres = profile.delete('presentations') || []\n presentations_save(id, pres)\n pubs = profile.delete('publications') || []\n publications_save(id, pubs)\n profile_save(profile)\n end\n page += 1\n break if data['lastPage']\n else\n msg = \"Failed to GET profiles page #{page}: #{response.status}\"\n @config.logger.error msg\n puts msg\n break\n end\n end\n puts \"Processed #{total} profiles.\"\n rescue => e\n msg = e.message\n binding.pry if @config.debug\n @config.logger.error msg\n ensure\n repo_commit\n end\n end\n end",
"def index\n @skill_user_profiles = SkillUserProfile.all\n\n render json: @skill_user_profiles\n end",
"def get_user_profile_by_profile_id(profile_id)\n path = \"/d2l/api/lp/#{$lp_ver}/profile/#{profile_id}\"\n _get(path)\n # Returns UserProfile JSON data block\nend",
"def show\n @webprofile = Webprofile.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @webprofile }\n end\n end",
"def get_current_user_profile\n path = \"/d2l/api/lp/#{$lp_ver}/profile/myProfile\"\n _get(path)\n # Returns: UserProfile JSON data block\nend",
"def show\n @official_profile = OfficialProfile.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @official_profile }\n end\n end",
"def new\n @university_profile = UniversityProfile.new\n @university_profile.university_courses || @user.build_university_courses\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @university_profile }\n end\n end",
"def show\n @private_profile = PrivateProfile.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @private_profile }\n end\n end",
"def get_individual_profile_json thisURI\n #vivo_app = ENV[\"VIVO_APP_URL\"]\n vivo_app = Vivotoblacklight.configuration.vivo_app_url\n #Throw an exception if the app url has not been set\n Rails.logger.debug(\"VIVO APP is #{vivo_app.inspect}\")\n result= {}\n thisURI = CGI::escape(thisURI)\n url = URI.parse(vivo_app + \"/individual?uri=\" + thisURI + \"&action=defaultJSON\")\n begin\n resp = Net::HTTP.get_response(url)\n rescue\n result = nil\n else\n data = resp.body\n result = JSON.parse(data)\n end\n return result\n end",
"def index\n @universities = University.all\n render json: @universities\n end",
"def get_universities\n\t\t@universities = University.all\n\t\trender :json => (@universities.map { |u| u.as_json(:only => [:id,:name, :acronym, :logo])}).to_json\n\tend",
"def new\n @profile = current_user.profiles.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @profile }\n end\n end",
"def get_profile\n path = self.api_root + '/register/profile'\n process_firecloud_request(:get, path)\n end",
"def show\n @university = University.find(params[:id])\n end",
"def create\n @university_profile = UniversityProfile.new(params[:university_profile])\n \n respond_to do |format|\n if @university_profile.save\n format.html { redirect_to @university_profile, notice: 'University profile was successfully created.' }\n format.json { render json: @university_profile, status: :created, location: @university_profile }\n else\n format.html { render action: \"new\" }\n format.json { render json: @university_profile.errors, status: :unprocessable_entity }\n end\n end\n end",
"def profiles\n collection(\"profiles\", paged: true)\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
GET /university_profiles/new GET /university_profiles/new.json
|
def new
@university_profile = UniversityProfile.new
@university_profile.university_courses || @user.build_university_courses
respond_to do |format|
format.html # new.html.erb
format.json { render json: @university_profile }
end
end
|
[
"def new\n @profile = current_user.profiles.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @profile }\n end\n end",
"def create\n @university_profile = UniversityProfile.new(params[:university_profile])\n \n respond_to do |format|\n if @university_profile.save\n format.html { redirect_to @university_profile, notice: 'University profile was successfully created.' }\n format.json { render json: @university_profile, status: :created, location: @university_profile }\n else\n format.html { render action: \"new\" }\n format.json { render json: @university_profile.errors, status: :unprocessable_entity }\n end\n end\n end",
"def new\n @user = User.find(params[:user_id])\n @profile = @user.profile == nil ? Profile.new : @user.profile\n @from_profiles_new = true\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @profile }\n end\n end",
"def new\n @profile = current_user.profiles.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @profile }\n format.json { render :json => @profile }\n\n end\n end",
"def new\n @university = University.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @university }\n end\n end",
"def new\n @user_profile = UserProfile.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @user_profile }\n end\n end",
"def new\n @profile = Profile.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @profile }\n end\n end",
"def new\n @webprofile = Webprofile.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @webprofile }\n end\n end",
"def new\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @university }\n end\n end",
"def new\n @official_profile = OfficialProfile.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @official_profile }\n end\n end",
"def new\n @private_profile = PrivateProfile.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @private_profile }\n end\n end",
"def create\n @profile = current_user.profiles.new(params[:profile])\n\n respond_to do |format|\n if @profile.save\n format.html { redirect_to @profile, notice: 'Profile was successfully created.' }\n format.json { render json: @profile, status: 201, location: @profile }\n else\n format.html { render action: \"new\" }\n format.json { render json: @profile.errors, status: 422 }\n end\n end\n end",
"def new\n @profile_attribute = current_user.profile_attributes.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @profile_attribute }\n format.json { render :json => @profile_attribute }\n end\n end",
"def new\n @uniprot = Uniprot.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @uniprot }\n end\n end",
"def new\n @interested_university = InterestedUniversity.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @interested_university }\n end\n end",
"def new\n @profile_type = ProfileType.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @profile_type }\n end\n end",
"def new\n @profilepage = Profilepage.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @profilepage }\n end\n end",
"def new\n @provisioning_profile = current_user.provisioning_profiles.new\n end",
"def new\n @ProfileType = ProfileType.new\n\n respond_to do |format|\n format.html # new.html.haml\n format.json { render json: @ProfileType }\n end\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
POST /university_profiles POST /university_profiles.json
|
def create
@university_profile = UniversityProfile.new(params[:university_profile])
respond_to do |format|
if @university_profile.save
format.html { redirect_to @university_profile, notice: 'University profile was successfully created.' }
format.json { render json: @university_profile, status: :created, location: @university_profile }
else
format.html { render action: "new" }
format.json { render json: @university_profile.errors, status: :unprocessable_entity }
end
end
end
|
[
"def create\n @profile = current_user.profiles.new(params[:profile])\n\n respond_to do |format|\n if @profile.save\n format.html { redirect_to @profile, notice: 'Profile was successfully created.' }\n format.json { render json: @profile, status: 201, location: @profile }\n else\n format.html { render action: \"new\" }\n format.json { render json: @profile.errors, status: 422 }\n end\n end\n end",
"def new\n @university_profile = UniversityProfile.new\n @university_profile.university_courses || @user.build_university_courses\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @university_profile }\n end\n end",
"def create\n @profile = current_user.profiles.build(profile_params)\n\n respond_to do |format|\n if @profile.save\n format.html { redirect_to @profile, notice: 'Profile was successfully created.' }\n format.json { render action: 'show', status: :created, location: @profile }\n else\n format.html { render action: 'new' }\n format.json { render json: @profile.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @university = University.new(params[:university])\n\n respond_to do |format|\n if @university.save\n format.html { redirect_to @university, notice: 'University was successfully created.' }\n format.json { render json: @university, status: :created, location: @university }\n else\n format.html { render action: \"new\" }\n format.json { render json: @university.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @interested_university = InterestedUniversity.new(params[:interested_university])\n\n respond_to do |format|\n if @interested_university.save\n format.html { redirect_to @interested_university, notice: 'Interested university was successfully created.' }\n format.json { render json: @interested_university, status: :created, location: @interested_university }\n else\n format.html { render action: \"new\" }\n format.json { render json: @interested_university.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @profile = Profile.new(profile_params)\n\n if @profile.save\n render json: @profile, status: :created\n else\n render json: @profile.errors, status: :unprocessable_entity\n end\n end",
"def create\n @surgical_profile = SurgicalProfile.new(surgical_profile_params)\n\n # TODO: minimize into single query\n @patient = Patient.query_one_by_id(\n current_user, @surgical_profile.patient_id\n )\n @surgeon = User.find(@surgical_profile.user_id)\n\n respond_to do |format|\n if @surgical_profile.preprocess_and_save()\n format.html { redirect_to patient_surgical_profiles_path(@patient), notice: 'Surgical profile was successfully created.' }\n format.json { render action: 'show', status: :created, location: @surgical_profile }\n else\n format.html { render action: 'new' }\n format.json { render json: @surgical_profile.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @user_surgeon_profile = UserSurgeonProfile.new(user_surgeon_profile_params)\n\n respond_to do |format|\n if @user_surgeon_profile.save\n format.html { redirect_to :back, notice: 'User surgeon profile was successfully created.' }\n format.json { render action: 'show', status: :created, location: @user_surgeon_profile }\n else\n format.html { render action: 'new' }\n format.json { render json: @user_surgeon_profile.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @profile = Profile.new(profile_params)\n\n if @profile.save\n render json: @profile, status: :created, location: [:web, @profile]\n else\n render json: @profile.errors, status: :unprocessable_entity\n end\n end",
"def create\n\t\t# Creating the profile for particular surgeon\n\t\tsurgeon_profile = current_user.setting.build_profile(speciality_name: params[:speciality_name],sub_speciality_name:params[:sub_speciality_name],:medical_school=>params[:medical_school],:residency=>params[:residency],:spe_training=>params[:spe_training],:my_favourite=>params[:my_favourite],:my_hobby=>params[:my_hobby],:more_about=>params[:more_about],:cover_image=>params[:cover_image],:profile_image=>params[:profile_image]) \n\t\t# Condition checking the profile is saved or not\n\t\tif surgeon_profile.save\n\t\t# response to the JSON\n\t\t render json: { success: true,message: \"Profile Successfully Created.\", response: surgeon_profile.as_json },:status=>200\n\t else\n\t render :json=> { success: false, message: surgeon_profile.errors },:status=> 203\n\t end\n\tend",
"def create\n @representative_profile = RepresentativeProfile.new(representative_profile_params)\n\n respond_to do |format|\n if @representative_profile.save\n format.html { redirect_to @representative_profile, notice: 'Representative profile was successfully created.' }\n format.json { render :show, status: :created, location: @representative_profile }\n else\n format.html { render :new }\n format.json { render json: @representative_profile.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @regional_profile = RegionalProfile.new(regional_profile_params)\n\n respond_to do |format|\n if @regional_profile.save\n format.html { redirect_to @regional_profile, notice: 'Regional profile was successfully created.' }\n format.json { render :show, status: :created, location: @regional_profile }\n else\n format.html { render :new }\n format.json { render json: @regional_profile.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @provisioning_profile = current_user.provisioning_profiles.new(params[:provisioning_profile])\n\n if @provisioning_profile.save\n redirect_to provisioning_profiles_url, notice: 'Provisioning profile was successfully created.'\n else\n render action: \"new\"\n end\n end",
"def create\n @webprofile = Webprofile.new(params[:webprofile])\n\n respond_to do |format|\n if @webprofile.save\n format.html { redirect_to @webprofile, notice: 'Webprofile was successfully created.' }\n format.json { render json: @webprofile, status: :created, location: @webprofile }\n else\n format.html { render action: \"new\" }\n format.json { render json: @webprofile.errors, status: :unprocessable_entity }\n end\n end\n end",
"def new\n @profile = current_user.profiles.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @profile }\n end\n end",
"def create\n @profile_university_class = ProfileUniversityClass.new(params[:profile_university_class])\n\n respond_to do |format|\n if @profile_university_class.save\n format.html { redirect_to(@profile_university_class, :notice => 'Profile university class was successfully created.') }\n format.xml { render :xml => @profile_university_class, :status => :created, :location => @profile_university_class }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @profile_university_class.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def create\n @official_profile = OfficialProfile.new(params[:official_profile])\n\n respond_to do |format|\n if @official_profile.save\n format.html { redirect_to @official_profile, notice: 'Official profile was successfully created.' }\n format.json { render json: @official_profile, status: :created, location: @official_profile }\n else\n format.html { render action: \"new\" }\n format.json { render json: @official_profile.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n byebug\n @skill_user_profile = SkillUserProfile.new(skill_user_profile_params)\n\n if @skill_user_profile.save\n render json: @skill_user_profile, status: :created, location: @skill_user_profile\n else\n render json: @skill_user_profile.errors, status: :unprocessable_entity\n end\n end",
"def show\n @university_profile = UniversityProfile.find(params[:id])\n \n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @university_profile }\n end\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
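A hedged client-side sketch (the host, port, and attribute names are assumptions, not from the dataset): the create action above accepts a JSON body keyed by university_profile and responds with 201 on success or 422 with validation errors.

require 'net/http'
require 'json'
require 'uri'

uri = URI("http://localhost:3000/university_profiles.json")    # assumed local host and port
body = { university_profile: { name: "Example University" } }  # attribute names are illustrative
res = Net::HTTP.post(uri, body.to_json, "Content-Type" => "application/json")
puts res.code  # "201" when the profile saves, "422" otherwise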
PUT /university_profiles/1 PUT /university_profiles/1.json
|
def update
@university_profile = UniversityProfile.find(params[:id])
respond_to do |format|
if @university_profile.update_attributes(params[:university_profile])
format.html { redirect_to @university_profile, notice: 'University profile was successfully updated.' }
format.json { head :no_content }
else
format.html { render action: "edit" }
format.json { render json: @university_profile.errors, status: :unprocessable_entity }
end
end
end
|
[
"def update\n if @university.update(university_params)\n render json: @university, status: :ok #, location: @university\n else\n render json: @university.errors, status: :unprocessable_entity\n end\n end",
"def update_current_user_profile_data(user_profile_data)\n # PUT /d2l/api/lp/(version)/profile/myProfile\nend",
"def update\n respond_to do |format|\n if @university.update(university_params)\n format.html { redirect_to @university, notice: 'University was successfully updated.' }\n format.json { respond_with_bip(@university) }\n else\n format.html { render :edit }\n format.json { respond_with_bip(@university) }\n end\n end\n end",
"def update\n @profiles = current_user.profiles.find(params[:id])\n\n respond_to do |format|\n if @profiles.update(profile_params)\n format.html { redirect_to profiles_path, notice: 'Profile was successfully updated.' }\n format.json { render :show, status: :ok, location: @profiles }\n else\n format.html { render :edit }\n format.json { render json: @profiles.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @university = University.find(params[:id])\n\n respond_to do |format|\n if @university.update_attributes(params[:university])\n format.html { redirect_to @university, notice: 'University was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @university.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @api_university.update(api_university_params)\n format.html { redirect_to @api_university, notice: 'University was successfully updated.' }\n format.json { render :show, status: :ok }\n else\n format.html { render :edit }\n format.json { render json: @api_university.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @interested_university = InterestedUniversity.find(params[:id])\n\n respond_to do |format|\n if @interested_university.update_attributes(params[:interested_university])\n format.html { redirect_to @interested_university, notice: 'Interested university was successfully updated.' }\n format.json { head :ok }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @interested_university.errors, status: :unprocessable_entity }\n end\n end\n end",
"def assign_default_profile(args = {}) \n put(\"/profiles.json/#{args[:profileId]}/default\", args)\nend",
"def update\n @official_profile = OfficialProfile.find(params[:id])\n\n respond_to do |format|\n if @official_profile.update_attributes(params[:official_profile])\n format.html { redirect_to @official_profile, notice: 'Official profile was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @official_profile.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @university_profile = UniversityProfile.new(params[:university_profile])\n \n respond_to do |format|\n if @university_profile.save\n format.html { redirect_to @university_profile, notice: 'University profile was successfully created.' }\n format.json { render json: @university_profile, status: :created, location: @university_profile }\n else\n format.html { render action: \"new\" }\n format.json { render json: @university_profile.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @profile = current_user.profiles.find(params[:id])\n\n respond_to do |format|\n if @profile.update_attributes(params[:profile])\n format.html { redirect_to(@profile, :notice => 'Profile was successfully updated.') }\n format.xml { head :ok }\n format.json { render :json => @profile }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @profile.errors, :status => :unprocessable_entity }\n format.json { render :json => {:error => @profile.errors.full_messages} }\n end\n end\n end",
"def update\n @skill_user_profile = SkillUserProfile.find(params[:id])\n\n if @skill_user_profile.update(skill_user_profile_params)\n head :no_content\n else\n render json: @skill_user_profile.errors, status: :unprocessable_entity\n end\n end",
"def update\n @profile_university_class = ProfileUniversityClass.find(params[:id])\n\n respond_to do |format|\n if @profile_university_class.update_attributes(params[:profile_university_class])\n format.html { redirect_to(@profile_university_class, :notice => 'Profile university class was successfully updated.') }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @profile_university_class.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def update\n\t\t# Updating the details according the that particular profile\n\t\t@profile.update_attributes(profile_params)\n\t\t# Redirect to the particular surgeon profile show page\n\t\tredirect_to profile_path(@profile)\n\tend",
"def update\n respond_to do |format|\n @student_profile.interests = student_profile_params[:interests]\n @student_profile.majors = student_profile_params[:majors]\n if @student_profile.update(student_profile_params)\n format.html { redirect_to @student_profile, notice: 'Student profile was successfully updated.' }\n format.json { render :show, status: :ok, location: @student_profile }\n else\n format.html { render :edit }\n format.json { render json: @student_profile.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @profile = current_user.profiles\n\n respond_to do |format|\n if @profile.update_attributes(params[:profile])\n flash[:notice] = 'Profile was successfully updated.'\n format.html { redirect_to(@profile) }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @profile.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @tutor_profile.update(tutor_profile_params)\n format.html { redirect_to @tutor_profile, notice: 'Tutor profile was successfully updated.' }\n format.json { render :show, status: :ok, location: @tutor_profile }\n else\n format.html { render :edit }\n format.json { render json: @tutor_profile.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @tutor_profile.update(tutor_profile_params)\n format.html { redirect_to @tutor_profile, notice: 'Tutor profile was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @tutor_profile.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @interviewer_profile.update(interviewer_profile_params)\n format.html { redirect_to admin_interviewer_profiles_path, notice: 'Interviewer profile was successfully updated.' }\n format.json { render :show, status: :ok, location: @interviewer_profile }\n else\n format.html { render :edit }\n format.json { render json: @interviewer_profile.errors, status: :unprocessable_entity }\n end\n end\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
DELETE /university_profiles/1 DELETE /university_profiles/1.json
|
def destroy
@university_profile = UniversityProfile.find(params[:id])
@university_profile.destroy
respond_to do |format|
format.html { redirect_to university_profiles_url }
format.json { head :no_content }
end
end
|
[
"def destroy\n @profile = Profile.find(params[:id])\n @profile.user.destroy\n @profile.destroy\n\n\n respond_to do |format|\n format.html { redirect_to profiles_url }\n format.json { head :ok }\n end\n end",
"def destroy\n @official_profile = OfficialProfile.find(params[:id])\n @official_profile.destroy\n\n respond_to do |format|\n format.html { redirect_to official_profiles_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @human_profile.destroy\n respond_to do |format|\n format.html { redirect_to human_profiles_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @profile = @user.profile\n @profile.destroy\n\n respond_to do |format|\n format.html { redirect_to profiles_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @user_profile = UserProfile.find(params[:id])\n @user_profile.destroy\n\n respond_to do |format|\n format.html { redirect_to user_profiles_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @userprofile = Userprofile.find(params[:id])\n @userprofile.destroy\n\n respond_to do |format|\n format.html { redirect_to userprofiles_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @user_surgeon_profile.destroy\n respond_to do |format|\n format.html { redirect_to user_surgeon_profiles_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @profile = Profile.find(params[:id])\n @profile.destroy\n\n respond_to do |format|\n format.html { redirect_to profiles_url }\n format.json { head :ok }\n end\n end",
"def destroy\n @personal_profile.destroy\n respond_to do |format|\n format.html { redirect_to resume_path, notice: 'Personal profile was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @representative_profile.destroy\n respond_to do |format|\n format.html { redirect_to representative_profiles_url, notice: 'Representative profile was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @full_profile.destroy\n respond_to do |format|\n format.html { redirect_to full_profiles_url, notice: 'Full profile was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @attorney_profile.destroy\n respond_to do |format|\n format.html { redirect_to attorney_profiles_url, notice: 'Attorney profile was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @alumni_profile = AlumniProfile.find(params[:id])\n @alumni_profile.destroy\n\n respond_to do |format|\n format.html { redirect_to(alumni_profiles_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @captain_profile.destroy\n respond_to do |format|\n format.html { redirect_to profile_path(@captain_profile.user.profile.id), notice: 'Captain profile was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @chef_profile = ChefProfile.find(params[:id])\n @chef_profile.destroy\n\n respond_to do |format|\n format.html { redirect_to chef_profiles_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @regional_profile.destroy\n respond_to do |format|\n format.html { redirect_to regional_profiles_url, notice: 'Regional profile was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @hmm_profile = HmmProfile.find(params[:id])\n @hmm_profile.destroy\n\n respond_to do |format|\n format.html { redirect_to hmm_profiles_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @graduate_profile.destroy\n respond_to do |format|\n format.html { redirect_to graduate_profiles_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @profile_detail.destroy\n respond_to do |format|\n format.html { redirect_to profile_details_url }\n format.json { head :no_content }\n end\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Find the basic blocks in an array of instructions.
|
def basic_blocks(insns)
# Jump and branch targets give us the start of each basic block.
targets = targets(insns)
# Create a basic block for each jump or branch target.
blocks = {}
targets.each_with_index do |start, index|
# Slice the instructions that form this block by looking for where
# the next block starts.
if index == targets.size - 1
last = insns.size
else
last = targets[index + 1]
end
block_insns = insns.slice(start, last - start)
last_insn = block_insns.last
# Create an array for the previous basic blocks, but we'll fill it
# in later.
prev_blocks = []
# Look at the last instruction and its targets to see what the next
# possible basic blocks are.
next_blocks = []
next_blocks.push last_insn[1] if [:jump, :branch].include?(last_insn.first)
next_blocks.push targets[index + 1] unless [:jump, :return].include?(last_insn.first)
blocks[start] = BasicBlock.new(start, block_insns, prev_blocks, next_blocks)
end
    # Now go back and use the information about the basic blocks following
# each block to store the reverse - the basic blocks previous to each
# block.
blocks.values.each do |block|
block.next.each do |n|
blocks[n].prev.push block.start
end
end
blocks
end
|
[
"def build_basic_blocks\n block_starts = find_basic_block_starts\n\n length = 0\n blocks =\n iseq\n .insns\n .grep(Instruction)\n .slice_after do |insn|\n length += insn.length\n block_starts.include?(length)\n end\n\n block_starts\n .zip(blocks)\n .to_h do |block_start, insns|\n # It's possible that we have not detected a block start but still\n # have branching instructions inside of a basic block. This can\n # happen if you have an unconditional jump which is followed by\n # instructions that are unreachable. As of Ruby 3.2, this is\n # possible with something as simple as \"1 => a\". In this case we\n # can discard all instructions that follow branching instructions.\n block_insns =\n insns.slice_after { |insn| insn.branch_targets.any? }.first\n\n [block_start, BasicBlock.new(block_start, block_insns)]\n end\n end",
"def find_basic_block_starts\n block_starts = Set.new([0])\n\n insns.each do |index, insn|\n branch_targets = insn.branch_targets\n\n if branch_targets.any?\n branch_targets.each do |branch_target|\n block_starts.add(labels[branch_target])\n end\n\n block_starts.add(index + insn.length) if insn.falls_through?\n end\n end\n\n block_starts.to_a.sort\n end",
"def find_basic_block_starts; end",
"def prune_basic_blocks(blocks); end",
"def basic_blocks\n @basic_block_collection ||= BasicBlockCollection.new(self)\n end",
"def get_blocks\n if (branches = @cur_state.branches[@tape.val])\n branches.map { |branch| branch.block } \n else\n [@cur_state.default]\n end\n end",
"def connect_basic_blocks(blocks)\n blocks.each do |block_start, block|\n insn = block.insns.last\n\n insn.branch_targets.each do |branch_target|\n block.outgoing_blocks << blocks.fetch(labels[branch_target])\n end\n\n if (insn.branch_targets.empty? && !insn.leaves?) ||\n insn.falls_through?\n fall_through_start = block_start + block.insns.sum(&:length)\n block.outgoing_blocks << blocks.fetch(fall_through_start)\n end\n\n block.outgoing_blocks.each do |outgoing_block|\n outgoing_block.incoming_blocks << block\n end\n end\n end",
"def basic_blocks\n cfg.blocks\n end",
"def by_basic_block(bb, level)\n assert(\"RelationNodeList#by_basic_block: level != :src,:dst\") { [:src,:dst].include?(level) }\n lookup(@basic_block_index[level], bb, \"#{level}-block\", false) || []\n end",
"def look_for_block(game_arr) \n # loop thru the winning index combos and see if we find\n # for each combo, see if there's 2 x's or o's in a row indexes of game_arr\n WIN_PATTERNS.each_with_index do |indexes, i| \n values = game_arr.values_at(*indexes)\n can_block = values.select { |item| item == \"x\"}.size == 2\n \n # now get the empty index they need to block\n if can_block\n # find the index that's empty in the local values list\n index1 = values.find_index { |item| item == EMPTY }\n if (index1)\n # index into the game_array where we'll set computer's mark\n index2 = indexes[index1]\n return index2\n end\n end\n end\n return nil\nend",
"def connect_basic_blocks(blocks); end",
"def prune_basic_blocks(blocks)\n visited = Set.new\n queue = [blocks.fetch(0)]\n\n until queue.empty?\n current_block = queue.shift\n next if visited.include?(current_block)\n\n visited << current_block\n queue.concat(current_block.outgoing_blocks)\n end\n\n blocks.select! { |_, block| visited.include?(block) }\n end",
"def blank_blocks(filter = writable_mifare_blocks, blks = Array.new(16, 0))\n filter.map do |x|\n [x, blks]\n end\n end",
"def blocks\n unless defined?(@blocks)\n bs = block_sizes\n qst = query.starts\n tst = target.starts\n qseqs = query.seqs\n tseqs = target.seqs\n @blocks = (0...block_count).collect do |i|\n SegmentPair.new(query.size, strand, bs[i],\n qst[i], tst[i], qseqs[i], tseqs[i])\n end\n end\n @blocks\n end",
"def each\n\t\tfor addr in (first_offset.address .. last_offset.address)\n\t\t\n\t\t\tflags = @ida.getFlags(addr)\n\t\t\t\n\t\t\tif @ida.isHead(flags) and @ida.isCode(flags)\n\t\t\t\tyield Instruction.new(@ida, addr)\n\t\t\tend\n\t\tend\n\tend",
"def extract_branches blocks\n\n branches = []\n blocks.each do |block|\n block.each do |stmt|\n case stmt\n when GotoStatement\n next if stmt.identifiers.length < 2\n unless stmt.identifiers.length == 2\n fail \"Unexpected goto statement: #{stmt}\"\n end\n\n if annotation = stmt.previous_sibling\n fail \"Expected :branchcond annotation\" unless\n annotation.has_attribute?(:branchcond)\n end\n\n branches.push(stmt)\n end\n end\n end\n return branches\n end",
"def find_blocks(node, block_name)\n return if node.nil?\n\n node.each_child_node(:block).select { |block_node| block_name == block_node.method_name }\n end",
"def calculate_blocks(s, start_i, end_i, *operators)\n # The reason you do this is because on the final pass, Rs won't me removed\n # before it does one last run, often Rs replace brackets and any number\n # multiplied by R is 0\n # If you don't remove the useless elements it all fucks up\n # IF SOMETHING IS NOT DELETED, UNCOMMENT THIS\n s.delete(\"R\")\n op_index = \"\"\n get_logger.debug(\"Calculating formula blocks with operators #{operators} in range #{start_i}..#{end_i} of #{s}\")\n (start_i..end_i).each.with_index(start_i) { |i|\n v = s[i]\n get_logger.debug(\"Checking index #{i} in array, value present #{v}\")\n # First check if operator is valid, this is done to conform to PEDMAS\n if CalcParser.includes_operator(*operators, v)\n op = CalcParser.extract_operator(v)[0]\n get_logger.debug(\"Found operator '#{op}'\")\n if CalcParser.is_scientific_formula(v)\n # Since a scientific formula can have anything in the brackets, we need\n # check inside for a formula, recursive again\n v = v.delete_prefix(CalcParser.extract_prefix(v)[0]).delete_suffix(\")\")\n v = CalcParser.parse(v)\n # Pow uses a special format\n if op == \"pow\"\n s[i] = get_result(op, v[0].to_f, v[1].to_f)\n else\n perform_pedmas(v, 0, v.length - 1)\n s[i] = get_result(op, v[0].to_f)\n end\n next\n end\n if CalcParser.is_square(v)\n number = CalcParser.extract_number(v)[0].to_f\n # There are two situations where ^ can appear, x^ or (x*x)^, we check for both\n # since the parser don't care\n if number == 0\n number = s[i - 1].to_f\n s[i - 1] = \"R\"\n end\n s[i] = get_result(op, number)\n next\n end\n # We just assume that it's standard left and right operand operations at this point, maybe safe?\n left_operand, right_operand = assign_operands(s, i)\n # Important check, makes sure that there are no double or missing operators. Needs tidied and debug messages added\n if !CalcParser.is_number(left_operand) or !CalcParser.is_number(right_operand)\n exit\n end\n # Replaces the operator found with the answer found using the left and right operator\n s[i - 1] = \"R\"\n s[i + 1] = \"R\"\n result = get_result(op, left_operand.to_f, right_operand.to_f)\n get_logger.debug(\"Replace operator '#{v}' at index '#{op_index}' with result '#{result}'\")\n s[i] = result\n break\n end\n }\nend",
"def detect_invalid_blocks\n self.class.combination(@frontier.select(&:invalid?)).detect do |block_array|\n holds_all_syntax_errors?(block_array)\n end || []\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Convert one basic block to a fragment of a graph.
|
def basic_block_to_fragment(insns, ip=0, profile=nil)
# Basic blocks start with a merge node.
merge = Node.new(:merge)
# We're going to build up a list of names and stack values coming into
# this basic block that we're going to need to connect up later when
# we form a complete graph.
names_in = {}
stack_in = []
# Create an initial deoptimisation map.
deopt_map = FrameStateBuilder.new(insns, ip, names_in.dup, stack_in.dup)
last_node = nil
last_control = merge
# We're also going to build up a list of names and stack values that
# are available when this basic block is finished.
names = {}
stack = []
# When we pop a value off the stack, we may be using a value that
# wasn't created within this basic block - the value could have been
# expected to be on the stack already. This proc tries to pop a value
# off the basic block's stack, but if there isn't one, it then
# records that the basic block will need another stack value coming in
# and creates an unconnected value to represent that, called an input
# node.
pop = proc {
if stack.empty?
input = Node.new(:connector)
stack_in.unshift input
input
else
stack.pop
end
}
# Go through each instruction in the basic block, and perform a kind
# of abstract interpretation on it. This loop looks a lot like the one
# in the interpreter. The difference here is that we don't deal with
# real values, instead we deal with an abstraction of the values that
# will be there in the future. It's somewhat like computing with
# promises that are never fulfilled. It's simpler than it sounds
# because as this is a basic block the control flow is simple - it's a
# linear sequence of instructions with no branches. For each
# instruction we create a graph node that represents the computation
# on the abstract values. We build a backbone of linear control flow
# through the instructions by linking each instruction that has some
# kind of side effect to the previous instruction with a side effect.
insns.each_with_index do |insn, n|
case insn.first
when :trace
last_node = Node.new(:trace, line: insn[1])
when :self
last_node = Node.new(:self)
stack.push last_node
when :arg
last_node = Node.new(:arg, n: insn[1])
stack.push last_node
when :load
name = insn[1]
value = names[name]
unless value
# Like the pop proc, if we don't find that a name has been set
# in this basic block, we remember that we need it as an input
# and create an input node to represent its future value.
input = Node.new(:connector)
names_in[name] = input
names[name] = input
value = input
end
stack.push value
when :store
names[insn[1]] = pop.call
when :push
last_node = Node.new(:constant, value: insn[1])
stack.push last_node
when :send
name = insn[1]
argc = insn[2]
send_node = Node.new(:send, name: name, argc: argc)
argc.times do |n|
arg_node = pop.call
arg_node.output_to :value, send_node, :"arg(#{argc-n-1})"
end
receiver_node = pop.call
receiver_node.output_to :value, send_node, :receiver
if profile
send_node.props[:profile] = profile.sends[ip + n]
end
stack.push send_node
last_node = send_node
when :not
value_node = pop.call
not_node = Node.new(:not)
value_node.output_to :value, not_node
stack.push not_node
last_node = not_node
when :jump
jump_node = Node.new(:jump)
last_node = jump_node
when :branch
value_node = pop.call
branch_node = Node.new(:branch)
value_node.output_to :value, branch_node, :condition
last_node = branch_node
when :return
last_node = pop.call
stack.push last_node
else
raise 'unknown instruction'
end
      # Does this instruction need a deoptimisation map because it could deoptimise?
if @build_deopt_maps && [:send].include?(insn.first)
# Create a deoptimisation map node?
deopt_map_node = Node.new(:deopt_map, insns: deopt_map.insns, ip: deopt_map.ip)
# Send the value of the receiver to it.
Node.new(:self).output_to :value, deopt_map_node, :receiver
# Send the value of all arguments to it.
args = insns.select { |i| i.first == :arg }.map { |i| i[1] }
if args.empty?
args_count = 0
else
args_count = args.max + 1
end
args_count.times do |n|
Node.new(:arg, n: n).output_to :value, deopt_map_node, :"arg(#{n})"
end
# Send the value of all names to it.
deopt_map.names.each_pair do |name, value|
value.output_to :value, deopt_map_node, name
end
# Send all values on the stack to it.
deopt_map.stack.each_with_index do |value, index|
value.output_to :value, deopt_map_node, :"stack(#{index})"
end
# Add the deoptimisation map to the instruction by adding a special edge.
deopt_map_node.output_to :deopt_map, last_node
end
# The trace and send instructions have side effects - link them into
# the backbone of control flow so that we know one instruction with
# side effects needs to happen before any other after it. The jump and
# branch instructions could or could not be seen as side effects. We
# treat them so because it seems to make this easier.
if [:trace, :send, :jump, :branch].include?(insn.first)
last_control.output_to :control, last_node
last_control = last_node
end
# If this instruction potentially has side-effects, create a new
# deoptimisation map for the state after the instruction.
if [:send].include?(insn.first)
deopt_map = FrameStateBuilder.new(insns, ip + n + 1, names.dup, stack.dup)
end
end
GraphFragment.new(names_in, stack_in, merge, last_node, last_control, names, stack)
end
|
[
"def fragment(ast_node, &block)\n GeneratedFragment.new(self, ast_node).tap do |frag|\n yield frag if block_given?\n end\n end",
"def visit_block(node); end",
"def linearize(graph)\n # The basic blocks.\n blocks = []\n \n # Details of the basic block that contain the finish operation which\n # won't be added to the list of basic blocks until the end.\n first_node_last_block = nil\n last_block = nil\n \n # Two maps that help us map between nodes and the names of the blocks\n # that they go into, and the merge instruction indicies and the blocks\n # they're coming from.\n first_node_to_block_index = {}\n merge_index_to_first_node = {}\n\n # Look at each node that begins a basic block.\n\n basic_block_starters = graph.all_nodes.select { |n| n.begins_block? }\n\n last_basic_block_starter = basic_block_starters.select { |s| nodes_in_block(s).any? { |n| n.op == :finish } }.first\n\n basic_block_starters.delete last_basic_block_starter\n basic_block_starters.push last_basic_block_starter\n\n basic_block_starters.each do |node|\n original_first_node = node\n first_node = first_in_block(original_first_node, nodes_in_block(original_first_node))\n\n # We're going to create an array of operations for this basic\n # block.\n\n block = []\n next_to_last_control = nil\n\n # Follow the local sequence.\n\n node = first_node\n\n begin\n # We don't want to include operations that are just there to form\n # branches or anchor points in the graph such as start and merge.\n\n unless [:start, :merge].include?(node.op)\n op = node.op\n\n # We rename finish to return to match the switch from the\n # declarative style of the graph to the imperative style\n # of the list of operations.\n op = :return if op == :finish\n\n # The instruction begins with the operation.\n insn = [op]\n\n # Then any constant values or similar.\n [:line, :n, :value].each do |p|\n insn.push node.props[p] if node.props.has_key?(p)\n end\n\n # Then any input registers.\n node.inputs.with_input_name(:value).from_nodes.each do |input_values|\n insn.push input_values.props[:register]\n end\n\n # Phi instructions need pairs of source registers with the blocks they came from.\n if node.op == :phi\n node.inputs.edges.each do |input|\n if input.input_name =~ /^value\\((.+)\\)$/\n n = $1.to_i\n insn.push n\n insn.push input.from.props[:register]\n end\n end\n # Elide phi instructions if register allocation has run correctly and values are\n # already in the correct registers.\n insn = nil if insn.drop(2).select.with_index{ |_,i| i.even? }.uniq.size == 1\n end\n\n # Send instructions and lowered equivalents need the arguments.\n if [:send, :call_managed, :int64_add, :int64_sub, :int64_imul, :int64_and, :int64_shift_left, :int64_shift_right].include?(node.op)\n insn.push node.inputs.with_input_name(:receiver).from_node.props[:register]\n\n if node.op == :send\n insn.push node.props[:name]\n elsif node.op == :call_managed\n insn.push node.inputs.with_input_name(:name).from_node.props[:register]\n end\n\n node.props[:argc].times do |n|\n arg = node.inputs.with_input_name(:\"arg(#{n})\").from_node\n\n if arg.op == :immediate\n insn.push arg.props[:value]\n else\n insn.push arg.props[:register]\n end\n end\n end\n\n # Then the target register if the instruction has one.\n insn.push node.props[:register] if insn && (node.produces_value? 
|| node.op == :move)\n\n # If it's a branch then the target basic blocks and the test.\n if node.op == :branch\n insn.push node.inputs.with_input_name(:condition).from_node.props[:register]\n\n [:true, :false].each do |branch|\n target = node.outputs.with_output_name(branch).to_node\n raise unless target\n insn.push target\n end\n\n if node.props[:test]\n insn.push node.props[:test]\n end\n end\n\n # Send instructions and lowered managed calls need the list of live registers.\n if [:send, :call_managed].include?(node.op)\n insn.push node.props[:live_registers]\n end\n \n # Guards are like branches, but only have one side.\n if node.op == :guard\n insn.push node.inputs.with_input_name(:condition).from_node.props[:register]\n \n if node.props[:test]\n insn.push node.props[:test]\n end\n end\n\n # Kind instructions need the kind.\n if node.op == :kind_is?\n insn.push node.props[:kind]\n end\n \n # Frame states need the instructions, the ip, and to know where values are for\n # the receiver, the arguments, and the stack.\n if node.op == :deopt_map\n insn.push node.props[:insns]\n insn.push node.props[:ip]\n insn.push node.inputs.with_input_name(:receiver).from_node.props[:register]\n\n insn.push node.inputs.edges.select { |e| e.input_name.to_s.start_with?('arg(') }.map { |e|\n /arg\\((\\d+)\\)/ =~ e.input_name.to_s\n [$1.to_i, e.from.props[:register]]\n }.sort_by { |pair| pair.first }.map { |pair| pair.last }\n\n insn.push node.inputs.edges.select { |e| e.input_name.to_s.start_with?('stack(') }.map { |e|\n /stack\\((\\d+)\\)/ =~ e.input_name.to_s\n [$1.to_i, e.from.props[:register]]\n }.sort_by { |pair| pair.first }.map { |pair| pair.last }\n end\n\n # Add the instruction to the block.\n block.push insn if insn\n end\n\n next_to_last_control = node if node.has_control_output?\n\n # Follow the local schedule edge to the next node.\n node = node.outputs.with_output_name(:local_schedule).to_nodes.first\n end while node && node.op != :merge\n\n # Empty blocks cause problems elsewhere - it's easier to just have a nop\n # in them. Really, we should remove empty blocks by modifying the\n # instruction that jumps here to jump to wherever this leads to.\n\n if block.empty?\n block.push [:nop]\n end\n\n # If the last node is a merge, we need to remember which merge index this is.\n\n if node && node.op == :merge\n next_to_last_control.outputs.with_output_name(:control).edges.first.input_name =~ /^control\\((.+)\\)$/\n n = $1.to_i\n merge_index_to_first_node[n] = first_node\n end\n\n # Add a jump instruction if this block was going to just flow into the next\n # - we'll remove it later if the block followed it anyway and we can just\n # fallthrough.\n\n unless [:return, :branch].include?(block.last.first)\n begin\n block.push [:jump, next_to_last_control.outputs.with_output_name(:control).to_node]\n rescue\n block.push [:jump, :broken]\n end\n end\n\n first_node_to_block_index[original_first_node] = blocks.size\n first_node_to_block_index[first_node] = blocks.size\n blocks.push block\n end\n\n # Record the number that this basic block has and then add it to the list of basic blocks.\n\n first_node_to_block_index[first_node_last_block] = blocks.size\n\n # Go back through the basic blocks and update some references that were to things that\n # hadn't been decided yet.\n\n blocks.each do |block|\n block.each do |insn|\n insn.map! 
do |e|\n # If part of an instruction references a basic block, turn that into the index of\n # the basic block instead.\n\n if e.is_a?(IR::Node)\n :\"block#{first_node_to_block_index[e]}\"\n else\n e\n end\n end\n end\n end\n\n # Go back through the basic blocks and change how the branch instructions out of them\n # work.\n\n blocks.each_with_index do |block, n|\n next_block = :\"block#{n + 1}\"\n last = block.last\n\n if last == [:jump, next_block]\n # A jump that just goes to the next block can be removed and left to fall through.\n block.pop\n elsif last.first == :branch && last[3] == next_block\n # A branch where the else goes to the next block can branch only when true.\n block.pop\n block.push [:branch_if, last[1], last[2], *last.drop(4)]\n elsif last.first == :branch && last[2] == next_block\n # A branch where the if goes to the next block can branch only unless true.\n block.pop\n test = last.drop(4)\n block.push [:branch_unless, last[1], last[3], *test]\n elsif last.first == :branch\n # A branch that doesn't go to the next block at all can be a branch if true\n # and then fallthrough to a new jump instruction.\n block.pop\n block.push [:branch_if, last[1], last[2], *last.drop(4)]\n block.push [:jump, last[3]]\n end\n end\n\n blocks\n end",
"def block\n\t\tdeclaration_nodes = declarations\n\t\tcompound_statement_node = compound_statement\n\t\tBlock.new(declaration_nodes, compound_statement_node)\n\tend",
"def create_block(name = 'B' + (@block_counter += 1).to_s)\n result = BasicBlock.new(name)\n @graph.add_vertex result\n result\n end",
"def BlockNode(opening, block_var, bodystmt); end",
"def read_graph\n # Already read BEGIN_GRAPH, id, format, args, and props\n graph = Graph.new(@graph_props)\n edge_delay = []\n @reader.read_sint32.times do\n id = @reader.read_sint32\n node_class = read_pool_object\n has_predecessor = read_bool\n props = read_props\n props[:id] = id\n props[:node_class] = node_class\n props[:has_predecessor] = has_predecessor\n node = graph.create_node(id, props)\n edge_delay.push(*read_edges(node, node_class, true))\n edge_delay.push(*read_edges(node, node_class, false))\n end\n edge_delay.each do |edge|\n node = edge[:node]\n props = edge[:edge]\n inputs = edge[:inputs]\n others = edge[:ids].reject(&:nil?).map { |id| graph.nodes[id] || raise(EncodingError, \"BGV edge with unknown node #{id}\") }\n others.each_with_index do |other, index|\n # We need to give each edge their own property as they're annotated separately.\n props = props.dup\n props[:index] = index\n if inputs\n graph.create_edge other, node, props\n else\n graph.create_edge node, other, props\n end\n end\n end\n\n # Read block information.\n @reader.read_sint32.times do\n block_id = @reader.read_sint32\n block_nodes = @reader.read_sint32.times.map { @reader.read_sint32 }\n # Followers aren't used but could be.\n @reader.read_sint32.times.map { @reader.read_sint32 }\n graph.create_block block_id, block_nodes\n end\n graph\n end",
"def write_blocks(graph)\n graph.blocks.each do |block|\n @stream.puts \" subgraph cluster_block#{block.id} {\"\n @stream.puts ' fontname = \"Arial\";'\n @stream.puts \" label = \\\"B#{block.id}\\\";\"\n @stream.puts ' style=dotted;'\n\n block.nodes.each do |node|\n next if node.props[:hidden] || node.props[:inlined]\n\n @stream.puts \" node#{node.id};\"\n end\n\n @stream.puts ' }'\n end\n end",
"def basic_blocks\n @basic_block_collection ||= BasicBlockCollection.new(self)\n end",
"def connect_basic_blocks(blocks); end",
"def subgraph name = nil, &block\n Graph.new name, self, &block\n end",
"def start_block_to_dsl(block)\n return \"\" unless block\n\n block_dsls = block.each_block(false).map do |block|\n dsl_content = block.to_dsl\n\n if block.has_comment?\n dsl_content = method_call_with_possible_block(\n \"with_comment\",\n [block.comment, block.comment_pinned].compact.map(&:inspect).join(\", \"),\n dsl_content\n )\n end\n\n if block.is_shadow?\n dsl_content = method_call_with_possible_block(\"shadow\", \"\", dsl_content)\n end\n\n if block.has_position?\n dsl_content = method_call_with_possible_block(\"with_position\", [block.x, block.y].map(&:inspect).join(\", \"), dsl_content)\n end\n\n dsl_content\n end\n block_dsls.join(\"\\n\")\n end",
"def add_single_block(block)\n\n\t\tif @decoded[block.blocks[0]].nil? \n\t\t \t@decoded[block.blocks[0]] = block.data \n\t\t\t@num_valid += 1\n\t\tend\n\tend",
"def create_first_block\n\ti = 0\n\tinstance_variable_set(\"@b#{i}\", Block.first(\"Genesis\")) #Méta programmation\n\tLEDGER << @b0\n\tpp @b0\n\tadd_block\nend",
"def add_block\r\n block = params[:block].to_s.underscore\r\n if block.present?\r\n gr_graph_id = block[/\\d+/].to_i\r\n graph_search = GrGraph.find(gr_graph_id)\r\n unless graph_search.nil?\r\n @user = User.current\r\n layout = @user.pref[:graph_alloc] || {}\r\n # remove if already present in a group\r\n %w(top left right).each {|f| (layout[f] ||= []).delete block }\r\n # add it on top\r\n layout['top'].unshift block\r\n @user.pref[:graph_alloc] = layout\r\n @user.pref.save\r\n end\r\n end\r\n redirect_to personalise_index_project_gr_graphs_path\r\n end",
"def new\n\t\t@block = Block.new\n\tend",
"def fragment!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 3 )\n\n type = FRAGMENT\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 9:12: 'fragment'\n match( \"fragment\" )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 3 )\n\n end",
"def connect_basic_blocks(blocks)\n blocks.each do |block_start, block|\n insn = block.insns.last\n\n insn.branch_targets.each do |branch_target|\n block.outgoing_blocks << blocks.fetch(labels[branch_target])\n end\n\n if (insn.branch_targets.empty? && !insn.leaves?) ||\n insn.falls_through?\n fall_through_start = block_start + block.insns.sum(&:length)\n block.outgoing_blocks << blocks.fetch(fall_through_start)\n end\n\n block.outgoing_blocks.each do |outgoing_block|\n outgoing_block.incoming_blocks << block\n end\n end\n end",
"def fragment!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in(__method__, 3)\n\n type = FRAGMENT\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 9:12: 'fragment'\n match(\"fragment\")\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out(__method__, 3)\n\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Handle warning emails to all users
|
def warning_email(user, warning)
@user = user
@warning = warning
mail to: @user.email, subject: 'CastleBridge Warning Email'
end
|
[
"def sendWarning(warning)\n users = User.all\n users.each do |user|\n TextMailer.warning_email(user, warning).deliver_now\n end\n end",
"def warned\n\t\t# Pick a random user and temporary set his status to 'warned'\n\t\tuser = User.first\n\t\tuser.status = 'warned'\n\t\tUserStatusMailer.set_mail(user)\n\tend",
"def low_credit_warning_to_admin(user)\n @user = user\n @advertiser = user.advertiser\n\n set_attachments\n\n case APP_CONFIG['app_country']\n when 'AR'\n mail(to: 'sebastian@socialtarget.net', subject: \"Anunciante con crédito debajo de $1000 - Its time to go social\")\n when 'CO'\n mail(to: 'sebastian@socialtarget.net', subject: \"Anunciante con crédito debajo de $1000 - Its time to go social\")\n when 'MX'\n mail(to: 'sebastian@socialtarget.net', subject: \"Anunciante con crédito debajo de $1000 - Its time to go social\")\n end\n\n end",
"def sendResolved(warning)\n users = User.all\n users.each do |user|\n TextMailer.resolved_email(user, warning).deliver_now\n end\n end",
"def admin_warning\n default_domain = \"example.com\"\n default_password = \"admin\"\n if logged_in? and current_person.admin? \n if current_person.email =~ /@#{default_domain}$/\n flash[:notice] = %(Warning: your email address is still at \n #{default_domain}.\n <a href=\"#{edit_person_path(current_person)}\">Change it here</a>.)\n end\n if current_person.unencrypted_password == default_password\n flash[:error] = %(Warning: your password is still the default.\n <a href=\"#{edit_person_path(current_person)}\">Change it here</a>.) \n end\n end\n end",
"def notify_users\n Service.same_district_of(self.user).where(category: self.category).where.not(user: self.user).each do |service|\n Mailer::UserMailerWorker.perform_in(3.seconds, :new_mission_posted, mission_id: self.id, user_id: service.user.id)\n end\n Mailer::UserMailerWorker.perform_in(3.seconds, :people_availble_to_help, mission_id: self.id)\n end",
"def admin_warning\n default_domain = \"example.com\"\n default_password = \"admin\"\n if logged_in? and current_person.admin? \n if current_person.email =~ /@#{default_domain}$/\n flash[:notice] = t('flash.email_is_default',\n :warning => t('global.warning'),\n :default_domain => default_domain,\n :edit_person => edit_person_path(current_person))\n end\n if current_person.unencrypted_password == default_password\n flash[:error] = t('flash.password_is_default',\n :warning => t('global.warning'),\n :edit_person => edit_person_path(current_person)) \n end\n end\n end",
"def warn_old\n upcoming_expired_requests.find_each do |bulk_update_request|\n if bulk_update_request.forum_topic\n body = \"This bulk update request is pending automatic rejection in #{WARNING_PERIOD.inspect}.\"\n unless bulk_update_request.forum_topic.forum_posts.where(creator_id: User.system.id, body: body).exists?\n bulk_update_request.forum_updater.update(body)\n end\n end\n end\n end",
"def users_to_notify\n super\n end",
"def global_warning\n # NOTE (!) if you set this value and don't see it change in 10 minutes, CHECK YOUR SLAVE LAG. It reads from slaves.\n warning = EolConfig.global_site_warning\n flash.now[:error] = warning if warning\n end",
"def send_warning_email(student, items)\n @student = student\n @items = items\n mail( :to => @student.email, :subject => 'Items Due Soon' )\n end",
"def service_provider_assignment_unassignment_mail(lawyer_user_id,action)\n url = url_link\n sp = User.find(ServiceProvider.find(params[:secretary_id]).user_id)\n user = User.find(lawyer_user_id)\n recipient = []\n recipient << user.email\n recipient << sp.email\n recipient << get_lawfirm_admin_email(user.id)\n cc = current_user.email \n subject = (action.to_s == \"unassigned\") ? \"Service Provider unassignment to #{user.full_name}\" : \"Service Provider assignment to #{user.full_name}\"\n email = {}\n (action.to_s == \"unassigned\") ? (email[:message] = \"Service Provider named #{sp.full_name} has been unassigned to #{user.full_name}\") : (email[:message] = \"Service Provider named \" + sp.full_name + \" has been assigned to \" + user.full_name)\n @liviaMailer = LiviaMailer \n end",
"def notify_users\n Mission.same_district_of(self.user).where(category: self.category).where.not(user: self.user).each do |service|\n Mailer::UserMailerWorker.perform_in(3.seconds, :new_service_posted, service_id: self.id, user_id: service.user.id)\n end\n Mailer::UserMailerWorker.perform_in(3.seconds, :people_who_need_help, service_id: self.id)\n end",
"def idv_mail_only_warning_visited(**extra)\n track_event(\n 'IdV: Mail only warning visited',\n **extra,\n )\n end",
"def disable_all_notifications\r\n @user = current_user\r\n\r\n return if show_session_user_on_get\r\n\r\n if @params['disable'] == \"on\"\r\n @user.set_att(UserAttribute::ATT_REMIND_BY_EMAIL, 0)\r\n end\r\n end",
"def final_review_warning(design)\n\n final_review = design.design_reviews.detect { |dr| dr.review_type.name == \"Final\" }\n\n to_list = final_review.design_review_results.collect { |rr|\n User.find(rr.reviewer_id).email }.uniq\n cc_list = (MailerMethods.copy_to(final_review) + \n MailerMethods.copy_to_on_milestone(final_review.design.board) -\n to_list).uniq\n subject = MailerMethods.subject_prefix(design) + 'Notification of upcoming Final Review'\n\n \n mail( :to => to_list,\n :subject => subject,\n :cc => cc_list\n )\n\n end",
"def sanity_check(missing_files)\n @missing_files = missing_files\n @admins = User.where(admin: true).map(&:email)\n\n mail(to: @admins, subject: \"[Single Cell Portal Admin Notification #{Rails.env != 'production' ? \" (#{Rails.env})\" : nil}]: Sanity check results: #{@missing_files.size} files missing\") do |format|\n format.html\n end\n end",
"def skip_email_changed_notification!; end",
"def emails_to_notify_on_failure\n if billing_log_report.papers_to_process.any?\n Journal.staff_admins_for_papers(\n billing_log_report.papers_to_process\n ).map(&:email).uniq\n else\n Journal.staff_admins_across_all_journals\n .map(&:email)\n end\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Reindex a specific user only, instead of the whole class.
|
def reindex_user!
Sunspot.index! self
end
|
[
"def reindex\n return unless search_data.keys.detect { |key| send(\"#{key}_changed?\") }\n Rails.logger.debug \"User #{id} is gonna be re-indexed\"\n super\n end",
"def reindex( _uid, _text )\n # TODO\n end",
"def refresh_user_search_index()\n start.uri('/api/user/search')\n .put()\n .go()\n end",
"def reindex!\n indexed if generate_solr_index\n end",
"def add_user_to_index(userid)\n name = FBHot.redis.get \"#{userid}:name\"\n name = _convert_vietnamese_to_english name\n if name == nil then return end\n name.strip!\n name.split.each do |token|\n if token.length > 0 then\n # if a token is 'kien' => we make index for \n # ['ki', 'kie', 'kien', 'ie', 'ien', 'en']\n (0..token.length - 3).each do |i|\n ((i + 1)..token.length - 1).each do |j|\n FBHot.redis.zadd(\"#{@index_name}:#{token[i..j]}\", 0, userid)\n end\n end\n end\n end\n end",
"def reindex\n @indexes = {}\n add_to_indexes( @records )\n end",
"def index\n\t\t@users = User.where(user: @user).all\n\tend",
"def reindex!\n doc = {:id => neo_node_id }\n self.class.index_updaters.each_value do |updater|\n updater.call(self, doc)\n end\n lucene_index << doc\n end",
"def index_users_repositories(user)\n queue_index \"#{user}/*\"\n end",
"def fetch_user\n # Check if user is already in the index\n##if @elastic.search(\"custom_url:#{@name}\", :type => 'user').hits.empty?\n id = SteamId.new(@name)\n flist = ''\n id.friends.each do |f| \n flist << f.steam_id64.to_s + \",\" \n end\n glist = ''\n id.games.each do |g,k|\n glist << k.app_id.to_s + \",\"\n end\n \n @elastic.index({\n :custom_url => id.custom_url,\n :nickname => id.nickname,\n :real_name => id.real_name,\n :friends => flist,\n :games => glist,\n :fetch_time => id.fetch_time\n }, :id => id.steam_id64, :type => 'user')\n## end\n end",
"def reindex_class(klass, options={})\n opts = {:verbose => false}.merge(options)\n # First, delete all docs of this class\n XapianDb.database.delete_docs_of_class(klass)\n blueprint = XapianDb::DocumentBlueprint.blueprint_for(klass)\n indexer = XapianDb::Indexer.new(XapianDb.database, blueprint)\n show_progressbar = false\n obj_count = klass.count\n if opts[:verbose]\n if defined?(ProgressBar)\n show_progressbar = true\n end\n puts \"reindexing #{obj_count} objects of #{klass}...\"\n pbar = ProgressBar.new(\"Status\", obj_count) if show_progressbar\n end\n\n # Process the objects in batches to reduce the memory footprint\n nr_of_batches = (obj_count / 1000) + 1\n nr_of_batches.times do |batch|\n klass.all(:offset => batch * 1000, :limit => 1000) .each do |obj|\n if blueprint.should_index? obj\n doc = indexer.build_document_for(obj)\n XapianDb.database.store_doc(doc)\n else\n XapianDb.database.delete_doc_with_unique_term(obj.xapian_id)\n end\n pbar.inc if show_progressbar\n end\n end\n XapianDb.database.commit\n true\n end",
"def reindex\n Transaction.current.reindex(self)\n end",
"def user_search\n @users = User.admin_search(params[:query])\n end",
"def reindex\n # We would need to set this explicitly to re-index\n if BoolENV[\"REINDEX_SOLR\"]\n Sunspot.session = Sunspot::SessionProxy::Retry5xxSessionProxy.new(Sunspot.session)\n reindex_options = { :batch_commit => false }\n Dir.glob(Rails.root.join('app/models/**/*.rb')).each { |path| require path }\n sunspot_models = Sunspot.searchable\n sunspot_models.each do |model|\n model.solr_reindex(reindex_options)\n end\n end\n end",
"def index\n @userxes = Userx.all\n end",
"def search_users\n unless @current_admin.is_super_admin\n unless @current_admin.privilages.include? '1'\n flash[:authority_error]=\"You are not authorized to navigate to this page \"\n redirect_to admin_index_path\n empty_user_id\n return\n end\n end\n empty_user_id\n @check=0\n @searched_user=User.new\n end",
"def update_index\n self.class.index_update(self)\n end",
"def index_all\n # @users = User.all\n @user_contracts = UserContract.order('created_at DESC')\n @users = User.all.includes(:user_contract)\n end",
"def reindex!(index: nil, dataset: nil, batch_size: 100)\n index_name = index || timestamped_index\n import!(index: index_name, dataset: dataset, batch_size: batch_size)\n\n # Create an alias to the newly created index\n alias_index(index_name)\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Instantiates a new securityContainerRegistryEvidence and sets the default values.
|
def initialize()
super
@odata_type = "#microsoft.graph.security.containerRegistryEvidence"
end
|
[
"def initialize()\n super\n @odata_type = \"#microsoft.graph.security.registryValueEvidence\"\n end",
"def initialize()\n super\n @odata_type = \"#microsoft.graph.security.registryKeyEvidence\"\n end",
"def initialize()\n super\n @odata_type = \"#microsoft.graph.security.containerEvidence\"\n end",
"def initialize()\n super\n @odata_type = \"#microsoft.graph.security.containerImageEvidence\"\n end",
"def initialize()\n super\n @odata_type = \"#microsoft.graph.security.googleCloudResourceEvidence\"\n end",
"def initialize()\n super\n @odata_type = \"#microsoft.graph.security.kubernetesServiceEvidence\"\n end",
"def initialize()\n super\n @odata_type = \"#microsoft.graph.security.blobContainerEvidence\"\n end",
"def initialize()\n super\n @odata_type = \"#microsoft.graph.security.kubernetesSecretEvidence\"\n end",
"def initialize()\n super\n @odata_type = \"#microsoft.graph.security.kubernetesServiceAccountEvidence\"\n end",
"def initialize()\n super\n @odata_type = \"#microsoft.graph.security.cloudApplicationEvidence\"\n end",
"def initialize()\n super\n @odata_type = \"#microsoft.graph.security.amazonResourceEvidence\"\n end",
"def initialize()\n super\n @odata_type = \"#microsoft.graph.security.azureResourceEvidence\"\n end",
"def initialize()\n super\n @odata_type = \"#microsoft.graph.security.kubernetesClusterEvidence\"\n end",
"def initialize()\n super\n @odata_type = \"#microsoft.graph.security.ipEvidence\"\n end",
"def initialize()\n super\n @odata_type = \"#microsoft.graph.security.deviceEvidence\"\n end",
"def initialize()\n super\n @odata_type = \"#microsoft.graph.security.kubernetesPodEvidence\"\n end",
"def initialize()\n super\n @odata_type = \"#microsoft.graph.security.kubernetesControllerEvidence\"\n end",
"def initialize()\n super\n @odata_type = \"#microsoft.graph.security.processEvidence\"\n end",
"def initialize()\n super\n @odata_type = \"#microsoft.graph.security.blobEvidence\"\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Sets the registry property value. The registry URI.
|
def registry=(value)
@registry = value
end
|
[
"def registry_value=(value)\n @registry_value = value\n end",
"def registry_key=(value)\n @registry_key = value\n end",
"def registry_path=(path=nil)\n if(path)\n custom_paths[:registry_directory] = path\n end\n custom_paths[:registry_directory]\n end",
"def location=(value)\n registry_data['location'] = value\n end",
"def registry_path=(path = nil)\n if path\n custom_paths[:registry_directory] = path\n end\n custom_paths[:registry_directory]\n end",
"def uri=(value)\n @uri = value\n end",
"def registry_hive=(value)\n @registry_hive = value\n end",
"def registry_value_name=(value)\n @registry_value_name = value\n end",
"def set_registry_key(name, value, type_name = 'REG_SZ')\n require 'win32/registry'\n case type_name\n when 'REG_SZ'\n type = Win32::Registry::REG_SZ\n when 'REG_EXPAND_SZ'\n type = Win32::Registry::REG_EXPAND_SZ\n when 'REG_BINARY'\n type = Win32::Registry::REG_BINARY\n when 'REG_DWORD'\n type = Win32::Registry::REG_DWORD\n else\n raise \"Cannot parse type '#{type}'.\"\n end\n hkey, subkey_name = self.parse_registry_key_name(name)\n hkey.open(subkey_name, Win32::Registry::KEY_WRITE) do |node|\n node.write('', type, value)\n end\n end",
"def resource=(rsrc)\n self.uri_parts['Resource'] = rsrc\n end",
"def registry_value_type=(value)\n @registry_value_type = value\n end",
"def uri=(value)\n host, port = value.split(':')\n self.host = host\n self.port = port\n end",
"def location_uri=(value)\n @location_uri = value\n end",
"def registry=(_arg)\n Datadog.logger.warn('Setting a custom registry is no longer supported and was ignored. ' \\\n 'Remove this assignment from your configuration to stop seeing this warning.')\n end",
"def launch_uri=(value)\n @launch_uri = value\n end",
"def set_uri(base, path)\n @uri = \"#{base}/#{path}/#{self.identifier}\"\n end",
"def discovery_uri=(new_discovery_uri)\n @options[:discovery_uri] = Addressable::URI.parse(new_discovery_uri)\n end",
"def log_uri=(val)\r\n @@log_uri = (val.is_a? URI) ? val : URI.parse(val)\r\n end",
"def []=(path,value)\n set(path, value)\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
DELETE /qs/1 DELETE /qs/1.json
|
def destroy
@q.destroy
respond_to do |format|
format.html { redirect_to qs_url }
format.json { head :no_content }
end
end
|
[
"def destroy\n @q = Q.find_by_unique_id(params[:id])\n @q.destroy\n\n respond_to do |format|\n format.html { redirect_to qs_url }\n format.json { head :no_content }\n end\n end",
"def delete\n if body.empty? && params[:id]\n client.delete(params)\n elsif body.empty?\n client.delete_by_query(params.merge(body: body.merge(ALL)))\n else\n client.delete_by_query(params.merge(body: body))\n end\n end",
"def delete_by_query query\n delete :query => query\n end",
"def destroy\n @rnaseq = Rnaseq.find(params[:id])\n @rnaseq.destroy\n\n respond_to do |format|\n format.html { redirect_to rnaseqs_url }\n format.json { head :no_content }\n end\n end",
"def delete\n RestClient.delete \"#{@uri}/api/requests/request/#{@data['requestId']||@data['id']}\"\n puts ' Deleted request: '.red + \"#{@data['requestId']||@data['id']}\".light_blue\n end",
"def destroy\n authorize! :delete_atq, @atq\n Comunicate.where(atq_id: @atq.id).delete_all\n @atq.destroy\n respond_to do |format|\n format.json { head :no_content }\n end\n end",
"def delete!(*rest) end",
"def delete endpoint\n do_request :delete, endpoint\n end",
"def delete_request\n client.create_request('DELETE', url_path)\n end",
"def delete(path, params={}); make_request(:delete, host, port, path, params); end",
"def delete\n request_method('DELETE')\n end",
"def cmd_delete argv\n setup argv\n uuid = @hash['uuid']\n response = @api.delete(uuid)\n msg response\n return response\n end",
"def destroy\n @json.destroy\n\n head :no_content\n end",
"def destroy\n @qa = Qa.find(params[:id])\n @qa.destroy\n\n respond_to do |format|\n format.html { redirect_to qas_url }\n format.json { head :no_content }\n end\n end",
"def delete_by_query index, query, conflicts_proceed\n conflicts = conflicts_proceed ? 'conflicts=proceed' : ''\n uri = URI(\"http://#{@host}:#{@port_s}/#{index}/_doc/_delete_by_query?#{conflicts}\")\n\n req = Net::HTTP::Post.new(uri)\n req.body = query.to_json\n run(uri, req)\n end",
"def destroy\n @query.destroy\n respond_to do |format|\n format.html { redirect_to queries_url }\n format.json { head :no_content }\n end\n end",
"def delete_by_query(queries)\n { delete: { query: queries } }.to_json\n end",
"def destroy\n @req = Req.find(params[:id])\n @req.destroy\n\n respond_to do |format|\n format.html { redirect_to reqs_url }\n format.json { head :no_content }\n end\n end",
"def delete(url, resource_name, options = {})\n build_response(resource_name) do\n connection.delete do |req|\n req.url url\n req.body = options.to_json\n end\n end\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Even if the list order is incorrect, the parser shouldn't fail and it is recognized as toplevel.
|
def test_nested_ordered_list_incorrect
assert_nothing_raised do
parser = Juli::Parser.new
parser.parse(data_path('t005.txt'), Juli::Visitor::Tree.new)
end
end
|
[
"def parse_first_list_line(indentation, content); end",
"def parse_list; end",
"def validate_top_level!\n invalid = @root.nil? && @tokens.any? do |t|\n !t.route?\n end\n fail UnexpectedTypeError, 'Found non-route at toplevel' if invalid\n end",
"def parse_args\n nolfws\n if literal(LP)\n ws; args = parse_arglist; ws\n literal(RP) or expected(\"')'\")\n return args\n end\n return parse_arglist\n end",
"def do_listed_element token\n if token.tag == 'li'\n if @open_elements.length >= 1\n if @open_elements[-1].tag == \"ol\" || @open_elements[-1].tag == \"ul\"\n do_start_token token\n elsif @open_elements[-1].tag == \"li\"\n do_end_token @open_elements[-1]\n do_start_token token\n else\n do_listed_element MonkeyToken.new '<ul>', :tag\n do_start_token token\n end\n else\n do_table_token MonkeyToken.new '<ul>', :tag\n do_start_token token\n end\n\n elsif token.tag == 'ol' || token.tag == 'ul'\n do_start_token token\n end\n\n end",
"def parse_list(tokens, level = 0)\n # puts ' ' * level + tokens.inspect\n raise IncompleteError if tokens.empty?\n element = tokens.shift\n\n if element == :'('\n car = parse_list(tokens, level + 2) || cons\n cdr = parse_list(tokens, level + 2)\n \n cons(car, cdr)\n elsif element == :')'\n return\n else\n cons(element, parse_list(tokens, level + 2))\n end\n end",
"def parse_remaining_files\n if globals.ordered_parser\n globals.ordered_parser.parse\n log.debug(\"Re-processing #{@file}...\")\n end\n end",
"def parse_top_level_statements container\n comment = collect_first_comment\n\n look_for_directives_in container, comment\n\n throw :eof if container.done_documenting\n\n @markup = comment.format\n\n # HACK move if to RDoc::Context#comment=\n container.comment = comment if container.document_self unless comment.empty?\n\n parse_statements container, NORMAL, nil, comment\n end",
"def parse_list_cmd(args, listsize, center_correction)\n \n frame = @proc.frame\n \n container = @proc.frame_container(frame, false)\n\n # FIXME: put into a helper routine\n # See also duplicate code in print_location\n if container[0] != 'file'\n try_container = container\n while try_container[0] != 'file' && frame.prev do\n frame = frame.prev\n try_container = @proc.frame_container(frame, false)\n end\n container = try_container if try_container[0] == 'file'\n end\n\n filename = container[1]\n \n last = nil\n if args.empty? and not frame\n errmsg(\"No Ruby program loaded.\")\n return nil, nil, nil\n end\n \n if args.size > 0\n if args[0] == '-'\n return no_frame_msg unless @proc.line_no\n first = [1, @proc.line_no - 2*listsize - 1].max\n elsif args[0] == '.'\n return no_frame_msg unless @proc.line_no\n if args.size == 2\n opts = {\n :msg_on_error => \n \"#{NAME} command last or count parameter expected, \" +\n 'got: %s.' % args[2]\n }\n second = @proc.get_an_int(args[1], opts)\n return nil, nil, nil unless second\n first = @proc.frame_line \n last = adjust_last(first, second)\n else\n first = [1, @proc.frame_line - center_correction].max\n end\n\n else\n modfunc, container, first = @proc.parse_position(args[0])\n if first == nil and modfunc == nil\n # error should have been shown previously\n return nil, nil, nil\n end\n if args.size == 1\n first = 1 if !first and modfunc\n first = [1, first - center_correction].max\n elsif args.size == 2 or (args.size == 3 and modfunc)\n opts = {\n :msg_on_error => \n \"#{NAME} command starting line expected, got %s.\" % args[-1]\n }\n last = @proc.get_an_int(args[1], opts)\n return nil, nil, nil unless last\n if modfunc\n if first\n first = last\n if args.size == 3 and modfunc\n opts[:msg_on_error] = \n (\"#{NAME} command last or count parameter expected, \" +\n 'got: %s.' % args[2])\n last = @proc.get_an_int(args[2], opts)\n return nil, nil, nil unless last\n end\n end\n end\n last = adjust_last(first, last)\n elsif not modfunc\n errmsg('At most 2 parameters allowed when no module' +\n ' name is found/given. Saw: %d parameters' % args.size)\n return nil, nil, nil\n else\n errmsg(('At most 3 parameters allowed when a module' +\n ' name is given. Saw: %d parameters') % args.size)\n return nil, nil, nil\n end\n end\n elsif !@proc.line_no and @proc.frame\n first = [1, @proc.frame_line - center_correction].max\n else\n first = [1, @proc.line_no - center_correction].max \n end\n last = first + listsize - 1 unless last\n \n LineCache::cache(container[1]) unless \n 'file' != container[0] || LineCache::cached?(container[1])\n return container, first, last\n end",
"def parse\n s_expr_list\n end",
"def validate_parse_tree(docx_parser, errors, warnings)\n docx_parser.parse\n end",
"def parse_list_cmd(args, listsize, center_correction)\n \n last = nil\n \n if args.size > 0\n if args[0] == '-'\n return no_frame_msg unless @proc.line_no\n first = [1, @proc.line_no - 2*listsize - 1].max\n file = @proc.frame.file\n elsif args[0] == '.'\n return no_frame_msg unless @proc.line_no\n if args.size == 2\n opts = {\n :msg_on_error => \n \"#{NAME} command last or count parameter expected, \" +\n 'got: %s.' % args[2]\n }\n second = @proc.get_an_int(args[1], opts)\n return nil, nil, nil unless second\n first = @proc.frame.line \n last = adjust_last(first, second)\n else\n first = [1, @proc.frame.line - center_correction].max\n end\n\n file = @proc.frame.file\n else\n modfunc, file, first = @proc.parse_position(args[0])\n if first == nil and modfunc == nil\n # error should have been shown previously\n return nil, nil, nil\n end\n if args.size == 1\n first = 1 if !first and modfunc\n first = [1, first - center_correction].max\n elsif args.size == 2 or (args.size == 3 and modfunc)\n opts = {\n :msg_on_error => \n \"#{NAME} command starting line expected, got %s.\" % args[-1]\n }\n last = @proc.get_an_int(args[1], opts)\n return nil, nil, nil unless last\n if modfunc\n if first\n first = last\n if args.size == 3 and modfunc\n opts[:msg_on_error] = \n (\"#{NAME} command last or count parameter expected, \" +\n 'got: %s.' % args[2])\n last = @proc.get_an_int(args[2], opts)\n return nil, nil, nil unless last\n end\n end\n end\n last = adjust_last(first, last)\n elsif not modfunc\n errmsg('At most 2 parameters allowed when no module' +\n ' name is found/given. Saw: %d parameters' % args.size)\n return nil, nil, nil\n else\n errmsg(('At most 3 parameters allowed when a module' +\n ' name is given. Saw: %d parameters') % args.size)\n return nil, nil, nil\n end\n end\n elsif !@proc.line_no and @proc.frame\n first = [1, @proc.frame.line - center_correction].max\n file = @proc.frame.file\n else\n first = [1, @proc.line_no - center_correction].max \n file = @proc.frame.file\n end\n last = first + listsize - 1 unless last\n\n if @proc.frame.eval?\n script = @proc.frame.vm_location.static_scope.script \n LineCache::cache(script)\n else\n LineCache::cache(file)\n script = nil\n end\n return file, script, first, last\n end",
"def parse parent, indent = 0\n p :parse_start => indent if @debug\n\n until @tokens.empty? do\n type, data, column, = get\n\n case type\n when :BREAK then\n parent << RDoc::Markup::BlankLine.new\n skip :NEWLINE, false\n next\n when :NEWLINE then\n # trailing newlines are skipped below, so this is a blank line\n parent << RDoc::Markup::BlankLine.new\n skip :NEWLINE, false\n next\n end\n\n # indentation change: break or verbatim\n if column < indent then\n unget\n break\n elsif column > indent then\n unget\n parent << build_verbatim(indent)\n next\n end\n\n # indentation is the same\n case type\n when :HEADER then\n parent << build_heading(data)\n when :RULE then\n parent << RDoc::Markup::Rule.new(data)\n skip :NEWLINE\n when :TEXT then\n unget\n parse_text parent, indent\n when :BLOCKQUOTE then\n type, _, column = get\n if type == :NEWLINE\n type, _, column = get\n end\n unget if type\n bq = RDoc::Markup::BlockQuote.new\n p :blockquote_start => [data, column] if @debug\n parse bq, column\n p :blockquote_end => indent if @debug\n parent << bq\n when *LIST_TOKENS then\n unget\n parent << build_list(indent)\n else\n type, data, column, line = @current_token\n raise ParseError, \"Unhandled token #{type} (#{data.inspect}) at #{line}:#{column}\"\n end\n end\n\n p :parse_end => indent if @debug\n\n parent\n\n end",
"def test_incomplete_semantic_hoisted_context\n equeue = ErrorQueue.new\n ErrorManager.set_error_listener(equeue)\n g = Grammar.new(\"parser grammar t;\\n\" + \"a : b | B;\\n\" + \"b : {p1}? B | B ;\")\n expecting = \".s0-B->:s1=>1\\n\"\n check_decision(g, 1, expecting, Array.typed(::Java::Int).new([2]), Array.typed(::Java::Int).new([1, 2]), \"B\", Array.typed(::Java::Int).new([1]), nil, 3, false)\n end",
"def process_parser!\n # load_all_parser_constraints\n\n load_parser_default_constraints\n load_parser_primary_constraints\n # load_parser_local_constraints\n parser.consume_all(constraints)\n\n # hack\n parser.consume { \n opt :version, \"Print version and exit\" if @version unless @specs[:version] || @long[\"version\"]\n opt :help, \"Show this message\" unless @specs[:help] || @long[\"help\"]\n resolve_default_short_options\n } # hack\n end",
"def test_incomplete_semantic_hoisted_context2\n equeue = ErrorQueue.new\n ErrorManager.set_error_listener(equeue)\n g = Grammar.new(\"parser grammar t;\\n\" + \"a : b | B;\\n\" + \"b : {p1}? B | B D ;\")\n expecting = \".s0-B->:s1=>1\\n\"\n check_decision(g, 1, expecting, Array.typed(::Java::Int).new([2]), Array.typed(::Java::Int).new([1, 2]), \"B\", Array.typed(::Java::Int).new([1]), nil, 3, false)\n end",
"def parse_root_node\n\n tok = l.front\n case tok.type\n when :cmt then parse_comment\n when :idt then parse_directive_or_instr_or_label_decl_or_const\n when :eof then parse_eof\n else\n error \"unexpected token: #{tok}\"\n end\n end",
"def parse_list(text)\n text.split(self.class.list_split).collect do |str|\n next if str.blank?\n \n str.gsub!(self.class.list_clean, \"\")\n item, str = $1, $2 if str =~ self.class.nested_block\n \n struct = parse_blocks(str, :p => !!(str =~ /\\n\\n/)) unless str.blank?\n struct.unshift(parse_inlines(item)) if item\n \n [ :li, struct.size > 1 ? struct : struct.first ]\n end.compact\n end",
"def order( items )\n self.parser.order(items) \n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Ajax action that renders the next page for a paginated Prescriber search
|
def nextPrescribers
@searchPrescribers = Prescriber.nextPrescribers params[:start], params[:page], 9
render template: 'common/search/js/nextSearchPrescribers.js'
end
|
[
"def nextPrescribers\n @searchPrescribers = Prescriber.nextActivePrescribers params[:start], params[:page], 9\n @searchController = \"prescriptions\"\n render template: 'common/search/js/nextSearchPrescribers.js'\n end",
"def nextPrescriptions\n nextPage = params[:page] ? params[:page].to_i : 1\n @searchPrescriptions = Customer.find( params[:startId].to_i ).prescriptions.page(nextPage).per(9)\n render template: 'common/search/js/nextSearchPrescriptons.js'\n end",
"def next_page!\n self.page += 1\n\n client.make_request(params: { keyword: query, page: page })\n end",
"def next_page\n @api.search(@result_type, @params.merge(page: @metadata.page + 1)) if @metadata.page < @metadata.pages\n end",
"def load_next_page\n @browser.pluggable_parser.html = SearchPaginationParser\n\n @pagination.next\n previous_length = @results.size\n\n page = @browser.get(ajax_url)\n\n @results += page.search('.match_row').collect do |node|\n OKCupid::Profile.from_search_result(node)\n end\n\n @browser.pluggable_parser.html = Mechanize::Page\n\n previous_length != @results.size\n end",
"def next_page\n api.send(self.method, options.merge(:page => self.page.to_i + 1)) if self.page.to_i < self.pages.to_i\n end",
"def next_page\n return false if final_page?\n next_button = @search_results_page.links_with(text: 'next')[0]\n @search_results_page = next_button.click\n end",
"def next\n perform_request(next_page_uri) if next?\n end",
"def next_page\n if @page < @page_count\n @client.find(@query, :page_size => @page_size,\n :page_index => @page + 1)\n end\n end",
"def next_page(params = {})\n @items = load_page(params)\n end",
"def next\n if @options[\"page\"] && !cbp_request?\n clear_cache\n @options[\"page\"] = @options[\"page\"].to_i + 1\n elsif (@query = @next_page)\n # Send _only_ url param \"?page[after]=token\" to get the next page\n @options.page&.delete(\"before\")\n fetch(true)\n else\n clear_cache\n @resources = []\n end\n end",
"def next_page\n anchors = get_url.css(\"a.paginatorActive\")\n if anchors.last.children[0].text == \"volgende\"\n @url = anchors.last.attributes[\"href\"].value\n else\n @url = \"\"\n end\n end",
"def nextItems\n @searchItems = Item.nextItems params[:start], params[:page], 9\n render template: 'common/search/js/nextSearchItems.js'\n end",
"def nextCustomers\n @searchCustomers = Customer.nextActiveCustomers params[:start], params[:page], 9\n @searchController = \"prescriptions\"\n render template: 'common/search/js/nextSearchCustomers.js'\n end",
"def scrape_and_look_for_next_link(page)\n scrape_speech_list(page.body)\n link = page.link_with(:text => 'Next >')\n if link\n page.form_with(:name => 'form1') do |f|\n f['__EVENTTARGET'] = 'ctl01$listing$ctl00$pager$nextButton'\n f['__EVENTARGUMENT'] = ''\n page = f.submit()\n end\n scrape_and_look_for_next_link(page)\n end\nend",
"def next_page(response)\n max_pages = response[\"numberOfPages\"]\n original_query = response.request.options[:query]\n current_page = original_query[:page]\n next_page = current_page.to_i + 1\n raise \"Already at Last Page\" if current_page >= max_pages\n\n query = original_query.merge({page: next_page})\n SchoolDigger::Api.get( response.request.path, query: query, timeout: 30)\n end",
"def go_next!\n check_page_availability!(:next)\n\n response = UserEngage.client.get(self.next)\n update_page_related_attributes!(response)\n\n @attributes[:current_page] += 1\n\n self\n end",
"def next_page\n self.page + 1\n end",
"def next_page_link\n paging['next'] if paging\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
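The `Prescriber.nextPrescribers` class method called in the row above is not included in this dataset. A minimal sketch of such a paginated finder is shown below; the method name and the 9-per-page default come from the snippet, while the offset-based query, the meaning of `start`, and the 1-based `page` are assumptions rather than the original implementation.

# Hypothetical sketch of the paginated finder used by the controller above.
# `start` is assumed to be the id of the first matching record and `page`
# the 1-based page number; both are assumptions, not taken from the source.
class Prescriber < ActiveRecord::Base
  def self.nextPrescribers(start, page, per_page)
    where("id >= ?", start)
      .order(:id)
      .offset((page.to_i - 1) * per_page)
      .limit(per_page)
  end
end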
DELETE /prescribers/1 DELETE /prescribers/1.json
|
def destroy
@prescriber.destroy
respond_to do |format|
format.html { redirect_to prescribers_url, notice: 'Prescriber was successfully destroyed.' }
format.json { head :no_content }
end
end
|
[
"def destroy\n\n @subscriber = Subscriber.find(params[:id])\n @subscriber.destroy\n\n respond_to do |format|\n format.html { redirect_to subscribers_url }\n format.json { head :ok }\n end\n end",
"def destroy\n # @prescriber.destroy\n # respond_to do |format|\n # format.html { redirect_to prescribers_url, notice: 'Prescriber was successfully destroyed.' }\n # format.json { head :no_content }\n # end\n end",
"def destroy\n @subscriber = Subscriber.find(params[:id])\n @subscriber.destroy\n\n respond_to do |format|\n format.html { redirect_to subscribers_url }\n format.json { head :ok }\n end\n end",
"def destroy\n @subscriber = Subscriber.find(params[:id])\n @subscriber.destroy\n\n respond_to do |format|\n format.html { redirect_to admin_subscribers_url }\n format.json { head :ok }\n end\n end",
"def destroy\n @subscriber = Subscriber.find(params[:id])\n @subscriber.destroy\n\n respond_to do |format|\n format.html { redirect_to(subscribers_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @subscriber = current_user.subscribers.find(params[:id])\n @subscriber.destroy\n\n respond_to do |format|\n format.html { redirect_to(subscribers_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @launchsubscriber = Launchsubscriber.find(params[:id])\n @launchsubscriber.destroy\n\n respond_to do |format|\n format.html { redirect_to launchsubscribers_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @newsletter.destroy\n respond_to do |format|\n format.json { head :no_content }\n end\n end",
"def destroy\n @subscriber.destroy\n respond_to do |format|\n format.html { redirect_to subscriber_group_path(@subscriber_group), notice: 'Subscriber was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @subscribe.destroy\n\n respond_to do |format|\n format.html { redirect_to subscribes_url }\n format.json { head :no_content }\n end\n end",
"def delete_subscriber(id_or_email)\n make_json_api_request :delete, \"v2/#{account_id}/subscribers/#{CGI.escape id_or_email}\"\n end",
"def destroy\n @admin_subscribe.destroy\n respond_to do |format|\n format.html { redirect_to admin_subscribes_url, notice: 'Subscribe was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @newsletter_subscriber.destroy\n respond_to do |format|\n format.html { redirect_to newsletter_subscribers_url, notice: 'Newsletter subscriber was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @visit = Visit.find(params[:visit_id])\n @prescription = @visit.prescriptions.find(params[:id])\n @prescription.destroy\n\n respond_to do |format|\n format.html { redirect_to visit_prescriptions_url() }\n format.json { head :ok }\n end\n end",
"def destroy\n @subscribe.destroys\n respond_to do |format|\n format.html { redirect_to subscribes_url, notice: 'Subscribe was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @emails_of_client = EmailsOfClient.find(params[:id])\n @emails_of_client.destroy\n\n respond_to do |format|\n format.html { redirect_to emails_of_clients_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @prescription = Prescription.find(params[:id])\n @prescription.destroy\n\n respond_to do |format|\n format.json { head :no_content }\n end\n end",
"def destroy\n @subscribtion = Subscribtion.find(params[:id])\n @subscribtion.destroy\n\n respond_to do |format|\n format.html { redirect_to subscribtions_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @subs_email.destroy\n respond_to do |format|\n format.html { redirect_to subs_emails_url }\n format.json { head :no_content }\n end\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Return true if one of the sample attachments is audio
|
def contains_audio?
result = false
sample_attachments.map{|sa| result = true if sa.is_audio?}
result
end
|
[
"def audio?\n !!(self.file_content_type =~ %r{^(audio)/.*$})\n end",
"def audio?\n media_type == :audio\n end",
"def audio?\n content_type.start_with?(\"audio\")\n end",
"def is_audio_type?\n file_content_type =~ /\\Aaudio\\/.*\\Z/\n end",
"def audio?\n has_stream_of :audio\n end",
"def is_audio?\n result = false\n if !mime_type.blank?\n splits = mime_type.split('/')\n if splits[0] == AUDIO\n result = true\n end\n end\n \n result\nend",
"def audio?\n @streams.any? { |stream| stream.is_a? AudioStream }\n end",
"def audio?\n self.sti_type == AUDIO_TYPE\n end",
"def audio?\n #or @volume == 0\n if @mute or @type == \"image\"\n #puts \"no audio\"\n return false\n else\n #puts \"has audio\"\n return true\n end\n end",
"def audio?\n !!audio_stream\n end",
"def has_audios?\n audios[I18n.locale.to_sym].length + attached_audios[I18n.locale.to_sym].length > 0\n end",
"def conforms_to_audio\n return false if bad_uti?\n return true if @uti == \"com.apple.m4a-audio\"\n return true if @uti == \"com.microsoft.waveform-audio\"\n return true if @uti == \"public.aiff-audio\"\n return true if @uti == \"public.midi-audio\"\n return true if @uti == \"public.mp3\"\n return false\n end",
"def has_sound?\n @has_sound = if @movie_info[:format][:nb_streams] == 1\n false\n elsif @movie_info[:format][:nb_streams] == 2\n true\n end\n end",
"def is_embeddable?\n self.filetype.match(/^(audio|image|video)$/)\n end",
"def audio_without_closed_captions?\n audio? && closed_captions.blank?\n end",
"def is_audioframe?\n @id[2, 2] == 'wb'\n end",
"def any?(audio_recording)\n overlap_query(audio_recording).any?\n end",
"def is_sound?\n type == 'SoundMessage'\n end",
"def audio\n first_stream_of :audio\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
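The `contains_audio?` document above (and the video, document, and image variants in the rows that follow) builds its result by mutating a flag inside `map`. Purely as an illustrative alternative, the same check can be written with `Enumerable#any?`:

# Equivalent to contains_audio? above: any? short-circuits as soon as one
# attachment reports itself as audio. The same rewrite applies to the
# contains_video?, contains_document?, and contains_image? variants below.
def contains_audio?
  sample_attachments.any?(&:is_audio?)
end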
Return true if one of the sample attachments is video
|
def contains_video?
result = false
sample_attachments.map{|sa| result = true if sa.is_video?}
result
end
|
[
"def video?\n content_type.start_with?(\"video\")\n end",
"def is_video_type?\n file_content_type =~ %r(video)\n end",
"def video?\n !!( content_type =~ Transit.config.video_regexp )\n end",
"def video?\n media_type == :video\n end",
"def video_file_type?\n MIME::Types.of(@raw_name).any? do |x| x.to_s =~/^video/ end\n end",
"def video?\n self.type == \"Assetabler::Video\"\n end",
"def video?\n @tag == :video\n end",
"def video?\n @streams.any? { |stream| stream.is_a? VideoStream }\n end",
"def video?\n is_a?(OEmbed::Response::Video)\n end",
"def is_embeddable?\n self.filetype.match(/^(audio|image|video)$/)\n end",
"def video?\n !!video_stream\n end",
"def is_mp4?\n video_type == :mp4\n end",
"def web_video?\n %w(mp4 ogg webm).each do |sub_type|\n return true if Rack::Mime.match?(mime_type, \"video/#{sub_type}\")\n end\n\n false\n end",
"def is_video_file?(file)\n regex = /(.mkv|.mp4|.avi)$/i\n file.match?(regex)\n end",
"def video?\n marc_leader_06_match = record.leader.byteslice(6) == 'g'\n marc_008_33_match = record.fields('008').find do |field|\n %w[f m v].include?(field.value.byteslice(33))\n end\n\n marc_006_match = record.fields('006').find do |field|\n field.value.byteslice(0) == 'g' &&\n %w[f m v].include?(field.value.byteslice(16))\n end\n\n video_007_types = %w[m v]\n marc_007_match = record.fields('007').find do |field|\n video_007_types.include?(field.value.byteslice(0))\n end\n\n return true if (marc_leader_06_match && marc_008_33_match) ||\n marc_006_match || marc_007_match\n end",
"def playable?\n video_source\n end",
"def mpeg?(f)\n if MimeMagic.by_path(f) == 'video/mpeg'\n true\n elsif MimeMagic.by_magic(File.open(f)) == 'video/mpeg'\n true\n else\n false\n end\nend",
"def movie?\n content_type == \"Movie\"\n end",
"def multiple_video_sets?\n @videos.keys.size > 1\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Return true if one of the sample attachments is a document
|
def contains_document?
result = false
sample_attachments.map{|sa| result = true if sa.is_document?}
result
end
|
[
"def is_document?\n @type == :document\n end",
"def document?\n self.type == \"Document\"\n end",
"def document?\n self.type == \"Assetabler::Document\"\n end",
"def is_type_attachment?()\n\t\t\t\tif !@result.nil?\n\t\t\t\t\t@result.each do |elem|\n\t\t\t\t\t\tif elem[:type] == 'attachment'\n\t\t\t\t\t\t\treturn true\n\t\t\t\t\t\tend\n\t\t\t\t\tend\n\t\t\t\t\treturn false\n\t\t\t\tend\n\t\t\tend",
"def multiple_documents?(document_type)\n documents = self.design_review_documents\n other = DocumentType.get_other_document_type.name\n pad_p = DocumentType.get_pad_patterns_document_type.name\n mechd = DocumentType.get_mech_drawing_document_type.name\n test = DocumentType.get_test_document_type.name\n\n other_document = document_type.name == other\n pad_p_document = document_type.name == pad_p\n mechd_document = document_type.name == mechd\n test_document = document_type.name == test\n \n if !other_document && !pad_p_document && !mechd_document && !test_document\n documents |= self.design_review_documents\n docs = documents.collect { |d| d if d.document_type_id == document_type.id }.compact\n end\n\n !other_document && !pad_p_document && !mechd_document && !test_document && docs.size > 1\n \n end",
"def attachment?\n type == \"attachment\"\n end",
"def attachment?\n !!find_attachment\n end",
"def attachments?\n !attachments.empty?\n end",
"def doc_type?(doc_types)\n doc_types.to_set.intersection(document_types).any?\n end",
"def specification_attached?\n self.eco_documents.detect { |d| d.specification? } != nil\n end",
"def embed?\n inheritable_documents.any?\n end",
"def conforms_to_inline_attachment\n return true if @uti.start_with?(\"com.apple.notes.inlinetextattachment\")\n return false\n end",
"def opendocument?\n OPENDOCUMENT_EXTENSIONS.include? file_extension.upcase\n end",
"def allows_document?\n self.allows_title? && ![TITLE, COVER].include?(self.kind)\n end",
"def conforms_to_inline_attachment\n return false if bad_uti?\n return true if @uti.start_with?(\"com.apple.notes.inlinetextattachment\")\n return false\n end",
"def has_attachments?\n !attachments.empty?\n end",
"def attachment?\n attachment.present? && attachment.readable?\n end",
"def attachment_type?(attachment_type)\n !attachments.nil? && attachments.first['type'] == attachment_type\n end",
"def has_docs_for_type?(type)\n self.vlp_documents.any?{ |doc| doc.verification_type == type && doc.identifier }\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Return true if one of the sample attachments is an image
|
def contains_image?
result = false
sample_attachments.map{|sa| result = true if sa.is_image?}
result
end
|
[
"def image?\n attachment.content_type =~ /^image/\n end",
"def image?\n !(self.attachment_content_type =~ /^image.*/).nil?\n end",
"def image?\n attachment_content_type =~ %r{^(image|(x-)?application)/(bmp|gif|jpeg|jpg|pjpeg|png|x-png)$}\n end",
"def image?\n self.file_content_type == 'image/png' || self.file_content_type == 'image/jpeg'\n end",
"def is_image?\n !(attachment_content_type =~ /^image.*/).nil?\n end",
"def is_image?\n (self.mime =~ /(jpg|gif|png)/) != nil\n end",
"def image?(file)\n file.content_type.include? 'image'\n end",
"def is_image_type?\n item_content_type =~ %r(image)\n end",
"def is_image?\n type == :image\n end",
"def image?(message)\n # if message has image award bonus Starbucks\n return false if message.nil? or message.attachments.nil?\n\n has_image = false\n message.attachments.each do |attachment|\n has_image = attachment.image?\n break if has_image\n end\n\n return has_image\n end",
"def is_image\n return (self.file.blob.content_type == 'image/jpeg' || self.file.blob.content_type == 'image/jpg' || self.file.blob.content_type == 'image/png' || self.file.blob.content_type == 'image/gif' || self.file.blob.content_type == 'image/bmp')\n end",
"def image?\n self.sti_type == IMAGE_TYPE\n end",
"def not_image?(_new_file)\r\n !file.content_type.include? 'image'\r\n end",
"def image?(content_type)\n content_types.include?(content_type)\n end",
"def image?(content_type)\n content_types.include?(content_type)\n end",
"def not_image?(_new_file)\n !file.content_type.include? 'image'\n end",
"def not_image?(new_file)\n !self.file.content_type.include? 'image'\n end",
"def image?\n end",
"def is_type_attachment?()\n\t\t\t\tif !@result.nil?\n\t\t\t\t\t@result.each do |elem|\n\t\t\t\t\t\tif elem[:type] == 'attachment'\n\t\t\t\t\t\t\treturn true\n\t\t\t\t\t\tend\n\t\t\t\t\tend\n\t\t\t\t\treturn false\n\t\t\t\tend\n\t\t\tend"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Get videos using playlist ID
|
def get_video_playlist key, id
  # id is the channel's "uploads" playlist ID taken from channel_details
  channel_videos = Youtube.get('/playlistItems?part=snippet&playlistId=' + id + '&key=' + key)
end
|
[
"def all_videos_of_one_playlist playlist_id\n result = get_videos playlist_id\n next_page_token = result['nextPageToken']\n videos = result['items']\n loop do\n break if next_page_token.nil?\n result = get_videos(playlist_id, next_page_token)\n next_page_token = result['nextPageToken']\n videos += result['items']\n end\n videos\n end",
"def videos_by_playlist(id, page = 1, per_page = 20)\n response = videos_list_by_playlist(:id => id, :page => page, :per_page => per_page)\n _parse_video_response(response)\n end",
"def videos_by_playlist(params)\n params[:page] = integer_or_default(params[:page], 1)\n\n unless params[:max_results]\n params[:max_results] = integer_or_default(params[:per_page], 25)\n end\n\n unless params[:offset]\n params[:offset] = calculate_offset(params[:page], params[:max_results] )\n end\n\n request = YouTubeG::Request::PlaylistVideoSearch.new(params)\n\n logger.debug \"Submitting request [url=#{request.url}].\" if logger\n if params[:playlist_video_id] and params[:playlist_id]\n parser = YouTubeG::Parser::PlaylistVideoFeedParser.new(request.url)\n parser.parse(params[:playlist_id])\n else\n parser = YouTubeG::Parser::PlaylistVideosFeedParser.new(request.url)\n parser.parse\n end\n end",
"def get_folder_videos folder_id, **args\n get(\"/projects/#{folder_id}/videos\", args)\n end",
"def get_playlist_vids(playlist)\n Log.log.debug \"Returning playlist vids\"\n vids = Array.new\n playlist.playlist_items.each {|item| vids << item.video_id}\n return vids\nend",
"def get_album_videos album_id\n get(\"/albums/#{album_id}/videos\")\n end",
"def videos options={}\n response = client.get(\"/#{id}/videos\", options)\n end",
"def video_by(params)\n params = {:video_id => params} if !params.is_a?(Hash)\n url = \"http://gdata.youtube.com/feeds/api/\"\n video_id = params[:video_id].split(\"/\").last\n if params[:user]\n url << \"users/#{params[:user]}/uploads/#{video_id}\"\n else\n url << \"videos/#{video_id}\"\n end\n parser = YouTubeG::Parser::VideoFeedParser.new(url, request_headers, request_options)\n parser.parse\n end",
"def get_video_by_id(id)\n Video.find_by_id(id)\n end",
"def videos\n video_ids = []\n @list_data['items'].each do |top_video|\n video_ids.push(top_video['id']['videoId']) unless top_video['id']['videoId'].nil?\n end\n\n # since we need to call youtube/api/videos to get the embed_link\n videos_data = @gateway.certain_id_videos_data(video_ids)\n @top_video_mapper.build_video_items(videos_data['items'])\n end",
"def get_uploads_playlist_items(playlist_id)\n playlists = HTTParty.get(\"https://www.googleapis.com/youtube/v3/channels?part=id%2C+contentDetails&id=#{playlist_id}&key=#{@key}\")\n @playlist_items = playlists['items'].first['contentDetails']['relatedPlaylists']['uploads']\n end",
"def playlist_by(pid)\n playlist_id = pid =~ /^http/ ? pid : \"http://gdata.youtube.com/feeds/playlists/#{pid}\"\n logger.debug \"Submitting request [url=#{playlist_id}].\" if logger\n parser = YouTubeG::Parser::PlaylistFeedParser.new(playlist_id)\n parser.parse\n end",
"def video\n YouTubeApi.find_video(youtube_id)\n end",
"def get_playlist(id)\n result = playlists.select { |p| p.id == id }\n result.nil? ? nil : result.first\n end",
"def find_playlist\n @playlist = Playlist.find(params[:id])\n end",
"def videos(product_id, params = {})\n response, status = BeyondApi::Request.get(@session,\n \"/products/#{product_id}/videos\",\n params)\n\n handle_response(response, status)\n end",
"def played_videos\n Room.find(id).video_polls\n .select do |video_poll|\n video_poll.played_video_id &&\n (video_poll.created_at > last_playlist_completion_time)\n end\n .map do |video_poll|\n Video.find(video_poll.played_video_id)\n end\n end",
"def playlist\n player.playlist\n end",
"def asset_get_by_playlist_id(playlist_id)\n params = { :playlist_id => playlist_id }\n call_method('Asset.GetByPlaylistID', params, :primary_key_name => 'ASSETS')\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
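A hypothetical usage of the playlist helper above, assuming `Youtube.get` is an HTTParty-style wrapper that returns the parsed playlistItems JSON (neither the wrapper nor the response shape is defined in this dataset):

# Hypothetical usage: extract the video ids from one page of results.
# ENV['YOUTUBE_API_KEY'] and uploads_playlist_id are placeholders.
response = get_video_playlist(ENV['YOUTUBE_API_KEY'], uploads_playlist_id)
video_ids = response['items'].map { |item| item['snippet']['resourceId']['videoId'] }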
POST /work_articles POST /work_articles.json
|
def create
@work_article = WorkArticle.new(work_article_params)
respond_to do |format|
if @work_article.save
format.html { redirect_to @work_article, notice: t('notifications_masc.success.resource.created',
resource: t('work_articles.form.resource')) }
format.json { render :show, status: :created, location: @work_article }
else
format.html { render :new }
format.json { render json: @work_article.errors, status: :unprocessable_entity }
end
end
end
|
[
"def create\n article = @project.articles.create(params_for_create)\n\n respond_with article do |format|\n format.json do\n if article.errors.blank?\n render json: decorate_article(article), status: :accepted\n else\n render json: { error: { errors: article.errors } }, status: :bad_request\n end\n end\n end\n end",
"def create\n @working_article = WorkingArticle.new(working_article_params)\n\n respond_to do |format|\n if @working_article.save\n format.html { redirect_to @working_article, notice: 'Working article was successfully created.' }\n format.json { render :show, status: :created, location: @working_article }\n else\n format.html { render :new }\n format.json { render json: @working_article.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n uri = URI.parse('http://0.0.0.0:3000/articles.json')\n @article = Article.new(params[:article])\n @response = Net::HTTP.post_form(uri, {\"auth_token\" => session[:api_token], \"article\" => @article.to_json})\n @article = Article.new.from_json(@response.body)\n redirect_to :action => \"show\", :id => @article.id\n end",
"def create\n @article = Article.new(params[:article])\n\n respond_to do |format|\n if @article.save\n format.html { redirect_to @article, :notice => 'Article was successfully created.' }\n format.json { render :json => @article, :status => :created, :location => @article }\n else\n format.html { render :action => \"new\" }\n format.json { render :json => @article.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def create\n @creator.works << Work.where(id: work_ids)\n if @creator.save\n render json: @creator, status: :created, location: @creator\n else\n render json: @creator.errors, status: :unprocessable_entity\n end\n end",
"def create\n @journal_article = JournalArticle.new(journal_article_params)\n\n respond_to do |format|\n if @journal_article.save\n format.html { redirect_to @journal_article, notice: 'Journal article was successfully created.' }\n format.json { render :show, status: :created, location: @journal_article }\n else\n format.html { render :new }\n format.json { render json: @journal_article.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @article = Article.new(params[:article])\n\n respond_to do |format|\n if @article.save\n format.html { redirect_to @article, notice: 'Article was successfully created.' }\n format.json { render json: @article, status: :created, location: @article }\n else\n format.html { render action: \"new\" }\n format.json { render json: @article.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @yh_article = YhArticle.new(yh_article_params)\n\n respond_to do |format|\n if @yh_article.save\n format.html { redirect_to @yh_article, notice: 'Yh article was successfully created.' }\n format.json { render :show, status: :created, location: @yh_article }\n else\n format.html { render :new }\n format.json { render json: @yh_article.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @task_article = TaskArticle.new(task_article_params)\n\n respond_to do |format|\n if @task_article.save\n format.html { redirect_to @task_article, notice: 'Task article was successfully created.' }\n format.json { render :show, status: :created, location: @task_article }\n else\n format.html { render :new }\n format.json { render json: @task_article.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @art_work = ArtWork.new(art_work_params)\n\n respond_to do |format|\n if @art_work.save\n format.html { redirect_to @art_work, notice: 'Art work was successfully created.' }\n format.json { render :show, status: :created, location: @art_work }\n else\n format.html { render :new }\n format.json { render json: @art_work.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @work = Work.new(params[:work])\n\n respond_to do |format|\n if @work.save\n format.html { redirect_to @work, notice: 'Work was successfully created.' }\n format.json { render json: @work, status: :created, location: @work }\n else\n format.html { render action: \"new\" }\n format.json { render json: @work.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n if params[:workflow_step_id].present?\n @knowledge_article = KnowledgeArticle.new(knowledge_article_params)\n @workflow_step = WorkflowStep.find(params[:workflow_step_id])\n respond_to do |format|\n if @knowledge_article.save\n WorkflowStepKnowledgeArticle.create!(workflow_step_id: @workflow_step.id, knowledge_article_id: @knowledge_article.id)\n format.html { redirect_to @knowledge_article, notice: 'Knowledge article was successfully created.' }\n format.json { render :show, status: :created, location: @knowledge_article }\n else\n format.html { render :new }\n format.json { render json: @knowledge_article.errors, status: :unprocessable_entity }\n end\n end\n else\n render json: {success: false, message: 'Missing parameter: workflow_step_id.'}\n end\n end",
"def create\n @work = @client.works.build(work_params)\n\n respond_to do |format|\n if @work.save\n format.html { redirect_to client_work_path(@client, @work), notice: 'Work was successfully created.' }\n format.json { render :show, status: :created, location: @work }\n else\n format.html { render :new }\n format.json { render json: @work.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @saved_article = SavedArticle.new(params[:saved_article])\n\n respond_to do |format|\n if @saved_article.save\n format.html { redirect_to @saved_article, notice: 'Saved article was successfully created.' }\n format.json { render json: @saved_article, status: :created, location: @saved_article }\n else\n format.html { render action: \"new\" }\n format.json { render json: @saved_article.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @saved_article = SavedArticle.new(saved_article_params)\n\n respond_to do |format|\n if @saved_article.save\n format.html { redirect_to @saved_article, notice: 'Saved article was successfully created.' }\n format.json { render :show, status: :created, location: @saved_article }\n else\n format.html { render :new }\n format.json { render json: @saved_article.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @myarticle = Myarticle.new(myarticle_params)\n\n respond_to do |format|\n if @myarticle.save\n format.html { redirect_to @myarticle, notice: 'Myarticle was successfully created.' }\n format.json { render :show, status: :created, location: @myarticle }\n else\n format.html { render :new }\n format.json { render json: @myarticle.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @test_article = TestArticle.new(test_article_params)\n\n respond_to do |format|\n if @test_article.save\n format.html { redirect_to @test_article, notice: 'Test article was successfully created.' }\n format.json { render :show, status: :created, location: @test_article }\n else\n format.html { render :new }\n format.json { render json: @test_article.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @published_article = PublishedArticle.new(published_article_params)\n\n respond_to do |format|\n if @published_article.save\n format.html { redirect_to @published_article, notice: 'Published article was successfully created.' }\n format.json { render :show, status: :created, location: @published_article }\n else\n format.html { render :new }\n format.json { render json: @published_article.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @extrawork = Extrawork.new(extrawork_params)\n\n respond_to do |format|\n if @extrawork.save\n format.html { redirect_to @extrawork, notice: 'Extrawork was successfully created.' }\n format.json { render action: 'show', status: :created, location: @extrawork }\n else\n format.html { render action: 'new' }\n format.json { render json: @extrawork.errors, status: :unprocessable_entity }\n end\n end\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
PATCH/PUT /work_articles/1 PATCH/PUT /work_articles/1.json
|
def update
respond_to do |format|
if @work_article.update(work_article_params)
format.html { redirect_to @work_article, notice: t('notifications_masc.success.resource.updated',
resource: t('work_articles.form.resource')) }
format.json { render :show, status: :ok, location: @work_article }
else
format.html { render :edit }
format.json { render json: @work_article.errors, status: :unprocessable_entity }
end
end
end
|
[
"def update\n article = Article.find(params[:id])\n article.update(article_params)\n render json: { article: article }\n end",
"def update\n respond_to do |format|\n if @articles123.update(articles123_params)\n format.html { redirect_to @articles123, notice: 'Articles123 was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @articles123.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n artwork = Artwork.find(params[:id])\n if artwork.update(artwork_params)\n render json: artwork\n else\n render json: artwork.errors.full_messages, status: :unprocessable_entity \n end\n end",
"def update\n respond_to do |format|\n if @articles1.update(articles1_params)\n format.html { redirect_to @articles1, notice: 'Articles1 was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @articles1.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n # Require remoderation after update\n full_article_params = article_params.merge(fresh: true)\n if !@article.update(full_article_params) \n format.html { render action: 'edit' }\n format.json { render json: @article.errors, status: :unprocessable_entity }\n else\n format.html { redirect_to @article, notice: t('articles.successfully_updated') }\n format.json { head :no_content }\n end\n end\n end",
"def update\n @issue = Issue.find(params[:issue_id])\n @article = Article.find(params[:id]) \n @article.update_attributes(params[:article])\n #@article = @issue.articles.find(params[:id])\n\n # respond_to do |format|\n # if @article.update_attributes(params[:issue])\n # format.html { redirect_to @issue, notice: 'Article was successfully updated.' }\n # format.json { head :no_content }\n # else\n # format.html { render action: \"edit\" }\n # format.json { render json: @issue.errors, status: :unprocessable_entity }\n # end\n # end\n redirect_to issue_path(@issue)\n end",
"def update\n respond_to do |format|\n if @art_work.update(art_work_params)\n format.html { redirect_to @art_work, notice: 'Art work was successfully updated.' }\n format.json { render :show, status: :ok, location: @art_work }\n else\n format.html { render :edit }\n format.json { render json: @art_work.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @simple_article.update(simple_article_params)\n format.html { redirect_to @simple_article, notice: 'Simple article was successfully updated.' }\n format.json { render :show, status: :ok, location: @simple_article }\n else\n format.html { render :edit }\n format.json { render json: @simple_article.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @extrawork.update(extrawork_params)\n format.html { redirect_to @extrawork, notice: 'Extrawork was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @extrawork.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @art_work.update(art_work_params)\n format.html { redirect_to @art_work }\n format.json { render :show, status: :ok, location: @art_work }\n else\n format.html { render :edit }\n format.json { render json: @art_work.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @article = Article.find(params[:id])\n authorize @article\n if @article.update(article_params)\n render 'api/v1/articles/show'\n else\n render json: @article.errors, status: :unprocessable_entity\n end\n end",
"def update\n\n @work = Work.find(params[:id])\n\n respond_to do |format|\n if @work.update_attributes(params[:work])\n format.html { redirect_to @work, notice: 'Work was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @work.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @work = Work.find(params[:id])\n\n respond_to do |format|\n if @work.update_attributes(params[:work])\n format.html { redirect_to @work, notice: 'Work was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @work.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @task_article.update(task_article_params)\n format.html { redirect_to @task_article, notice: 'Task article was successfully updated.' }\n format.json { render :show, status: :ok, location: @task_article }\n else\n format.html { render :edit }\n format.json { render json: @task_article.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @scientific_article.update(scientific_article_params)\n format.html { redirect_to @scientific_article, notice: 'Scientific article was successfully updated.' }\n format.json { render :show, status: :ok, location: @scientific_article }\n else\n format.html { render :edit }\n format.json { render json: @scientific_article.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @clothing_article = ClothingArticle.find(params[:id])\n\n respond_to do |format|\n if @clothing_article.update_attributes(params[:clothing_article])\n format.html { redirect_to @clothing_article, notice: 'Clothing article was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @clothing_article.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @artwork.update(artwork_params)\n format.html { redirect_to artwork_path(@artwork), notice: 'Artwork was successfully updated.' }\n format.json { render :nothing => true }\n else\n format.html { render :edit }\n format.json { render :nothing => true }\n end\n end\n end",
"def update\n @artikel = Artikel.find(params[:id])\n\n if @artikel.update(artikel_params)\n head :no_content\n else\n render json: @artikel.errors, status: :unprocessable_entity\n end\n end",
"def update\n respond_to do |format|\n if @journalarticle.update(journalarticle_params)\n format.html { redirect_to :back, notice: 'Journalarticle was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @journalarticle.errors, status: :unprocessable_entity }\n end\n end\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
DELETE /work_articles/1 DELETE /work_articles/1.json
|
def destroy
@work_article.destroy
respond_to do |format|
format.html { redirect_to work_articles_url }
format.json { head :no_content }
end
end
|
[
"def destroy\n @articles123.destroy\n respond_to do |format|\n format.html { redirect_to articles123s_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @articles1.destroy\n respond_to do |format|\n format.html { redirect_to articles1s_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @journalarticle.destroy\n respond_to do |format|\n format.html { redirect_to journalarticles_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @article = Article.find(params[:id]) \n @article.destroy\n\n respond_to do |format|\n format.html { redirect_to articles_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @article = Article.where(id: params[:id]).last\n @article.destroy\n\n respond_to do |format|\n format.html { redirect_to admin_articles_url }\n format.json { head :ok }\n end\n end",
"def destroy\n \n uri = URI.parse('http://0.0.0.0:3000/articles/delete/' + params[:id] + '.json')\n @response = Net::HTTP.post_form(uri, {\"auth_token\" => session[:api_token]})\n\n respond_to do |format|\n format.html { redirect_to articles_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @simple_article.destroy\n respond_to do |format|\n format.html { redirect_to simple_articles_url, notice: 'Simple article was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n artwork = Artwork.find(params[:id])\n artwork.destroy\n render json: artwork\n end",
"def destroy\n @saved_article = SavedArticle.find(params[:id])\n @saved_article.destroy\n\n respond_to do |format|\n format.html { redirect_to saved_articles_url }\n format.json { head :ok }\n end\n end",
"def destroy\n @yh_article.destroy\n respond_to do |format|\n format.html { redirect_to yh_articles_url, notice: 'Yh article was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @journal_article.destroy\n respond_to do |format|\n format.html { redirect_to journal_articles_url, notice: 'Journal article was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @article_datum = ArticleDatum.find(params[:id])\n @article_datum.destroy\n\n respond_to do |format|\n format.html { redirect_to article_data_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @interest_news.destroy\n respond_to do |format|\n format.html { redirect_to interest_news_index_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @extrawork.destroy\n respond_to do |format|\n format.html { redirect_to extraworks_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @admin_article = Admin::Article.find(params[:id])\n @admin_article.destroy\n\n respond_to do |format|\n format.html { redirect_to admin_articles_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @work_entry.destroy\n respond_to do |format|\n format.html { redirect_to work_entries_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @article1.destroy\n respond_to do |format|\n format.html { redirect_to article1s_url, notice: 'Article1 was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @clothing_article = ClothingArticle.find(params[:id])\n @clothing_article.destroy\n\n respond_to do |format|\n format.html { redirect_to clothing_articles_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @art_work.destroy\n respond_to do |format|\n format.html { redirect_to art_works_url, notice: 'Art work was successfully destroyed.' }\n format.json { head :no_content }\n end\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Converts all .less files in +paths+ to STYLESHEETS_PATH/[filename].css. Options: :compress removes all newlines (`true` or `false`).
|
def run(options = {})
paths.map {|path| Dir["#{path}/*.less"]}.flatten.each {|less_source|
destination_filename = "#{File.basename(less_source, File.extname(less_source))}.css"
destination = "#{STYLESHEETS_PATH}/#{destination_filename}"
if !File.exists?(destination) || File.stat(less_source).mtime > File.stat(destination).mtime
engine = Less::Engine.new(File.read(less_source))
css = Less.version > "1.0" ? engine.to_css : engine.to_css(:desc)
css.delete!("\n") if options[:compress]
File.open(destination, "w") {|file|
file.write HEADER % [destination_filename] if Rails.env == "development"
file.write css
}
end
}
end
|
[
"def run(options = {})\n paths.each do |path|\n Dir.chdir(path) do\n Dir[\"**/*.less\"].each do |less_sheet|\n directory = File.dirname(less_sheet)\n target_filename = File.basename(less_sheet, File.extname(less_sheet)) + \".css\"\n destination = \"#{directory}/#{target_filename}\"\n \n if !File.exists?(destination) || File.stat(less_sheet).mtime > File.stat(destination).mtime\n engine = File.open(less_sheet) {|f| Less::Engine.new(f) }\n css = engine.to_css\n css.delete(\"\\n\") if options[:compress]\n \n FileUtils.mkdir_p(directory)\n File.open(destination, \"w\") {|file|\n file.write HEADER % [path + \"/\" + less_sheet] if Rails.env == \"development\"\n file.write css\n }\n end\n end\n end # chdir\n end # each\n end",
"def run(options = {})\n less_sheets = STYLESHEET_PATHS.map {|p| Dir[\"#{p}/*.less\"] }.flatten\n less_sheets.each {|less|\n engine = Less::Engine.new(File.read(less))\n css = Less.version > \"1.0\" ? engine.to_css : engine.to_css(:desc)\n css = css.delete(\"\\n\") if options[:compress]\n \n destination_file = File.basename(less, File.extname(less))\n destination_directory = \"#{Rails.root}/public/stylesheets\"\n destination_path = \"#{destination_directory}/#{destination_file}.css\"\n \n FileUtils.mkdir_p(destination_directory)\n File.open(destination_path, \"w\") {|file|\n file.write HEADER % [destination_file] if Rails.env == \"development\"\n file.write css\n }\n }\n end",
"def css\n puts 'Compiling LESS files to CSS...'\n `lessc ./less/main.less ../www/_res/css/uncompressed/main.css`\n puts 'Compressing CSS files...'\n `java -jar ./_scripts/yuicompressor-2.4.2.jar ../www/_res/css/uncompressed/main.css -o ../www/_res/css/main.css`\nend",
"def compress_css(paths, variant=nil, asset_url=nil)\n return @yui_css.compress(concatenate(paths)) if variant.nil?\n compressed_css = @yui_css.compress(concatenate_and_tag_images(paths))\n return with_data_uris(compressed_css) if variant == :datauri\n return with_mhtml(compressed_css, asset_url) if variant == :mhtml\n raise PackageNotFound, \"\\\"#{variant}\\\" is not a valid stylesheet variant\"\n end",
"def compress_js(paths)\n if (jst_paths = paths.grep(Jammit.template_extension_matcher)).empty?\n js = concatenate(paths)\n else\n js = concatenate(paths - jst_paths) + compile_jst(jst_paths)\n end\n Jammit.compress_assets ? @js_compressor.compress(js) : js\n end",
"def compress_js(paths)\n @yui_js.compress(concatenate(paths))\n end",
"def minify_css(*files_in_and_out)\n files_in_and_out.each do |file_in_and_out|\n if file_in_and_out.is_a? String\n file_in = \"#{ROOT_PATH}#{file_in_and_out}\"\n file_out = \"#{ROOT_PATH}#{file_in_and_out}.tmp\"\n else\n file_in = \"#{ROOT_PATH}#{file_in_and_out[0]}\"\n file_out = \"#{ROOT_PATH}#{file_in_and_out[1]}\"\n end\n\n system \"java -jar #{ROOT_PATH}framework/scripts/libs/yuicompressor-2.4.6.jar --type css #{file_in} -o #{file_out}\"\n\n if file_in_and_out.is_a? String\n FileUtils.rm file_in\n FileUtils.mv file_out, file_in\n end \n end\nend",
"def compress_js(paths)\n js = concatenate(paths)\n @options[:disabled] ? js : @js_compressor.compress(js)\n end",
"def build_css\n Dir[\"#{@src_dir}/stylesheets/**/*.{sass,scss}\"].each do |path|\n file_dir, template = source_template_from_path(path.sub(/^#{@src_dir}\\/stylesheets/, ''))\n\n if !template.match(/(^|\\/)\\_/)\n save_stylesheet(File.join(file_dir, template), generate_css(template, file_dir))\n end\n end\n end",
"def compile_less\n system 'lessc -s less/styles.less css/styles.css'\n end",
"def compress\n files=@input_files=collect_filepaths\n \n # This will consist of only an entry (the concatenated file body)\n # when compressing javascript or css files \n\n files=[concatenate_files] if @concatenate_input\n cmd= (@commands.empty?)? nil : @commands[ @default_command ]\n\n files.each do |path|\n output_path=process(path,cmd)\n size_check(path,output_path) unless %w(.css .js).include?( File.extname(output_path)) \n end\n \n # summarise compression stats\n print_summary unless ['.css','.js' ].include?(@input_file_extensions)\n end",
"def compile_less_styles(base_dir, definitions)\n\n chdir(base_dir) do\n definitions.each do |definition|\n\n lessc = 'lessc'\n flags = []\n\n if definition['include']\n flags << \"--include-path=\" + definition['include'].join(':')\n end\n\n sources = definition['inputs'].join(' ')\n\n sh %{#{lessc} #{flags.join(' ')} #{sources} #{definition['output']}}\n\n end\n end\nend",
"def process_flat_ui_stylesheet_assets!\n log_status 'Processing stylesheets...'\n files = read_files('less', flat_ui_less_files)\n\n log_status ' Converting LESS files to Scss:'\n files.each do |name, file|\n log_processing name\n\n # apply common conversions\n # icon-font bombs on this so skip it\n file = convert_less(file) unless name =~ /flat-ui|glyphicons/\n file = replace_file_imports(file)\n file = cleanup_whitespace(file)\n case name\n when 'flat-ui.less'\n lines = file.split \"\\n\"\n lines.reject! {|line|\n #kill the fonts lines, those are up to the user\n #kill variables since those need to be manually imported before bootstrap\n line =~ /fonts|url|variables/\n }\n\n # Add a comment for the icon font\n icon_font_import = lines.index {|line| line =~ /glyphicons/}\n lines.insert(icon_font_import, '// Flat-UI-Icons')\n lines.delete_at(icon_font_import+2)\n\n file = lines.join \"\\n\"\n when 'mixins.less'\n NESTED_MIXINS.each do |selector, prefix|\n file = flatten_mixins(file, selector, prefix)\n end\n file = varargify_mixin_definitions(file, *VARARG_MIXINS)\n file = deinterpolate_vararg_mixins(file)\n %w(responsive-(in)?visibility input-size text-emphasis-variant bg-variant).each do |mixin|\n file = parameterize_mixin_parent_selector file, mixin\n end\n file = replace_ms_filters(file)\n if pro?\n file = replace_all file, /(?<=[.-])\\$state/, '#{$state}'\n else\n # calc-color mixin only exists in Flat-UI free\n file = replace_all file, /-(\\$.+-color)/, '-#{\\1}'\n file = replace_all file, /#\\{\\$\\$\\{(.+)\\}\\}/, 'interpolate_variable($\\1)'\n end\n file = replace_rules(file, ' .list-group-item-') { |rule| extract_nested_rule rule, 'a&' }\n file = replace_all file, /,\\s*\\.open \\.dropdown-toggle& \\{(.*?)\\}/m,\n \" {\\\\1}\\n .open & { &.dropdown-toggle {\\\\1} }\"\n file = replace_all file, '$ratio, $ratio-y', '$scale-args'\n file = convert_grid_mixins file\n when 'variables.less'\n file = insert_default_vars(file)\n if ::Sass::VERSION >= '3.3.0'\n file = unindent <<-SCSS + file, 14\n // a flag to toggle asset pipeline / compass integration\n $flat-ui-sass-asset-helper: function-exists(flat-ui-font-path) !default;\n\n SCSS\n else\n file = unindent <<-SCSS + file, 14\n // a flag to toggle asset pipeline / compass integration\n // defaults to true if flat-ui-font-path function is present (no function => twbs-font-path('') parsed as string == right side)\n $flat-ui-sass-asset-helper: (flat-ui-font-path(\"\") != unquote('flat-ui-font-path(\"\")')) !default;\n\n SCSS\n end\n file = fix_variable_declaration_order file\n file = replace_all file, /(\\$icon-font-path:\\s+).*(!default)/, '\\1\"'+@output_dir+'/\" \\2'\n when 'modules/buttons.less'\n file = extract_nested_rule file, '.btn-xs&'\n file = extract_nested_rule file, '.btn-hg&'\n when 'modules/forms.less'\n # Fix mixin regex not supporting non-variable arguments\n file.gsub! 
/@include input-size\\((?:\\$.+)\\);/ do |match|\n match.gsub /; /, ', '\n end\n file = apply_mixin_parent_selector(file, '\\.input-(?:sm|lg|hg)')\n when 'modules/input-groups.less'\n file = replace_rules(file, '.input-group-rounded') do |rule|\n extract_and_combine_nested_rules rule\n end\n when 'modules/glyphicons.less'\n file = replace_vars(file)\n file = replace_escaping(file)\n file = replace_all file, /\\#\\{(url\\(.*?\\))}/, '\\1'\n file = replace_rules(file, '@font-face') { |rule|\n rule = replace_all rule, /(\\$icon-font(?:-\\w+)+)/, '#{\\1}'\n replace_asset_url rule, :font\n }\n when 'modules/login.less'\n file = fix_flat_ui_image_assets file\n when 'modules/navbar.less'\n # Fix mixin regex not supporting non-variable arguments\n file.gsub! /@include input-size\\((?:\\$.+)\\);/ do |match|\n match.gsub /; /, ', '\n end\n file = apply_mixin_parent_selector(file, '\\.navbar-input')\n when 'modules/palette.less'\n file.gsub! /@include calc-color\\((.+)\\);/ do |match|\n match.gsub /#\\{([\\w\\-]+)\\}/, '\"\\1\"'\n end\n when 'modules/select.less'\n # Fix the include that the converter makes an extend\n file = replace_all file, /@extend \\.caret/, '@include caret'\n when 'modules/spinner.less'\n # Fix mixin regex not supporting non-variable arguments\n file.gsub! /@include spinner-variant\\((?:\\$?.+)\\);/ do |match|\n match.gsub /; /, ', '\n end\n when 'modules/switch.less'\n file = fix_flat_ui_image_assets file\n when 'modules/tile.less'\n file = fix_flat_ui_image_assets file\n when 'modules/todo.less'\n file = fix_flat_ui_image_assets file\n when 'modules/thumbnails.less'\n file = extract_nested_rule file, 'a&'\n when 'modules/type.less'\n # Since .bg-primary has a color associated with it we need to divide it into\n # two selectors\n file = replace_rules(file, '.bg-primary') do |rule|\n parts = rule.split \"\\n\"\n selector = parts.index {|line| line =~ /\\.bg-primary/}\n mixin = parts.index {|line| line =~ /@include/}\n parts.insert(mixin, \"}\\n#{parts[selector]}\")\n rule = parts.join \"\\n\"\n end\n file = apply_mixin_parent_selector(file, '\\.(text|bg)-(success|primary|info|warning|danger)')\n when 'modules/video.less'\n file = replace_rules(file, /\\s*\\.vjs(?:-(?:control|time))?(?!-\\w+)/) do |rule|\n selector = get_selector(rule).scan(/\\.vjs(?:-(?:control|time))?(?!-\\w+)/).first\n convert_arbitrary_less_ampersand(rule, selector)\n end\n file = fix_flat_ui_image_assets file\n end\n\n name = name.sub(/\\.less$/, '.scss')\n base = File.basename(name)\n name.gsub!(base, \"_#{base}\") unless base == 'flat-ui.scss'\n path = File.join(@dest_path[:scss], name)\n save_file(path, file)\n log_processed File.basename(path)\n end\n\n manifest = File.join(@dest_path[:scss], '..', \"#{@output_dir}.scss\")\n save_file(manifest, \"@import \\\"#{@output_dir}/flat-ui\\\";\")\n end",
"def minify_css(folder)\n # Select all files within folder.\n Dir.open(folder).each do |source_name|\n source_file = \"#{folder}/#{source_name}\"\n temp_file = \"#{folder}/temp-#{source_name}\"\n # Skip if not css.\n next unless (File.extname(source_file) == '.css')\n # Create temporary empty file.\n File.open(temp_file, \"w\") do |minified_file|\n # Minify file.\n sass_engine = Sass::Engine.new(\n File.read(source_file),\n :syntax => :scss,\n :style => :compressed,\n :cache => false\n )\n minified_file.write(sass_engine.render)\n end\n # Delete original file and rename minified to original.\n File.delete(source_file)\n File.rename(temp_file, source_file)\n end\n end",
"def rewrite_css_paths\n [\"fonts\", \"images\"].each do |folder|\n pattern = %Q{url(\"..\\\\/#{folder}\\\\/}\n replacement = %Q{url(\"\\\\/packages\\\\/meteor-flat-ui\\\\/assets\\\\/#{folder}\\\\/}\n `sed -e 's/#{pattern}/#{replacement}/g' -i '' assets/css\\/flat-ui.css`\n end\nend",
"def precompile(*paths)\n raise \"missing static root\" unless static_root\n\n paths.each do |path|\n files.each do |logical_path|\n if path.is_a?(Regexp)\n # Match path against `Regexp`\n next unless path.match(logical_path)\n else\n # Otherwise use fnmatch glob syntax\n next unless File.fnmatch(path.to_s, logical_path)\n end\n\n if asset = find_asset(logical_path)\n attributes = attributes_for(logical_path)\n digest_path = attributes.path_with_fingerprint(asset.digest)\n filename = static_root.join(digest_path)\n\n # Ensure directory exists\n FileUtils.mkdir_p filename.dirname\n\n # Write file\n asset.write_to(filename)\n\n # Write compressed file if its a bundled asset like .js or .css\n asset.write_to(\"#{filename}.gz\") if asset.is_a?(BundledAsset)\n end\n end\n end\n end",
"def compile(lessfile, cssfile)\n import_paths = options[:import_paths].unshift(File.dirname(lessfile))\n parser = ::Less::Parser.new paths: import_paths, filename: lessfile\n File.open(lessfile, 'r') do |infile|\n File.open(cssfile, 'w') do |outfile|\n tree = parser.parse(infile.read)\n outfile << tree.to_css(compress: options[:compress], yuicompress: options[:yuicompress])\n end\n end\n true\n rescue StandardError => e\n Compat::UI.info \"Guard::Less: Compiling #{lessfile} failed with message: #{e.message}\"\n false\n end",
"def url(*paths)\n # debugger\n $configHash ||= load_json(File.expand_path(\"#{File.dirname(options[:filename])}/../config/sassmagic.json\")) || Hash.new\n kwargs = paths.last.is_a?(Hash) ? paths.pop : {}\n raise Sass::SyntaxError, 'url() needs one path at least' if paths.empty?\n\n encode = kwargs['base64'] == bool(true)\n ts = timestamp(kwargs['timestamp'])\n\n paths = paths.map { |path| sass_to_ruby(path) }.flatten\n .map { |path| compress_img(path, encode, ts); }\n\n list(paths, :comma)\n end",
"def all_css\n css_files = Dir.glob File.join(RAILS_ROOT, \"public/stylesheets/\",\"*.css\")\n css_content = \"\"\n css_files.each {|file| css_content << IO.read(file)}\n \n css_content.instance_eval do\n \n # opens with less command\n def open\n less self\n # system('echo \"' << self << '\"|less')\n end\n \n end\n \n css_content\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
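The LESS-compilation document above references a `paths` method and the `STYLESHEETS_PATH` and `HEADER` constants that are defined elsewhere; a minimal sketch of how such definitions might look is given below. The names come from the snippet, the values are assumed.

# Assumed supporting definitions for the run(options) method above;
# the snippet itself only references these names.
STYLESHEETS_PATH = "#{Rails.root}/public/stylesheets"
HEADER = "/* %s is generated from a LESS source -- do not edit directly */\n"

# Directories scanned for *.less sources (assumed location).
def paths
  ["#{Rails.root}/app/stylesheets"]
end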
Execute Cassandra's nodetool and return its output as a string
|
def nodetool_cmd(cmd)
`nodetool -h #{config[:hostname]} -p #{config[:port]} #{cmd}`
end
|
[
"def nodetool_cmd(cmd)\n out = `nodetool -h #{config[:hostname]} -p #{config[:port]} #{cmd} 2>&1`\n [out, $CHILD_STATUS]\n end",
"def nodetool_info()\n out = `/opt/cassandra/bin/nodetool info`\n if $? == 0\n props = {}\n out.split(\"\\n\").each do |line|\n p = line.split(':')\n props[p.first.strip] = p.last.strip\n end\n return props\n else\n Chef::Log.warn(\"nodetool info failed with #{out}\")\n return nil\n end\n end",
"def nodetool_command(*more_args)\n [File.join(@cassbin, 'nodetool'), '-h', @host, '-p', @jmx_port, more_args].flatten\n end",
"def nodetool_info\n @nodetool_info ||= DaemonRunner::ShellOut.new(command: 'nodetool info', timeout: 300)\n @nodetool_info.run!\n @nodetool_info.stdout\n end",
"def nodetool_status()\n out = `/opt/cassandra/bin/nodetool status`\n raise 'nodetool status failed' if $? != 0\n rows = out.split(\"\\n\")\n hash = {}\n dc_exp = /Datacenter: (.*)/\n #vnode\n #-- Address Load Tokens Owns Host ID Rack\n #non-vnode\n #-- Address Load Owns Host ID Token Rack\n #node_ex = /^(?<status>[UD\\?][NLJM]) +(?<address>(?:[0-9]{1,3}\\.){3}[0-9]{1,3}) +(?<load>(\\d+\\.?\\d* (TB|GB|MB|KB|bytes))|\\?) +(?<tokens>\\d+) +(?<owns>(\\d+\\.?\\d*%|\\?)) +(?<hostid>[a-z0-9\\-]+) +(?<rack>.*)$/\n node_ex = /^([UD\\?][NLJM]) +((?:[0-9]{1,3}\\.){3}[0-9]{1,3}) +((?:\\d+\\.?\\d* (?:TB|GB|MB|KB|bytes))|\\?) +(\\d+) +(\\d+\\.?\\d*%|\\?) +([a-z0-9\\-]+) +(.*)$/\n datacenter = nil\n rows.each do |row|\n m = dc_exp.match(row)\n if m\n datacenter = m[1]\n next\n end\n m = node_ex.match(row)\n next if m == nil\n node = {'datacenter' => datacenter}\n hash[m[2]] = node\n i = 0\n %w(status address load tokens owns hostid rack).each do |name|\n node[name] = m[i += 1]\n end\n # m.names.each do |name|\n # node[name] = m[name]\n # end\n end\n return hash\n end",
"def nodetool_status\n @nodetool_status ||= DaemonRunner::ShellOut.new(command: 'nodetool status', timeout: 300)\n @nodetool_status.run!\n @nodetool_status.stdout\n end",
"def nodetool_statusthrift\n @nodetool_statusthrift||= DaemonRunner::ShellOut.new(command: 'nodetool statusthrift')\n @nodetool_statusthrift.run!\n @nodetool_statusthrift.stdout\n end",
"def nodetool_netstats\n @nodetool_netstats ||= DaemonRunner::ShellOut.new(command: 'nodetool netstats', timeout: 300)\n @nodetool_netstats.run!\n @nodetool_netstats.stdout\n end",
"def nodetool_netstats\n @nodetool_netstats ||= DaemonRunner::ShellOut.new(command: 'nodetool netstats', timeout: 300)\n @nodetool_netstats.run!\n @nodetool_netstats.stdout\n end",
"def nodetool_statusgossip\n @nodetool_statusgossip ||= DaemonRunner::ShellOut.new(command: 'nodetool statusgossip')\n @nodetool_statusgossip.run!\n @nodetool_statusgossip.stdout\n end",
"def nodetool_ring\n @nodetool_ring ||= DaemonRunner::ShellOut.new(command: 'nodetool ring', timeout: 300)\n @nodetool_ring.run!\n @nodetool_ring.stdout\n end",
"def get_node_info\n require 'chef/mixin/shell_out'\n\n output = shell_out!(\"#{node['pcluster']['python_root']}/aws dynamodb \" \\\n \"--region #{node['pcluster']['region']} query --table-name #{node['pcluster']['cfn_stack_outputs']['Outputs']['DynamoDBTable']} \" \\\n \"--index-name InstanceId --key-condition-expression 'InstanceId = :instanceid' \" \\\n \"--expression-attribute-values '{\\\":instanceid\\\": {\\\"S\\\":\\\"#{node['ec2']['instance_id']}\\\"}}' \" \\\n \"--projection-expression 'Id' \" \\\n \"--output text --query 'Items[0].[Id.S]'\", user: 'root').stdout.strip\n\n raise 'Failed when retrieving Compute info from DynamoDB' if output == 'None'\n\n slurm_nodename = output\n\n Chef::Log.info(\"Retrieved Slurm nodename is: #{slurm_nodename}\")\n\n slurm_nodename\nend",
"def get_node_version\n run_ssh_command('node --version')\n end",
"def get_node_launch_command(node_info, mode, offset, nw_name)\n pwd = `pwd`.gsub(/\\n+/,\"\")\n ds = node_info.dir_structure\n if :docker.eql? mode\n docker_rpc_port = 6666\n docker_api_port = 7777\n \"docker run \\\n --name #{node_info.id} \\\n --hostname #{node_info.id} \\\n -e NODE_ID=#{node_info.id} \\\n -e ELECTION_TIMEOUT_MILLIS=3000 \\\n --mount type=bind,source=#{pwd}/#{ds.log},target=/node/cluster-data/log \\\n --mount type=bind,source=#{pwd}/#{ds.state},target=/node/cluster-data/state \\\n --mount type=bind,source=#{pwd}/#{ds.snapshot},target=/node/cluster-data/snapshot \\\n --mount type=bind,source=#{pwd}/#{ds.cluster},target=/node/cluster-data/cluster,readonly \\\n -p #{docker_api_port + offset}:#{docker_api_port} \\\n --network=#{nw_name} \\\n raft:local \\\n \".gsub(/\\s+/, \" \")\n else\n \"./raft \n --id=#{node_info.id} \\\n --api-port=#{node_info.api_port} \\\n --rpc-port=#{node_info.rpc_port} \\\n --log-entry-path=#{ds.log} \\\n --log-metadata-path=#{ds.log}/metadata.json \\\n --raft-state-path=#{ds.state}/state.json \\\n --election-timeout=3000 \\\n --rpc-timeout=2000 \\\n --api-timeout=2000 \\\n --api-fwd-timeout=1500 \\\n --max-conn-retry-attempts=5 \\\n --snapshot-path=#{ds.snapshot} \\\n --cluster-config-path=#{ds.cluster}/config.json \\\n \".gsub(/\\s+/, \" \")\n end\nend",
"def status\n return(:down).tap { logger.warn 'Cassandra node is DOWN' } if address.nil?\n results = (nodetool_status || '').split(\"\\n\")\n results.map! { |line| line.strip }\n results.select! { |line| line.include? address }\n results.map! { |line| line.split(/\\s+/)[0] }\n results.compact!\n return(:down).tap do\n logger.warn \"Cannot find the Cassandra node (#{address}) in `nodetool status`\"\n end if results.size != 1\n (results.first[0] == 'U') ? :up : :down\n end",
"def execute(node, command)\n params = {}\n params[:exceptiononfailure] = false\n params[:exitcode] = true\n (exitcode, output) = node.execute(command, params)\n return [exitcode.to_i, output]\n end",
"def rpc_cmd(cmd)\n file = Tempfile.new('ruby_rpcclient')\n if @hashpass\n success = system(\"#{@rpcclient} #{@host} -p #{@port} --pw-nt-hash -U #{@user}%#{@pass} -c #{cmd} > #{file.path} 2>&1\")\n else\n success = system(\"#{@rpcclient} #{@host} -p #{@port} -U #{@user}%#{@pass} -c #{cmd} > #{file.path} 2>&1\")\n end\nputs \"#{@rpcclient} #{@host} -p #{@port} -U #{@user}%#{@pass} -c #{cmd} > #{file.path} 2>&1\"\nputs success.class\n if success\n output = File.open(file.path).readlines\nputs output\n else\n output=nil\n end\n file.unlink\n return output\n end",
"def discovery_cmd\n cmd = ''\n discovery = node[:etcd][:discovery]\n if discovery.length > 0\n cmd << \" -discovery='#{discovery}'\"\n elsif slave == true\n cmd << ' -peers-file=/etc/etcd_members'\n end\n cmd\n end",
"def puppet_command\n cmdline = []\n\n # Where is the puppet binary?\n puppet = @options[:puppet_binary]\n raise ArgumentError, 'Puppet binary was not supplied' if puppet.nil?\n raise Errno::ENOENT, \"Puppet binary #{puppet} doesn't exist\" unless File.file?(puppet)\n cmdline << puppet\n\n # Node to compile\n cmdline.concat ['master', '--compile', Shellwords.escape(@node)]\n\n # storeconfigs?\n if @options[:storeconfigs]\n cmdline.concat %w(--storeconfigs --storeconfigs_backend=puppetdb)\n else\n cmdline << '--no-storeconfigs'\n end\n\n # enc?\n if @options[:enc]\n raise Errno::ENOENT, \"Did not find ENC as expected at #{@options[:enc]}\" unless File.file?(@options[:enc])\n cmdline << \"--node_terminus=exec --external_nodes=#{Shellwords.escape(@options[:enc])}\"\n end\n\n # Future parser?\n cmdline << '--parser=future' if @options[:parser] == :future\n\n # Path to facts, or a specific fact file?\n facts_terminus = @options.fetch(:facts_terminus, 'yaml')\n if facts_terminus == 'yaml'\n cmdline << \"--factpath=#{Shellwords.escape(File.join(@compilation_dir, 'var', 'yaml', 'facts'))}\"\n if @options[:fact_file].is_a?(String) && @options[:fact_file] =~ /.*\\.(\\w+)$/\n fact_file = File.join(@compilation_dir, 'var', 'yaml', 'facts', \"#{@node}.#{Regexp.last_match(1)}\")\n FileUtils.cp @options[:fact_file], fact_file unless File.file?(fact_file) || @options[:fact_file] == fact_file\n end\n cmdline << '--facts_terminus=yaml'\n elsif facts_terminus == 'facter'\n cmdline << '--facts_terminus=facter'\n else\n raise ArgumentError, \"Unrecognized facts_terminus setting: '#{facts_terminus}'\"\n end\n\n # Some typical options for puppet\n cmdline.concat %w(\n --no-daemonize\n --no-ca\n --color=false\n --config_version=\"/bin/echo catalogscript\"\n )\n\n # Add environment - only make this variable if preserve_environments is used.\n # If preserve_environments is not used, the hard-coded 'production' here matches\n # up with the symlink created under the temporary directory structure.\n environ = @options[:preserve_environments] ? @options.fetch(:environment, 'production') : 'production'\n cmdline << \"--environment=#{Shellwords.escape(environ)}\"\n\n # For people who aren't running hiera, a hiera-config will not be generated when @options[:hiera_config]\n # is nil. For everyone else, the hiera config was generated/copied/munged in the 'builddir' class\n # and was installed into the compile directory and named hiera.yaml.\n unless @options[:hiera_config].nil?\n cmdline << \"--hiera_config=#{Shellwords.escape(File.join(@compilation_dir, 'hiera.yaml'))}\"\n end\n\n # Options with parameters\n cmdline << \"--environmentpath=#{Shellwords.escape(File.join(@compilation_dir, 'environments'))}\"\n cmdline << \"--vardir=#{Shellwords.escape(File.join(@compilation_dir, 'var'))}\"\n cmdline << \"--logdir=#{Shellwords.escape(File.join(@compilation_dir, 'var'))}\"\n cmdline << \"--ssldir=#{Shellwords.escape(File.join(@compilation_dir, 'var', 'ssl'))}\"\n cmdline << \"--confdir=#{Shellwords.escape(@compilation_dir)}\"\n\n # Other parameters provided by the user\n override_and_append_commandline_with_user_supplied_arguments(cmdline)\n\n # Return full command\n cmdline.join(' ')\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
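As a hedged aside to the nodetool_cmd entry above: a minimal sketch of a sturdier variant, assuming the same config[:hostname] and config[:port] keys provided by the surrounding check class. Open3 is Ruby stdlib; unlike backticks, it also surfaces stderr and the exit status (which several of the negatives above hint at). The nodetool_cmd! name is illustrative, not part of the original plugin.

require 'open3'

# Sketch only: run a nodetool subcommand and return stdout, raising if the
# command exits non-zero. config[:hostname] / config[:port] are assumed to
# come from the surrounding check, as in the entry above.
def nodetool_cmd!(cmd)
  stdout, stderr, status = Open3.capture3(
    "nodetool -h #{config[:hostname]} -p #{config[:port]} #{cmd}"
  )
  raise "nodetool #{cmd} failed: #{stderr}" unless status.success?
  stdout
end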
$ nodetool status
Datacenter: LON5
================
Status=Up/Down
|/ State=Normal/Leaving/Joining/Moving
Address     Load     Tokens  Owns  Host ID                           Rack
UN  172.16.1.1  1.88 GB  256  ?  5uu5274d0c1c46f1b73cc28ffdcad10e  A125
DN  172.16.1.2  2.55 GB  256  ?  4uu6478c0e29468cad38f417ccbcf403  A125
UL  172.16.1.3  3.24 GB  256  ?  fuu0063da0334a7895e840a479d99a6b  A125
UJ  172.16.1.4  4.92 GB  256  ?  1uuace8eaf9c4eff99771a34c09c5535  A125
UN  172.16.1.5  5.22 GB  256  ?  7uu9ee6cf0934fa0874b3f5bcaa5b952  A125
|
def parse_ndstatus # rubocop:disable all
  nodestatus = nodetool_cmd('status')
  nodestatus.each_line do |line|
    next if line.match(/^Datacenter:/)
    next if line.match(/^=======/)
    next if line.match(/^Status/)
    next if line.match(/State/)
    next if line.match(/^--/)
    next if line.match(/^Note/)
    if m = line.match(/^UN\s\s(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})/) # rubocop:disable all
      address = m[1]
      ndstatus_attr = {"node.#{address}.status" => 'UN'}
    else
      m = line.match(/(\w+)\s\s(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})/)
      next if m.nil? # skip blank lines and anything else that carries no node entry
      address = m[2]
      ndstatus = m[1]
      ndstatus_attr = {"node.#{address}.status" => ndstatus}
      critical ndstatus_attr.to_json
    end
    ok ndstatus_attr.to_json
  end
end
|
[
"def nodetool_status()\n out = `/opt/cassandra/bin/nodetool status`\n raise 'nodetool status failed' if $? != 0\n rows = out.split(\"\\n\")\n hash = {}\n dc_exp = /Datacenter: (.*)/\n #vnode\n #-- Address Load Tokens Owns Host ID Rack\n #non-vnode\n #-- Address Load Owns Host ID Token Rack\n #node_ex = /^(?<status>[UD\\?][NLJM]) +(?<address>(?:[0-9]{1,3}\\.){3}[0-9]{1,3}) +(?<load>(\\d+\\.?\\d* (TB|GB|MB|KB|bytes))|\\?) +(?<tokens>\\d+) +(?<owns>(\\d+\\.?\\d*%|\\?)) +(?<hostid>[a-z0-9\\-]+) +(?<rack>.*)$/\n node_ex = /^([UD\\?][NLJM]) +((?:[0-9]{1,3}\\.){3}[0-9]{1,3}) +((?:\\d+\\.?\\d* (?:TB|GB|MB|KB|bytes))|\\?) +(\\d+) +(\\d+\\.?\\d*%|\\?) +([a-z0-9\\-]+) +(.*)$/\n datacenter = nil\n rows.each do |row|\n m = dc_exp.match(row)\n if m\n datacenter = m[1]\n next\n end\n m = node_ex.match(row)\n next if m == nil\n node = {'datacenter' => datacenter}\n hash[m[2]] = node\n i = 0\n %w(status address load tokens owns hostid rack).each do |name|\n node[name] = m[i += 1]\n end\n # m.names.each do |name|\n # node[name] = m[name]\n # end\n end\n return hash\n end",
"def status\n return(:down).tap { logger.warn 'Cassandra node is DOWN' } if address.nil?\n results = (nodetool_status || '').split(\"\\n\")\n results.map! { |line| line.strip }\n results.select! { |line| line.include? address }\n results.map! { |line| line.split(/\\s+/)[0] }\n results.compact!\n return(:down).tap do\n logger.warn \"Cannot find the Cassandra node (#{address}) in `nodetool status`\"\n end if results.size != 1\n (results.first[0] == 'U') ? :up : :down\n end",
"def nodetool_status\n @nodetool_status ||= DaemonRunner::ShellOut.new(command: 'nodetool status', timeout: 300)\n @nodetool_status.run!\n @nodetool_status.stdout\n end",
"def data_center\n results = (nodetool_info_cached || '').split(\"\\n\")\n results.map! { |line| line.strip }\n results.select! { |line| line.include?('Data Center') }\n results.map! { |line| line.split(':')[1] }\n results.compact!\n return nil if results.size != 1\n results.first.strip\n end",
"def nodetool_netstats\n @nodetool_netstats ||= DaemonRunner::ShellOut.new(command: 'nodetool netstats', timeout: 300)\n @nodetool_netstats.run!\n @nodetool_netstats.stdout\n end",
"def nodetool_info()\n out = `/opt/cassandra/bin/nodetool info`\n if $? == 0\n props = {}\n out.split(\"\\n\").each do |line|\n p = line.split(':')\n props[p.first.strip] = p.last.strip\n end\n return props\n else\n Chef::Log.warn(\"nodetool info failed with #{out}\")\n return nil\n end\n end",
"def calc_full_nodes_avail\n @full_nodes_available = 0\n #if @cluster_title.eql?(\"Ruby\")\n # # See line 62\n #@full_nodes_available = nodes_info.lines(\"\\n\\n\").select { |node| node.include?(\"dedicated_threads = 0\") && node.include?(\"np = 20\") && node.include?(\"gpu_state=Unallocated\")}.size\n #else\n #@full_nodes_available = nodes_info.lines(\"\\n\\n\").select { |node| node.include?(\"dedicated_threads = 0\") && node.include?(\"gpu_state=Unallocated\") }.size\n #end\n end",
"def nodetool_netstats\n @nodetool_netstats ||= DaemonRunner::ShellOut.new(command: 'nodetool netstats', timeout: 300)\n @nodetool_netstats.run!\n @nodetool_netstats.stdout\n end",
"def nodes_info\n client.nodes.info(os: true) if client.ping\n end",
"def load_balancer_status\n puts\n @haproxy.identity_filter(@load_balancer)\n rpcresult = @haproxy.backend_status(:backend => 'puppetcamp')\n puts \"Enabled Nodes :\".green\n rpcresult.each do |enabled| \n enabled[:data][:enabled].each do |host|\n puts \" #{host}\".green\n end\n end\n puts\n puts \"Disabled Nodes :\".red\n rpcresult.each do |disabled|\n disabled[:data][:disabled].each do |host|\n puts \" #{host}\".red\n end\n end\n puts\nend",
"def nodetool_info\n @nodetool_info ||= DaemonRunner::ShellOut.new(command: 'nodetool info', timeout: 300)\n @nodetool_info.run!\n @nodetool_info.stdout\n end",
"def owner_gpu_nodes\n return @owner_available_gpu_nodes if defined?(@owner_available_gpu_nodes)\n\n o, e, s = Open3.capture3(\"#{sinfo_cmd} -N -h -p #{cluster_title.downcase}-gpu-guest --Format='nodehost,gres:#{gres_length}' | uniq | grep gpu: | wc -l\")\n\n# File.write(\"/uufs/chpc.utah.edu/common/home/u0101881/ondemand/dev/osc-systemstatus/log.txt\", \"#{o}\\n#{e}\\n#{s}\\n\", mode: \"a\")\n if s.success?\n @owner_available_gpu_nodes = o.to_i\n else\n # Return stderr as error message\n @error_message = \"An error occurred when retrieving available GPU nodes. Exit status #{s.exitstatus}: #{e.to_s}\"\n 0\n end\n end",
"def output_success\n puts \"status Cman node status for #{@hostname}\"\n @metrics.each do |name,v|\n puts \"metric #{name} #{v[:type]} #{v[:value]}\"\n end\nend",
"def send_node_stats(node_ip)\n metrics_queue = Librato::Metrics::Queue.new\n\n cadvisor_res = Typhoeus.get(\"http://#{node_ip}:4194/api/v1.3/docker/\")\n data = Oj.load(cadvisor_res.body)\n\n data.values.each do |container|\n # Skip containers that aren't managed by kube:\n next if container['spec']['labels'].nil?\n\n # Parse the container name out of the container name auto-generated by kube\n # see https://github.com/kubernetes/heapster/blob/78ff89c01f52c0ab49dac2d356a8371e79482544/sources/datasource/kubelet.go#L156 \n container_name = container['aliases'].first.split('.').first.sub('k8s_','')\n\n # Join all of this together into a librato source name:\n source_name = ENV['CONTEXT'] + '.' + container['spec']['labels']['io.kubernetes.pod.name'].sub('/', '.') + '.' + container_name\n\n puts source_name\n\n stats = container['stats'].last\n\n # k8s_POD form the virtual network for a pod. We must collect net stats from this container,\n # since net counters for indvidual pod containers are always 0. See http://stackoverflow.com/questions/33472741/what-work-does-the-process-in-container-gcr-io-google-containers-pause0-8-0-d\n # for more info. No need to collect memory and cpu stats for this container.\n if container_name == 'POD'\n metrics_queue.add \"kube.network.tx_bytes\" => { type: :counter, value: stats['network']['tx_bytes'], source: source_name }\n metrics_queue.add \"kube.network.rx_bytes\" => { type: :counter, value: stats['network']['rx_bytes'], source: source_name }\n next\n end\n\n if stats['cpu']\n cpu_ms = stats['cpu']['usage']['total'] / 1000000\n metrics_queue.add \"kube.cpu.usage_ms\" => { type: :counter, value: cpu_ms, source: source_name }\n end\n \n if stats['memory']\n metrics_queue.add \"kube.memory.usage\" => { value: stats['memory']['usage'], source: source_name }\n metrics_queue.add \"kube.memory.rss\" => { value: stats['memory']['working_set'], source: source_name }\n end\n end\n\n metrics_queue.submit\nend",
"def nodetool_statusgossip\n @nodetool_statusgossip ||= DaemonRunner::ShellOut.new(command: 'nodetool statusgossip')\n @nodetool_statusgossip.run!\n @nodetool_statusgossip.stdout\n end",
"def parse_info\n info = nodetool_cmd('info')\n # #YELLOW\n # TODO: come back and refactor me to be better\n info.each_line do |line| # rubocop:disable Metrics/BlockLength\n if (m = line.match(/^Exceptions\\s*:\\s+([0-9]+)$/))\n output \"#{config[:scheme]}.exceptions\", m[1], @timestamp\n end\n\n if (m = line.match(/^Load\\s*:\\s+([0-9.]+)\\s+([KMGT]i?B|bytes)$/))\n output \"#{config[:scheme]}.load\", convert_to_bytes(m[1], m[2]), @timestamp\n end\n\n if (m = line.match(/^Uptime[^:]+:\\s+(\\d+)$/))\n output \"#{config[:scheme]}.uptime\", m[1], @timestamp\n end\n\n if (m = line.match(/^Heap Memory[^:]+:\\s+([0-9.]+)\\s+\\/\\s+([0-9.]+)$/))\n output \"#{config[:scheme]}.heap.used\", convert_to_bytes(m[1], 'MB'), @timestamp\n output \"#{config[:scheme]}.heap.total\", convert_to_bytes(m[2], 'MB'), @timestamp\n end\n\n # v1.1+\n if (m = line.match(/^Key Cache[^:]+: size ([0-9]+) \\(bytes\\), capacity ([0-9]+) \\(bytes\\), ([0-9]+) hits, ([0-9]+) requests/))\n output \"#{config[:scheme]}.key_cache.size\", m[1], @timestamp\n output \"#{config[:scheme]}.key_cache.capacity\", m[2], @timestamp\n output \"#{config[:scheme]}.key_cache.hits\", m[3], @timestamp\n output \"#{config[:scheme]}.key_cache.requests\", m[4], @timestamp\n end\n\n # cassandra nodetool v3.0+ Changed the key cache output\n # Key Cache : entries 569669, size 100 MiB, capacity 100 MiB, 35689224 hits, 70654365 requests, 0.505 recent hit rate, 14400 save period in seconds\n # Key Cache : entries 13291, size 7.83 MB, capacity 50 MB, 119444 hits, 139720 requests, 0.855 recent hit rate, 14400 save period in seconds\n if (m = line.match(/^Key Cache[^:]+: entries ([0-9]+), size ([-+]?[0-9]*\\.?[0-9]+) ([KMGT]i?B|bytes), capacity ([-+]?[0-9]*\\.?[0-9]+) ([KMGT]i?B|bytes), ([0-9]+) hits, ([0-9]+) requests, ([-+]?[0-9]*\\.?[0-9]+) recent hit rate/)) # rubocop:disable Layout/LineLength\n output \"#{config[:scheme]}.key_cache.size\", convert_to_bytes(m[2], m[3]), @timestamp\n output \"#{config[:scheme]}.key_cache.capacity\", convert_to_bytes(m[4], m[5]), @timestamp\n output \"#{config[:scheme]}.key_cache.hits\", m[6], @timestamp\n output \"#{config[:scheme]}.key_cache.requests\", m[7], @timestamp\n output \"#{config[:scheme]}.key_cache.hit_rate\", m[8], @timestamp\n end\n\n if (m = line.match(/^Row Cache[^:]+: size ([0-9]+) \\(bytes\\), capacity ([0-9]+) \\(bytes\\), ([0-9]+) hits, ([0-9]+) requests/))\n output \"#{config[:scheme]}.row_cache.size\", m[1], @timestamp\n output \"#{config[:scheme]}.row_cache.capacity\", m[2], @timestamp\n output \"#{config[:scheme]}.row_cache.hits\", m[3], @timestamp\n output \"#{config[:scheme]}.row_cache.requests\", m[4], @timestamp\n end\n\n # cassandra nodetool v3.0+ Changed the row cache output\n # Row Cache : entries 569669, size 100 MiB, capacity 100 MiB, 35689224 hits, 70654365 requests, 0.505 recent hit rate, 14400 save period in seconds\n # Row Cache : entries 13291, size 7.83 MB, capacity 50 MB, 119444 hits, 139720 requests, 0.855 recent hit rate, 14400 save period in seconds\n if (m = line.match(/^Row Cache[^:]+: entries ([0-9]+), size ([-+]?[0-9]*\\.?[0-9]+) ([KMGT]i?B|bytes), capacity ([-+]?[0-9]*\\.?[0-9]+) ([KMGT]i?B|bytes), ([0-9]+) hits, ([0-9]+) requests, ([-+]?[0-9]*\\.?[0-9]+) recent hit rate/)) # rubocop:disable Layout/LineLength\n output \"#{config[:scheme]}.row_cache.size\", convert_to_bytes(m[2], m[3]), @timestamp\n output \"#{config[:scheme]}.row_cache.capacity\", convert_to_bytes(m[4], m[5]), @timestamp\n output \"#{config[:scheme]}.row_cache.hits\", m[6], @timestamp\n output 
\"#{config[:scheme]}.row_cache.requests\", m[7], @timestamp\n output \"#{config[:scheme]}.row_cache.hit_rate\", m[8], @timestamp\n end\n end\n end",
"def node_info\n client.get('/admin/nodeinfo')\n end",
"def state\n results = (nodetool_netstats || '').split(\"\\n\")\n results.map! { |line| line.strip }\n results.select! { |line| line.include? 'Mode:' }\n results.map! { |line| line.split(':')[1] }\n results.compact!\n return nil if results.size != 1\n results.first.strip.downcase.to_sym\n end",
"def gpu_nodes\n return @available_gpu_nodes if defined?(@available_gpu_nodes)\n\n o, e, s = Open3.capture3(\"#{sinfo_cmd} -N -h -p #{cluster_title.downcase}-gpu --Format='nodehost,gres:#{gres_length}' | uniq | grep gpu: | wc -l\")\n\n# File.write(\"/uufs/chpc.utah.edu/common/home/u0101881/ondemand/dev/osc-systemstatus/log.txt\", \"#{o}\\n#{e}\\n#{s}\\n\", mode: \"a\")\n if s.success?\n @available_gpu_nodes = o.to_i\n else\n # Return stderr as error message\n @error_message = \"An error occurred when retrieving available GPU nodes. Exit status #{s.exitstatus}: #{e.to_s}\"\n 0\n end\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
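As a hedged aside to the parse_ndstatus entry above: the per-node status column of the sample `nodetool status` output can also be collected into a hash before deciding what to alert on. The node_statuses helper below is hypothetical, not part of the original check.

# Hypothetical helper: map each node address in `nodetool status` output to
# its two-letter state code (UN, DN, UL, UJ, ...). Header and blank lines
# simply fail the match and are skipped.
def node_statuses(raw)
  raw.each_line.with_object({}) do |line, acc|
    m = line.match(/^([UD?][NLJM])\s+(\d{1,3}(?:\.\d{1,3}){3})/)
    acc[m[2]] = m[1] if m
  end
end

# node_statuses(nodetool_cmd('status'))
# # => { "172.16.1.1" => "UN", "172.16.1.2" => "DN", ... }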
Return the default locale for the object
|
def default_locale
evaluate_localization_option!(:default_locale)
end
|
[
"def default_locale\n return @default_locale\n end",
"def default_locale\n self.found_locale ||= find_locale\n end",
"def default_locale\n client.default_locale\n end",
"def default_locale=(value)\n @default_locale = value\n end",
"def locale\n I18n.locale if defined?(I18n)\n end",
"def default_language_code\n I18n.locale_language(I18n.default_locale).to_s if I18n.locale_language(I18n.default_locale)\n end",
"def default_language\n return @default_language\n end",
"def system\n locale_from_env || default_locale\n end",
"def default_language\n @default_language || :en\n end",
"def get_locale\n @locale\n end",
"def locale\n self.padma.try :locale\n end",
"def base_locale; end",
"def locale\n Translatomatic::Model::Locale\n end",
"def find_locale\n locale = locales.all.detect(&:default)\n return locale.code unless locale.nil?\n @default_locale\n end",
"def find_locale\n locale = ::Contentful::Management::Locale.all(id).detect(&:default)\n return locale.code unless locale.nil?\n @default_locale\n end",
"def find_locale\n locale = locales.all.detect(&:default)\n return locale.code if locale\n\n default_locale\n end",
"def locale\n ::I18n.locale.to_s\n end",
"def locale\n return @locale\n end",
"def po_locale\n @po_locale\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Return the missing translation placeholder
|
def missing_translation_placeholder field
evaluate_localization_option!(:placeholder, field)
end
|
[
"def text_unfound\n case @lang\n when :en then \"Unfound localized text for `#{id}#{extension}'\"\n when :fr then \"Impossible de trouver le texte localisé `#{id}#{extension}'\"\n end\n end",
"def t(key, default = '')\n c = I18n.t(key)\n if c.match(/translation missing/i)\n c = I18n.t(key, locale: 'en') \n # Still not found. Return default if set\n c = default unless default.blank?\n end\n c\nend",
"def t(key, default='')\n c = I18n.t(key)\n if c.match( 'translation missing' )\n c = I18n.t(key, locale: 'en') \n# Still not found. Return default if set\n c = default unless default.blank?\n end\n c\nend",
"def missing_translations\n @missing_translations ||= []\n end",
"def test_translate_given_a_bogus_key_raises_missing_translation_data\n assert_equal \"translation missing: en, bogus\", I18n.t(:bogus)\n end",
"def test_translate_given_a_bogus_key_raises_missing_translation_data\r\n assert_equal \"translation missing: en, bogus\", I18n.t(:bogus)\r\n end",
"def missing_translation?\n translation.new_record? && translation_for(:en).new_record?\n end",
"def t(key, default='')\n c = I18n.t(key)\n if c.class == Hash or c.match( 'translation missing' )\n c = I18n.t(key, locale: 'en') \n# Still not found. Return default if set\n if c.class == Hash or c.match( 'translation missing' )\n c = default.blank? ? key : default\n end\n end\n c\nend",
"def localization_exists(key)\n @template.t(key, :default => 'not_found') != 'not_found'\n end",
"def assert_no_missing_translations(msg = \"\")\n assert_select \"span[class=translation_missing]\", false, \"Missing translation #{msg}\"\n end",
"def missing_translation?\n @translations.any? { |x, v| v.nil? or v.blank? }\n end",
"def translated?\n texts = errors.values.flatten.map(&:message)\n texts.select { |text| text.start_with?(\"translation missing\") }.empty?\n end",
"def present_translations(label)\n translations(label).where(\"value <> ''\").where.not(value: nil)\n end",
"def translation_missing( attribute = nil )\n missing = {}\n current_locales_used = translated_locales # ... across all attributes\n\n translated_attributes.each do |attr|\n missing_locales = current_locales_used - translation_coverage(attr.to_sym)\n if missing_locales.size > 0\n missing[attr.to_sym] = missing_locales\n end\n end\n if attribute.nil?\n return missing\n else\n return missing[attribute.to_sym]\n end\n end",
"def record_not_found_message\n resource_name&.present? ? I18n.t(:x_not_found, name: resource_name&.singularize&.titleize) : I18n.t(:not_found)\n end",
"def test_fetch_missing_word_from_empty_dictionary\n missing_word = \"asdf\"\n assert_raises Exceptions::MissingWordException do # HW: Checks for an exception!\n @dict.translate(missing_word)\n end\n end",
"def fallback_translations\n @fallback_translations ||= I18n.backend.send(:translations)\n @fallback_translations[I18n.default_locale]\n end",
"def missing_text; end",
"def validate_translations\n# logger.debug \"***** validates question translations\"\n if self.time_series.default_language.present?\n# logger.debug \"***** - default is present; text = #{self.text_translations[self.time_series.default_language]}\"\n if self.text_translations[self.time_series.default_language].blank?\n# logger.debug \"***** -- text not present!\"\n errors.add(:base, I18n.t('errors.messages.translation_default_lang',\n field_name: self.class.human_attribute_name('text'),\n language: Language.get_name(self.time_series.default_language),\n msg: I18n.t('errors.messages.blank')) )\n end\n end\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Remove the node at the end of the list and return its value
|
def remove_last
raise 'No such element' if @size == 0
elt = @tail.value
if @size == 1
@head = nil
@tail = nil
else
@tail = @tail.previous
@tail.next.previous = nil
@tail.next = nil
end
@size -= 1
return elt
end
|
[
"def pop\r\n # Return nil if the list is empty\r\n if self.head.nil?\r\n return nil\r\n end\r\n self.size -= 1\r\n return delete_element(self.head).value\r\n end",
"def remove_last\n raise 'Empty List' if empty?\n\n item = @tail\n @tail = item.prev_node\n @tail.next_node = nil if @tail\n @size -= 1\n item.data\n end",
"def remove_last\r\n\t\t\tunless @head.next_node == @tail\r\n\t\t\t\tto_be_removed = @tail.prev_node\r\n\t\t\t\t@tail.prev_node = @tail.prev_node.prev_node\r\n\t\t\t\t@tail.prev_node.next_node = @tail\r\n\t\t\t\treturn to_be_removed.key\r\n\t\t\tend\r\n\t\tend",
"def shift\r\n # Return nil if the list is empty\r\n if self.head.nil?\r\n return nil\r\n end\r\n self.size -= 1\r\n return delete_element(self.tail).value\r\n end",
"def remove_last_child\n @elements.pop\n end",
"def delete\n raise 'No such element' if @size == 0\n elt = @current.value\n if @size == 1\n @current.next = nil\n @current.previous = nil\n @current = nil\n else\n @current.next.previous = @current.previous\n @current.previous.next = @current.next\n @current = @current.next\n end\n @size -= 1\n return elt\n end",
"def remove(value)\n\t\t\telement = self.head\n\t\t\tprevious_element = @head\n\t\t\twhile element.value != value\n\t\t\t\tif element.next.nil?\n\t\t\t\t\treturn nil\n\t\t\t\telse\n\t\t\t\t\tprevious_element = element\n\t\t\t\t\telement = element.next\n\t\t\t\tend\n\t\t\tend\n\n\t\t\tprevious_element.next = element.next\n\t\t\telement\n\t\tend",
"def test_it_can_remove_a_node_by_value\n list = LinkedList.new(\"start\")\n list.append(\"chilula\")\n list.append(\"sriracha\")\n list.append(\"tapatio\")\n list.remove_by_data(\"chilula\")\n assert_equal \"tapatio\", list.find_by_index(2)\n end",
"def delete_tail\r\n delete_node @tail\r\n end",
"def last\n return @nodes[-1]\n end",
"def delete_after\n if !next_node.nil?\n @next_node = next_node.next_node\n else\n raise \"This is the last node; nothing to delete.\"\n end\n end",
"def remove_front\n value = self.head # save the first element\n self.head = value.next # the second element becomes the new first element\n value.next = nil # no longer in the list, so no next\n return value\n end",
"def remove_from_back\n return \"Empty list...\" if @head.nil?\n\n node = @head\n while node != @tail\n if node.next.next.nil?\n node.next = nil\n @tail = node\n break\n else\n node = node.next\n end\n end\n\n if @head == @tail\n @head = nil\n @tail = nil\n end\n\n reduce_list_length\n self.display\n end",
"def remove(index)\n return nil if index.negative? || index > length\n return shift if index.zero?\n return pop(index) if index == self.length - 1\n\n previous_node = get(index - 1)\n removed = previous_node.next\n previous_node.next = removed.next\n self.length -= 1\n removed\n end",
"def remove_from_tail\n # if linked list is not empty\n if not is_empty?\n # save reference to tail node\n remove = @tail\n\n # if linked list has one item\n if @head == @tail\n # set both head and tail to nil\n @head = nil\n @tail = nil\n else\n # set up a loop that starts at @head and stops at the node prior to @tail\n start = @head\n until start.next == @tail\n start = start.next\n end\n # set that node as the new tail\n @tail = start\n # point new tail's next value to nil\n @tail.next = nil\n end\n # return saved node\n return remove\n end\n end",
"def tail\n @list && @list.size > 0 ? @list.size-1 : nil\n end",
"def remove_first\n raise 'Empty List' if empty?\n\n item = @head\n @head = item.next_node\n @head.prev_node = nil if @head\n @size -= 1\n item.data\n end",
"def remove (data)\n if @head.data == data\n @head = @head.next # Move head to the next node\n else\n current_node = @head.next # Go the next element\n prev_node = @head\n while current_node\n if current_node.data == data\n prev_node.next = current_node.next\n return @head # Head didnt change\n end\n prev_node = current_node\n current_node = current_node.next\n end\n\n return @head\n end\n end",
"def last\n return nil if @list.empty?\n @list[tail]\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Return the last element without removing it
|
def peek_last
raise 'No such element' if @size == 0
@tail.value
end
|
[
"def last\n elements.last\n end",
"def last\n return nil if @list.empty?\n @list[tail]\n end",
"def my_but_last( list )\n element_at list, length( list ) - 1\nend",
"def last\n lambda do |_rec, acc|\n acc.slice!(0, acc.length - 1)\n end\n end",
"def remove_last\n raise 'No such element' if @size == 0\n elt = @tail.value\n if @size == 1\n @head = nil\n @tail = nil\n else\n @tail = @tail.previous\n @tail.next.previous = nil\n @tail.next = nil\n end\n @size -= 1\n return elt\n end",
"def last_element(arr)\n\tarr.last\nend",
"def last\n item = items.last\n item ? item.value : nil\n end",
"def butlast\n if self.empty?\n []\n else\n self[0..-2]\n end\n end",
"def last_element(array)\n array[-1]\nend",
"def remove_last_child\n @elements.pop\n end",
"def last_of(arr)\n output = arr[-1]\n return output\nend",
"def my_last( list )\n element_at list, length( list )\nend",
"def last() end",
"def last\n collection.size - 1\n end",
"def last\n return @nodes[-1]\n end",
"def find_last\n current = @head\n\n until current.next.nil?\n current = current.next\n end\n\n return current\n end",
"def last\n @collection.size - 1\n end",
"def retrieve_last_element_from_array(array)\n array[-1]\nend",
"def last_of(arr)\n return arr[-1]\nend"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Initialize a node with value v, previous p and next n
|
def initialize(v, n, p)
@value = v
@next = n
@previous = p
end
|
[
"def initialize(value, n)\n @value = value\n @next = n\n end",
"def initialize(value = nil, next_node = nil, prev_node = nil)\n @value = value\n @next = next_node\n @prev = prev_node\n end",
"def initialize(v, d, n = nil)\n self.node = n\n self.vector = v\n self.data = d\n end",
"def initialize(key, value, next_node)\r\n\t\t\t@key = key\r\n\t\t\t@value = value\r\n\t\t\t@next = next_node\r\n\t\tend",
"def prep_node(val)\n node = Node.new\n node.value = val\n node\n end",
"def initialize( value, level, next_node = nil )\n @value = value\n @level = level\n @next_node = next_node\n end",
"def initialize(head_value = nil)\n @head = Node.new(head_value)\n @size = head_value ? 1 : 0\n end",
"def initialize\n @head = Node.new(:head)\n @tail = Node.new(:tail)\n @head.next = @tail\n @tail.prev = @head\n # nil on default @head.prev = nil\n # nil on default @tail.next = nil\n end",
"def n=(p0) end",
"def initialize(in_data)\n @data = in_data\n @prev_node = nil\n @next_node = nil\n end",
"def initialize(node_value)\n self.node_value = node_value\n @left_node = nil\n @right_node = nil\n end",
"def build_linked_list(vals)\n root = Node.new(vals[0])\n vals[1..-1].inject(root) { |node, val|\n node.next = Node.new(val) }\n root\nend",
"def initialize (value, parent = nil, left = nil, right = nil) #initialize all nodes to nil on creation.\n\t\t@value = value\n\t\t@parent = parent\n\t\t@left = left\n\t\t@right = right\n\n\t\t\n\tend",
"def initialize\n @head = ANode.new(HEAD_VAL)\n @curr = @head\n @size = 0\n end",
"def init3_2(data = nil)\r\n\t@head = Node.new(data)\r\n\t@tail = nil\r\nend",
"def n=(new_n)\n @n , @previous_n = new_n, @n\n build_chains() if @n != @previous_n && @previous_n != nil && @previous_n != 0\n end",
"def initialize(key, value = key)\n @key = key\n @value = value\n @next = self\n @prev = self\n end",
"def initialize(v)\n @v = v\n end",
"def initialize(n = 0, starting_values = [], &algorithm)\n @sequence = []\n @starting_values = starting_values\n @algorithm = algorithm.nil? ? DEFAULT_ALGORITHM : algorithm\n set_next(n)\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
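The three list snippets above (remove_last, peek_last and Node#initialize) belong to a doubly linked list; the sketch below shows one way they could hang together. The DoublyLinkedList class name and the add_last method are illustrative assumptions, not part of the original entries.

class DoublyLinkedList
  # Same field order as the Node entry above: value, next, previous.
  Node = Struct.new(:value, :next, :previous)

  def initialize
    @head = nil
    @tail = nil
    @size = 0
  end

  # Hypothetical counterpart to remove_last: append a value at the tail.
  def add_last(value)
    node = Node.new(value, nil, @tail)
    @tail.next = node if @tail
    @head ||= node
    @tail = node
    @size += 1
    value
  end

  def peek_last
    raise 'No such element' if @size == 0
    @tail.value
  end
end

# list = DoublyLinkedList.new
# list.add_last(1)
# list.add_last(2)
# list.peek_last # => 2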
Evaluates the content of block and stores the result as content_for :sidebar. Because content_for concatenates the blocks, you can call sidebar multiple times and yield all the final content once. See ActionView::Helpers::CaptureHelper#content_for for the full API documentation. ==== Examples Sidebar => Sidebar
|
def sidebar(&block)
content_for :sidebar, &block
end
|
[
"def widget_sidebar(&block)\n @widget_options[:sidebar_html] = capture(&block)\n end",
"def sidebar!\n content_for(:layout_sidebar) do\n 'true'\n end\n end",
"def wrapper(options={}, &block)\n raise ArgumentError, \"Missing block\" unless block_given?\n\n options[:class] = \"hideable-sidebar-wrap #{options[:class]}\"\n\n if options[:data] && options[:data].is_a?(Hash)\n options[:data] = options[:data].merge({ \"hideable-sidebar\" => \"\" })\n else\n options[:data] = { \"hideable-sidebar\" => \"\" }\n end\n\n content_tag(:div, options) do\n content_tag(:div, class: \"row\") do\n capture(&block)\n end\n end\n end",
"def sidebar(options={}, &block)\n raise ArgumentError, \"Missing block\" unless block_given?\n\n options[:class] = \"hideable-sidebar #{options[:class]}\"\n\n content_tag(:div, options) do\n [\n options[:include_close_button] != false ? button(\"Close\") : \"\",\n capture(&block)\n ].join.html_safe\n end\n end",
"def main_content\n yield if block_given?\n end",
"def sidebar_stats(&block)\n html = capture(&block)\n unless html.blank?\n concat tag(:div, {:id => sidebar_stats_dom_id}, true)\n concat html\n concat \"</div>\"\n end\n end",
"def sidebar_actions_container(options={}, &proc)\n partial_name = 'shared/sidebar_actions_container'\n if block_given? && !options[:body] && body = capture(&proc)\n unless body.blank?\n options.merge!(:body => body)\n concat(render(:partial => partial_name, :locals => options))\n end\n else\n render(:partial => partial_name, :locals => options)\n end\n end",
"def outer_content_for(name, content = nil, &block)\n outer_view.instance_eval do\n content_for(name, content, &block)\n end\n\n # This was the old implementation, which didn't work for me. Also, it was needlessly complex, since we can just do an instance_eval on the outer_view and then invoke and reuse Rails' normal content_for helper.\n # The old implementation also didn't let you retrieve content with <%= content_for(:name) %> (content = nil) like the Rails one allows.\n # This was returning a different object each time outer_content_for was called -- and it certainly wasn't the same object_id as self.object_id gave from the outermost layout view, so it must be creating a new object each time or something rather than just fetching the existing one (from an instance variable, f.e.):\n #outer_view = controller.parent_controller.view_context\n #::Rails.logger.debug \"... outer_view.send(:'instance_variables')=#{ outer_view.send(:'instance_variables').inspect}\"\n #::Rails.logger.debug \"... outer_view.object_id=#{outer_view.object_id.inspect}\"\n #content = capture(&block) if block_given?\n #content_for = outer_view.send(:'instance_variable_get', :\"@_content_for\")\n #::Rails.logger.debug \"... content_for=#{content_for.inspect}\"\n #content_for[name] << content\n #outer_view.send(:'instance_variable_set', :\"@_content_for\", content_for)\n #nil\n end",
"def render_block(context, content)\n puts \"Layout #{@filename} rendering content block\"\n @layout_proc.call(context, content)\n end",
"def content\n call_block\n end",
"def content_for(*args,&block)\n if block\n helpers.content_for(*args,&block)\n else\n rawtext(helpers.content_for(*args))\n ''\n end\n end",
"def with_content_of(content_block, &block)\n content = capture(&content_block)\n concat(block_given? ? yield(content) : content, content_block.binding)\n end",
"def sidebar\n @sidebar ||= find_sub_page(:sidebar)\n end",
"def content_for(symbol, &block)\n content_blocks[symbol] << capture_haml(&block)\n end",
"def content\n @blocks.map {|b| b.render } * EOL\n end",
"def nav_side_bar\n render layout: \"/shared/navigation/sidebar\" do\n yield if block_given?\n end\n end",
"def area(name, content=nil, &block)\n content = capture(&block) if block_given?\n append(name, content)\n render_area(name)\n end",
"def sidebar\n @sidebar ||= find_page('Sidebar', :with_redirect => false)\n end",
"def area(name, content = nil, &block)\n content = capture(&block) if block_given?\n append name, content\n render_area name\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
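A usage sketch for the sidebar helper above, assuming a standard Rails layout; the markup is illustrative only. Because content_for concatenates, both calls end up in the single :sidebar block that the layout yields.

<%# In any view (illustrative markup): %>
<% sidebar do %>
  <h3>About</h3>
<% end %>
<% sidebar do %>
  <h3>Archives</h3>
<% end %>

<%# In the layout, yield everything captured above once: %>
<aside><%= yield :sidebar %></aside>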
GET /meeting_follow_ups GET /meeting_follow_ups.json
|
def index
@meeting_follow_ups = MeetingFollowUp.all
end
|
[
"def meetups\n add_breadcrumb \"My Meetups\", \"/users/#{current_user.id}/meetups\"\n\n @user = User.find(params[:id])\n if params[:status]\n @meetups = Meetup.find_user_meetups(current_user, params[:status])\n else\n @meetups = @user.meetups.order(:updated_at).reverse_order\n end\n\n respond_to do |format|\n format.html # meetups.html.erb\n format.json { render json: @meetups }\n end\n end",
"def index\n @follow_ups = FollowUp.all\n end",
"def show_follows \n @user = User.find(params[:id])\n \n render json: @user.follows\n end",
"def show_follows\n @user = User.find(params[:id])\n \n render json: @user.follows\n end",
"def meetups\n @meetups ||= begin\n uri = URI(\"https://api.meetup.com/#{urlname_for(slots[\"Topic\"].value)}/events\")\n resp = Net::HTTP.get(uri)\n JSON.parse(resp).first(2)\n end\n end",
"def update\n respond_to do |format|\n if @meeting_follow_up.update(meeting_follow_up_params)\n format.html { redirect_to @meeting_follow_up, notice: 'Meeting follow up was successfully updated.' }\n format.json { render :show, status: :ok, location: @meeting_follow_up }\n else\n format.html { render :edit }\n format.json { render json: @meeting_follow_up.errors, status: :unprocessable_entity }\n end\n end\n end",
"def show_follows\n \t@user = User.find(params[:id])\n \t\n \trender json: @user.follows\n end",
"def new\n @follow_up = FollowUp.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @follow_up }\n end\n end",
"def followings\n @hot_topics = Topic.hot_topics(5)\n @interviewee = User.where(:nickname => params[:id]).first\n @is_same_user = @interviewee.is_same_user?(@current_user)\n \n @followings = @interviewee.followings\n end",
"def index\n @off_meetings = OffMeeting.all.order(\"updated_at DESC\").page(params[:new]).per(5)\n @search = OffMeeting.search(params[:q])\n\n ##フォロー中のユーザーのオフ会を取得\n if user_signed_in?\n following_users = current_user.following.includes(:off_meetings)\n\n @following_off_meetings = []\n following_users.each do |user|\n @following_off_meetings << user.off_meetings\n end\n\n @following_off_meetings.flatten!\n @following_off_meetings.sort_by! {|off_meeting| off_meeting.updated_at}\n @following_off_meetings.reverse!\n @following_off_meetings = Kaminari.paginate_array(@following_off_meetings).page(params[:following]).per(5)\n end\n end",
"def show\n user = Api::V1::User.find(params[:id])\n unless user.nil?\n render json: user.followings.all\n end\n end",
"def all_followships\n followships = User.find(params[:user_id]).followships\n render :json => followships\n end",
"def show\n @followup_interview = FollowupInterview.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @followup_interview }\n end\n end",
"def new\n @followup = Followup.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @followup }\n end\n end",
"def followed_issues\n issues_list = []\n user = User.find(params[:id])\n followed_issues = user.followed_issues\n followed_issues.each do |issue|\n issues_list << {\n id: issue.id,\n title: issue.title,\n description: issue.description,\n }\n end\n render json: {\n success: true,\n issues: issues_list\n }\n end",
"def followers\n @connections = Connection.getfollowing(params[:auth_token], 1)\n\n respond_to do |format|\n# format.html # show.html.erb\n format.json { render json: @connections }\n end\n end",
"def index\n @meetings = (current_user.meetings + current_user.invited_meetings).sort_by(&:start_time)\n json_response(@meetings)\n end",
"def fetch\n follows = Follow.where(follower_id: params[:user_id])\n paginate json: follows.to_json(:include => :following)\n end",
"def index\n @contest_followings = current_user.contest_followings.includes(:contest)\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @contest_followings }\n end\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
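The scaffolded controller actions in the entries above and below assume a conventional resources route; a minimal sketch follows. The nested path used later in create (meeting_follow_up_record_employee_plans_path) is app-specific and is not reconstructed here.

# config/routes.rb (sketch): standard RESTful routes backing the
# MeetingFollowUpsController actions shown in these entries.
Rails.application.routes.draw do
  resources :meeting_follow_ups
end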
POST /meeting_follow_ups POST /meeting_follow_ups.json
|
def create
@meeting_follow_up = MeetingFollowUp.new(meeting_follow_up_params)
respond_to do |format|
if @meeting_follow_up.save
format.html { redirect_to meeting_follow_up_record_employee_plans_path(plan_id: params[:meeting_follow_up][:employee_plan_id]), notice: 'Meeting follow up was successfully created.' }
format.json { render :show, status: :created, location: @meeting_follow_up }
else
format.html { render :new }
format.json { render json: @meeting_follow_up.errors, status: :unprocessable_entity }
end
end
end
|
[
"def create\n @follow_up = FollowUp.new(params[:follow_up])\n\n respond_to do |format|\n if @follow_up.save\n format.html { redirect_to @follow_up, notice: 'Follow up was successfully created.' }\n format.json { render json: @follow_up, status: :created, location: @follow_up }\n else\n format.html { render action: \"new\" }\n format.json { render json: @follow_up.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @followup = Followup.new(params[:followup])\n\n respond_to do |format|\n if @followup.save\n format.html { redirect_to @followup, notice: 'Followup was successfully created.' }\n format.json { render json: @followup, status: :created, location: @followup }\n else\n format.html { render action: \"new\" }\n format.json { render json: @followup.errors, status: :unprocessable_entity }\n end\n end\n end",
"def add_follow_up(params = {})\n if req = client.request('tickets/followup', format_params(params))\n true\n else\n false\n end\n end",
"def create\n @follow_up = FollowUp.new(params[:follow_up])\n\n respond_to do |format|\n if @follow_up.save\n format.html { redirect_to(@follow_up, :notice => 'Follow up was successfully created.') }\n format.xml { render :xml => @follow_up, :status => :created, :location => @follow_up }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @follow_up.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def create\n @meetup = Meetup.new(meetup_params)\n @meetup.on_ranking = true\n authorize @meetup\n respond_to do |format|\n if @meetup.save\n @meetup.sessions.create(location_id: Location.where(active: true).first.id)\n @activity = @meetup.create_activity(current_user.id)\n @notifications = @activity.create_notification\n @meetup.holdings.create(user_id: current_user.id)\n notify_collaborators\n format.html { redirect_to meetup_path(@meetup) }\n format.json do\n render :show,\n status: :created, location: @meetup\n end\n else\n format.html { render :new }\n format.json do\n render json: @meetup.errors,\n status: :unprocessable_entity\n end\n end\n end\n end",
"def create\n\n respond_to do |format|\n if @followup.save\n format.html { redirect_to(@goal, :notice => t('followups.create.success')) }\n format.xml { render :xml => @followup, :status => :created, :location => @followup }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @followup.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def create\n @interview = Interview.new(params[:interview])\n @followup_interview = FollowupInterview.new(params[:followup_interview])\n @followup_interview.interview = @interview\n\n respond_to do |format|\n if @followup_interview.save\n format.html { redirect_to @followup_interview, notice: 'Followup interview was successfully created.' }\n format.json { render json: @followup_interview, status: :created, location: @followup_interview }\n else\n format.html { render action: \"new\" }\n format.json { render json: @followup_interview.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @meeting_follow_up.update(meeting_follow_up_params)\n format.html { redirect_to @meeting_follow_up, notice: 'Meeting follow up was successfully updated.' }\n format.json { render :show, status: :ok, location: @meeting_follow_up }\n else\n format.html { render :edit }\n format.json { render json: @meeting_follow_up.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n head 404\n # @api_v1_following = Api::V1::Following.new(api_v1_following_params)\n # @api_v1_follower.following_id = @current_user.id\n\n # if @api_v1_following.save\n # render json: @api_v1_following, status: :created, location: @api_v1_following\n # else\n # render json: @api_v1_following.errors, status: :unprocessable_entity\n # end\n end",
"def create\n @meetup = Meetup.new(meetup_params)\n\n respond_to do |format|\n if @meetup.save\n format.html { redirect_to @meetup, notice: 'Meetup was successfully created.' }\n format.json { render :show, status: :created, location: @meetup }\n else\n format.html { render :new }\n format.json { render json: @meetup.errors, status: :unprocessable_entity }\n end\n end\n end",
"def index\n @meeting_follow_ups = MeetingFollowUp.all\n end",
"def create\n @meetup = Meetup.new(params[:meetup])\n\t@meetup.meetupdetails\n\n\n respond_to do |format|\n if @meetup.save\n\n\t\t\n\t\t\n \n\n format.html { redirect_to @meetup, notice: 'Meetup was successfully created.' }\n format.json { render json: @meetup, status: :created, location: @meetup }\n else\n format.html { render action: \"new\" }\n format.json { render json: @meetup.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @api_v1_follower = Api::V1::Follower.new(api_v1_follower_params)\n @api_v1_follower.follower_id = @current_user.id\n\n if @api_v1_follower.save\n render json: @api_v1_follower, status: :created, location: @api_v1_follower\n else\n render json: @api_v1_follower.errors, status: :unprocessable_entity\n end\n end",
"def new\n @follow_up = FollowUp.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @follow_up }\n end\n end",
"def create\n @meetup = Meetup.new(params[:meetup])\n @meetup.staff_id = current_staff\n @meetup.status = PLANNED\n\n respond_to do |format|\n if @meetup.save\n format.html { redirect_to meetups_path }\n format.xml { render :xml => @meetup, :status => :created, :location => @meetup }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @meetup.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def create\n @contest_following = current_user.contest_followings.new(params[:contest_following])\n respond_to do |format|\n if @contest_following.save\n format.html { redirect_to contest_followings_path, notice: 'Following contest succesfully' }\n format.json { render json: @contest_following, status: :created, location: @contest_following }\n else\n format.html { redirect_to contests_path, notice: @contest_following.errors.full_messages.first }\n format.json { render json: @contest_following.errors, status: :unprocessable_entity }\n end\n end\n end",
"def new\n @followup = Followup.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @followup }\n end\n end",
"def add_and_attend_meetup\n user = User.new(name: params[:user][:name],\n email: params[:user][:email],\n native_language: params[:user][:native_language])\n\n respond_to do |format|\n if user.save\n @meetup.meetup_attendees.create(user_id: user.id)\n\n flash[:success] = \"#{user.name} marked as present.\"\n format.html { redirect_to @meetup }\n format.json { render :show, status: :created, location: @meetup}\n else\n flash[:danger] = \"Problem creating user: #{user.errors.full_messages.join(\", \")}\"\n format.html { redirect_to @meetup }\n format.json { render json: user.errors, status: :unprocessable_entity }\n end\n end\n\n end",
"def create\n @follow_up_type = FollowUpType.new(follow_up_type_params)\n\n respond_to do |format|\n if @follow_up_type.save\n format.html { redirect_to @follow_up_type, notice: 'Follow up type was successfully created.' }\n format.json { render action: 'show', status: :created, location: @follow_up_type }\n else\n format.html { render action: 'new' }\n format.json { render json: @follow_up_type.errors, status: :unprocessable_entity }\n end\n end\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
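The create and update actions reference meeting_follow_up_params, which is not included in the entries; a typical strong-parameters definition is sketched below. The permitted attribute names are assumptions (only employee_plan_id is visible in the create action above), and set_meeting_follow_up is the usual before_action lookup.

private

# Sketch: look up the record for member actions (show/edit/update/destroy).
def set_meeting_follow_up
  @meeting_follow_up = MeetingFollowUp.find(params[:id])
end

# Sketch: whitelist attributes; employee_plan_id appears in the create action
# above, the remaining names are illustrative placeholders.
def meeting_follow_up_params
  params.require(:meeting_follow_up).permit(:employee_plan_id, :notes, :follow_up_date)
end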
PATCH/PUT /meeting_follow_ups/1 PATCH/PUT /meeting_follow_ups/1.json
|
def update
respond_to do |format|
if @meeting_follow_up.update(meeting_follow_up_params)
format.html { redirect_to @meeting_follow_up, notice: 'Meeting follow up was successfully updated.' }
format.json { render :show, status: :ok, location: @meeting_follow_up }
else
format.html { render :edit }
format.json { render json: @meeting_follow_up.errors, status: :unprocessable_entity }
end
end
end
|
[
"def update\n @follow_up = FollowUp.find(params[:id])\n\n respond_to do |format|\n if @follow_up.update_attributes(params[:follow_up])\n format.html { redirect_to @follow_up, notice: 'Follow up was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @follow_up.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @followup = Followup.find(params[:id])\n\n respond_to do |format|\n if @followup.update_attributes(params[:followup])\n format.html { redirect_to @followup, notice: 'Followup was successfully updated.' }\n format.json { head :ok }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @followup.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\r\n @follow_up = FollowUp.find(params[:id])\r\n\r\n respond_to do |format|\r\n if @follow_up.update_attributes(params[:follow_up])\r\n flash[:notice] = 'FollowUp was successfully updated.'\r\n format.html { redirect_to incident_path(@incident) }\r\n format.xml { head :ok }\r\n else\r\n format.html { render :action => \"edit\" }\r\n format.xml { render :xml => @follow_up.errors.to_xml }\r\n end\r\n end\r\n end",
"def update\n respond_to do |format|\n if @incidentfollowup.update(incidentfollowup_params)\n format.html { redirect_to @incidentfollowup, notice: 'Incidentfollowup was successfully updated.' }\n format.json { render :show, status: :ok, location: @incidentfollowup }\n else\n format.html { render :edit }\n format.json { render json: @incidentfollowup.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @follow_up = FollowUp.find(params[:id])\n\n respond_to do |format|\n if @follow_up.update_attributes(params[:follow_up])\n format.html { redirect_to(@follow_up, :notice => 'Follow up was successfully updated.') }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @follow_up.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def update\n head 404\n # @api_v1_follower = Api::V1::Follower.find(params[:id])\n\n # if @api_v1_follower.update(api_v1_follower_params)\n # head :no_content\n # else\n # render json: @api_v1_follower.errors, status: :unprocessable_entity\n # end\n end",
"def update\n @followee = Followee.find(params[:id])\n\n respond_to do |format|\n if @followee.update_attributes(params[:followee])\n format.html { redirect_to @followee, notice: 'Followee was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @followee.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @auto_follow.update(auto_follow_params)\n format.html { redirect_to @auto_follow, notice: 'Auto follow was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @auto_follow.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @meetup = Meetup.find(params[:id])\n\n respond_to do |format|\n if @meetup.update_attributes(params[:meetup])\n format.html { redirect_to @meetup, notice: 'Meetup was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @meetup.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @followup_interview = FollowupInterview.find(params[:id])\n @interview = Interview.find(@followup_interview.interview_id)\n\n respond_to do |format|\n if @followup_interview.update_attributes(params[:followup_interview]) && @interview.update_attributes(params[:interview])\n format.html { redirect_to @followup_interview, notice: 'Followup interview was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @followup_interview.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @follow_request.update(follow_request_params)\n format.html { redirect_back fallback_location: root_url, notice: \"Follow request was successfully updated.\" }\n format.json { render :show, status: :ok, location: @follow_request }\n else\n format.html { render :edit, status: :unprocessable_entity }\n format.json { render json: @follow_request.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @follow_up_survey.update(follow_up_survey_params)\n format.html { redirect_to @follow_up_survey, notice: 'Follow up survey was successfully updated.' }\n format.json { render :show, status: :ok, location: @follow_up_survey }\n else\n format.html { render :edit }\n format.json { render json: @follow_up_survey.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n authorize @meetup\n respond_to do |format|\n if @meetup.update(meetup_params)\n notify_collaborators\n format.html { redirect_to meetup_path(@meetup) }\n format.json { render :show, status: :ok, location: @meetup }\n else\n format.html { render :edit }\n format.json do\n render json: @meetup.errors,\n status: :unprocessable_entity\n end\n end\n end\n end",
"def update\n respond_to do |format|\n authorize! :update, @rec_event_follow, :message => 'Not authorized as an administrator.'\n if @rec_event_follow.update(rec_event_follow_params)\n format.html { redirect_to @rec_event_follow, notice: 'Rec event follow was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @rec_event_follow.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n authorize @meetup\n respond_to do |format|\n if @meetup.update(meetup_params)\n format.html { redirect_to meetup_path(@meetup) }\n format.json { render :show, status: :ok, location: @meetup }\n else\n format.html { render :edit }\n format.json do\n render json: @meetup.errors,\n status: :unprocessable_entity\n end\n end\n end\n end",
"def update\n respond_to do |format|\n if @meeting.update(meeting_params)\n format.json { head :no_content }\n else\n format.json { render json: @meeting.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @hack_meet.update(hack_meet_params)\n format.html { redirect_to @hack_meet, notice: 'Hack meet was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @hack_meet.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @meetup = Meetup.find(params[:id])\n\n respond_to do |format|\n if @meetup.update_attributes(params[:meetup])\n format.html { redirect_to meetups_path }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @meetup.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @follow_relationship.update(follow_relationship_params)\n format.html { redirect_to @follow_relationship, notice: 'Follow relationship was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @follow_relationship.errors, status: :unprocessable_entity }\n end\n end\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
DELETE /meeting_follow_ups/1 DELETE /meeting_follow_ups/1.json
|
def destroy
@meeting_follow_up.destroy
respond_to do |format|
format.html { redirect_to meeting_follow_ups_url, notice: 'Meeting follow up was successfully destroyed.' }
format.json { head :no_content }
end
end
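
A minimal client-side sketch, assuming the default scaffold routes and a locally running server (host, port and record id are illustrative, not part of the original): the JSON variant of this action replies with 204 No Content on success.

require 'net/http'
require 'uri'

uri = URI('http://localhost:3000/meeting_follow_ups/1.json')
request = Net::HTTP::Delete.new(uri)
response = Net::HTTP.start(uri.hostname, uri.port) { |http| http.request(request) }
puts response.code   # "204" when the record was destroyed (head :no_content)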
|
[
"def destroy\n @meetup = Meetup.find(params[:id])\n @meetup.destroy\n\n respond_to do |format|\n format.html { redirect_to request.referer }\n format.json { head :no_content }\n end\n end",
"def destroy\n @follow_up = FollowUp.find(params[:id])\n @follow_up.destroy\n\n respond_to do |format|\n format.html { redirect_to follow_ups_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @meetup = Meetup.find(params[:id])\n @meetup.destroy\n\n respond_to do |format|\n format.html { redirect_to meetups_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @followup = Followup.find(params[:id])\n @followup.destroy\n\n respond_to do |format|\n format.html { redirect_to followups_url }\n format.json { head :ok }\n end\n end",
"def destroy\n @followup_interview = FollowupInterview.find(params[:id])\n @followup_interview.destroy\n\n respond_to do |format|\n format.html { redirect_to followup_interviews_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @incidentfollowup.destroy\n respond_to do |format|\n format.html { redirect_to incidentfollowups_url, notice: 'Incidentfollowup was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @auto_follow.destroy\n respond_to do |format|\n format.html { redirect_to auto_follows_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @meetup.destroy\n respond_to do |format|\n format.html { redirect_to meetups_url, notice: 'Meetup was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n authorize! :destroy, @rec_event_follow, :message => 'Not authorized as an administrator.'\n @rec_event_follow.destroy\n respond_to do |format|\n format.html { redirect_to rec_event_follows_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @follow_up = FollowUp.find(params[:id])\n @follow_up.destroy\n\n respond_to do |format|\n format.html { redirect_to(follow_ups_url) }\n format.xml { head :ok }\n end\n end",
"def delete_follows\n \t@follower = User.find(params[:id])\n \t@followed = User.find(params[:follows_id])\n \t\n \tif @follower.follows.delete(@followed)\n \t\t\n \telse\n \t\trender json @follower.errors\n \tend\n end",
"def destroy\n @meetup_event = MeetupEvent.find(params[:id])\n @meetup_event.destroy\n\n respond_to do |format|\n format.html { redirect_to meetup_events_url }\n format.json { head :ok }\n end\n end",
"def destroy\n @hack_meet.destroy\n respond_to do |format|\n format.html { redirect_to hack_meets_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @followee = Followee.find(params[:id])\n @followee.destroy\n\n respond_to do |format|\n format.html { redirect_to followees_url }\n format.json { head :no_content }\n end\n end",
"def destroy\r\n @follow_up = FollowUp.find(params[:id])\r\n @follow_up.destroy\r\n\r\n respond_to do |format|\r\n format.html { redirect_to follow_ups_url }\r\n format.xml { head :ok }\r\n end\r\n end",
"def destroy\n @manage_follow_u.destroy\n respond_to do |format|\n format.html { redirect_to manage_follow_us_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @meetup_profile = MeetupProfile.find(params[:id])\n @meetup_profile.destroy\n\n respond_to do |format|\n format.html { redirect_to request.referer }\n format.json { head :no_content }\n end\n end",
"def destroy\n @meetup_member = MeetupMember.find(params[:id])\n @meetup_member.destroy\n\n respond_to do |format|\n format.html { redirect_to meetup_members_url }\n format.json { head :ok }\n end\n end",
"def destroy\n @follow_up_survey.destroy\n respond_to do |format|\n format.html { redirect_to follow_up_surveys_url, notice: 'Follow up survey was successfully destroyed.' }\n format.json { head :no_content }\n end\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Internal: Builds the body of a mixin. Returns a string representation of a mixin with asset helper functions.
|
def build_mixin_body(scope)
@mixin_body ||= if assets_hash(scope).values.all? {|value| value != '' }
<<-STYL
asset-url(key)
return pair[1] if pair[0] == key for pair in #{assets_hash(scope)[:url]} ()
asset-path(key)
return pair[1] if pair[0] == key for pair in #{assets_hash(scope)[:path]} ()
STYL
else
''
end
end
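
A self-contained sketch of the interpolation above, with an assumed assets_hash value (the real helper is defined elsewhere in the library); it shows the Stylus source the heredoc produces for the asset-url and asset-path functions.

# Assumed sample data standing in for assets_hash(scope)
assets_hash = {
  url:  %q{('logo.png' url("/assets/logo.png"))},
  path: %q{('logo.png' "/assets/logo.png")}
}

mixin_body = <<-STYL
asset-url(key)
  return pair[1] if pair[0] == key for pair in #{assets_hash[:url]} ()
asset-path(key)
  return pair[1] if pair[0] == key for pair in #{assets_hash[:path]} ()
STYL

puts mixin_body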
|
[
"def build_mixin_body(scope)\n @mixin_body ||= if assets_hash(scope).values.all? {|value| value != '' }\n <<-STYL\n#{generate_helper(scope, 'asset')}\n#{generate_helper(scope, 'image')}\n#{generate_helper(scope, 'audio')}\n#{generate_helper(scope, 'video')}\n STYL\n else\n ''\n end\n end",
"def visit_mixin(node); end",
"def mixin!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 28 )\n\n type = MIXIN\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 162:9: 'mixin'\n match( \"mixin\" )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 28 )\n\n end",
"def mixin!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 23 )\n\n type = MIXIN\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 161:9: 'mixin'\n match( \"mixin\" )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 23 )\n\n end",
"def use(mixin)\n # templates and partials are just blocks themselves so they just need to\n # be added to the workbook element\n # they will be instance eval'd when they get used\n (mixin.templates || []).each { |mt| template(*mt.args, &mt.build) }\n (mixin.partials || []).each { |mp| partial(*mp.args, &mp.build) }\n\n # styles not only need to be added to the workbook element, but\n # any build passed to the style needs to be instance eval'd\n (mixin.styles || []).each do |ms|\n StyleBuild.new(self, *ms.args, &ms.build).add do |build|\n instance_eval(&build)\n end\n end\n end",
"def as_comment_body\n\t\tcomment = \"%s: { template.%s\" % [ self.tagname, self.name ]\n\t\tcomment << self.methodchain if self.methodchain\n\t\tcomment << \" }\"\n\t\tcomment << \" with format: %p\" % [ self.format ] if self.format\n\n\t\treturn comment\n\tend",
"def as_comment_body\n\t\tcomment = \"%s '%s': { \" % [ self.tagname, self.name ]\n\t\tif self.methodchain\n\t\t\tcomment << \"template.%s%s\" % [ self.identifiers.first, self.methodchain ]\n\t\telse\n\t\t\tcomment << self.literal\n\t\tend\n\t\tcomment << \" }\"\n\t\tcomment << \" with format: %p\" % [ self.format ] if self.format\n\n\t\treturn comment\n\tend",
"def method_body(body, extname) # :nodoc:\n case extname\n when '.erb'\n source = \"# #{body.gsub(/\\n/, \"\\n# \")}\"\n compiler = ERB::Compiler.new('<>')\n compiler.put_cmd = \"write\"\n compiler.insert_cmd = \"write\"\n code = [compiler.compile(body)].flatten.first\n \n \"#{source}\\n#{code}\".gsub(/^(\\s*)/) do |m| \n indent = 2 + $1.length - ($1.length % 2)\n ' ' * indent\n end\n \n when '.rb'\n body.rstrip\n \n else\n raise CommandError.new(\"invalid definition format: #{extname.inspect}\")\n end\n end",
"def process_mixin_definition(definition)\n\t\tdefinition.self[:mixin].each do |bundle, mixins|\n\t\t\tbundle = bundle.to_s\n\t\t\t@cluster[:mixins][bundle] = {} if @cluster[:mixins][bundle] == nil\n\t\t\tmixins.each do |name, mixin|\n\t\t\t\tmixin[:structure] = process_conditional_block(definition, _get_conditionals(mixin[:structure]),mixin[:structure],'loop')\n\t\t\t\t@cluster[:mixins][bundle][name] = mixin\n\t\t\tend\n\t\tend\n\tend",
"def mixin(value)\n @mixins << value\n\n if value.is_a?(String)\n value = YAML.load(File.read(value))\n end\n\n return unless value\n\n value = Section.from_hash(value)\n\n deep_merge!(value.delete(:generic)) if value.has_key?(:generic)\n\n if value.has_key?(Env.to_sym)\n deep_merge!(value[Env.to_sym])\n else\n deep_merge!(value)\n end\n end",
"def body\n source\n end",
"def render\n to_fortitude_options = {\n :needs => [ ],\n :assign_reference => (@assigns == :instance_variables ? :instance_variable : :method),\n :do_end => @do_end,\n :new_style_hashes => @new_style_hashes\n }\n\n content_text = @template.to_fortitude(2, to_fortitude_options)\n\n out = \"class #{@class_name} < #{@superclass}\\n\"\n needs_text = needs_declarations(to_fortitude_options[:needs])\n out << \"#{needs_text}\\n \\n\" if needs_text\n\n out << \" def #{@method}\\n\"\n out << \"#{content_text.rstrip}\\n\"\n out << \" end\\n\"\n out << \"end\\n\"\n\n out\n end",
"def add_mixins(item)\n item.instance_mixins.reverse_each do |i|\n @current_object.create_include(i.path.to_s)\n end\n\n # YARD 0.9.26 makes extends appear in the same order as code\n # (includes are still reversed)\n if Gem::Version.new(YARD::VERSION) >= Gem::Version.new(\"0.9.26\")\n item.class_mixins.each do |e|\n @current_object.create_extend(e.path.to_s)\n end\n else\n item.class_mixins.reverse_each do |e|\n @current_object.create_extend(e.path.to_s)\n end\n end\n\n item.instance_mixins.length + item.class_mixins.length\n end",
"def body(args = {}, &block)\n build_base_component :body, args, &block\n end",
"def include(mixin, *args, **kwargs)\n cur_tool = DSL::Internal.current_tool(self, true)\n return self if cur_tool.nil?\n mod = DSL::Internal.resolve_mixin(mixin, cur_tool, @__loader)\n cur_tool.include_mixin(mod, *args, **kwargs)\n self\n end",
"def add_mixin(name, mixin_module = nil, &block)\n name = name.to_s\n if @mixins.key?(name)\n raise ToolDefinitionError,\n \"A mixin named #{name.inspect} has already been defined in tool\" \\\n \" #{display_name.inspect}.\"\n end\n @mixins[name] = mixin_module || Mixin.create(&block)\n self\n end",
"def _interop_body\n\n _save = self.pos\n while true # choice\n _tmp = apply(:_single_quote)\n break if _tmp\n self.pos = _save\n _tmp = apply(:_double_quote)\n break if _tmp\n self.pos = _save\n _tmp = match_string(\"\\\\}\")\n break if _tmp\n self.pos = _save\n _tmp = scan(/\\A(?-mix:[^}]+)/)\n break if _tmp\n self.pos = _save\n break\n end # end choice\n\n set_failed_rule :_interop_body unless _tmp\n return _tmp\n end",
"def is_mixin?\n type == :mixin\n end",
"def mixins(*scopes); end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Returns the cache of defined VMs for a user. It is a hash with VM id as key and VmUsage as value
|
def vms(user)
  vms = @users[user]
  @users[user] = vms = Hash.new if !vms
vms
end
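
A self-contained usage sketch (the enclosing class name and the VmUsage layout are assumed, not part of the original): it shows that the hash for a user is created lazily on first access and that the same hash is handed back on later calls.

VmUsage = Struct.new(:cpu, :memory, :num_vms)

class UsageCache
  def initialize
    @users = {}
  end

  # Lazily created per-user cache of VM id => VmUsage
  def vms(user)
    @users[user] ||= {}
  end
end

cache = UsageCache.new
cache.vms('alice')[42] = VmUsage.new(0.5, 512, 1)
p cache.vms('alice').keys   # => [42]  (same hash object on every call)
p cache.vms('bob')          # => {}    (created on demand for a new user)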
|
[
"def user_vms(user)\n running_vms.select { |vm| vm.handle['UID'] == user.id }\n end",
"def total(user)\n usage=VmUsage.new(0.0, 0, 0)\n \n @users[user].each do |id, vm|\n usage.cpu+=vm.cpu\n usage.memory+=vm.memory\n usage.num_vms+=1\n end if @users[user]\n \n usage\n end",
"def build_vuln_cache()\n r = Vulnerability.select(:id,:cve).pluck(:id, :cve)\n return r.inject({}) { |map, (id, cve)| map[cve] = id; map }\n end",
"def get_users_machine_ids(userid)\n # $rails_logr.error(\"Looking up vms for #{userid}\")\n current_user_vms = Rbac.filtered(Vm.all, :userid => userid)\n # $rails_logr.error(\"VMS are #{current_user_vms}\")\n machine_id_guid_hash = {}\n current_user_vms.each do |vm|\n machine_id = get_vm_machine_id(vm.guid)\n #We check custom attribute as fall back to support Insights without SSA\n machine_id = vm.miq_custom_get(MACHINE_ID_CUSTOM_KEY) if machine_id.nil? || machine_id.empty?\n if machine_id\n machine_id_guid_hash[vm.guid] = machine_id.strip\n end\n end\n machine_id_guid_hash\n end",
"def vms\n vms = []\n if current_user.admin?\n vms = object.vms\n elsif current_user.lead?\n vms = object.vms.select { |vm| vm.project.users.include?(current_user) }\n elsif current_user.dev?\n vms = object.vms.select { |vm| vm.user.id == current_user.id || vm.is_jenkins }\n else\n vms = object.vms.select { |vm| vm.user.id == current_user.id }\n end\n vms.map { |v| v.id }\n end",
"def virtualMachinesByFilter(filter)\n vms = nil\n @cacheLock.synchronize(:SH) do\n vms = applyFilter(virtualMachinesByMor_locked.values, filter)\n vms = dupObj(vms)\n end\n assert_no_locks\n vms\n end",
"def formatted_cache_data\n {\n token_id: token_id,\n userUuids: user_uuid\n }\n end",
"def memory\n hash = {}\n page_size = 4096 # Possibly changed later\n\n begin\n optr = FFI::MemoryPointer.new(:uint64_t)\n size = FFI::MemoryPointer.new(:size_t)\n size.write_int(optr.size)\n\n if sysctlbyname('hw.memsize', optr, size, nil, 0) < 0\n raise SystemCallError.new('sysctlbyname', FFI.errno)\n end\n\n hash[:total] = optr.read_uint64\n ensure\n optr.free if optr && !optr.null?\n size.clear\n end\n\n begin\n swap = Swap.new\n size.write_int(swap.size)\n\n if sysctlbyname('vm.swapusage', swap, size, nil, 0) < 0\n raise SystemCallError.new('sysctlbyname', FFI.errno)\n end\n\n hash[:swap_total] = swap[:xsu_total]\n hash[:swap_available] = swap[:xsu_avail]\n hash[:swap_used] = swap[:xsu_used]\n page_size = swap[:xsu_pagesize]\n ensure\n size.free if size && !size.null?\n end\n\n host_self = mach_host_self()\n vmstat = VmStat.new\n count = FFI::MemoryPointer.new(:size_t)\n count.write_int(vmstat.size)\n\n rv = host_statistics64(host_self, HOST_VM_INFO64, vmstat, count)\n raise SystemCallError.new('host_statistics64', rv) if rv != 0\n\n hash[:free] = vmstat[:free_count] * page_size\n hash[:active] = vmstat[:active_count] * page_size\n hash[:inactive] = vmstat[:inactive_count] * page_size\n hash[:speculative] = vmstat[:speculative_count] * page_size\n hash[:wire] = vmstat[:wire_count] * page_size\n hash[:compressed] = vmstat[:compressor_page_count] * page_size\n\n hash\n ensure\n count.free if count && !count.null?\n end",
"def metrics\n VMMetric.new(@client, @client.get('VM', :metrics, @uuid))\n end",
"def cache_key_for_current_user(obj)\n [obj, (obj.user == current_user)]\n end",
"def get_vms(host_id)\n vms = []\n @hosts[host_id][:hash][:HOST][:VMS].each do |hash|\n id = hash[:ID]\n vm = {\n VM: {\n ID: id,\n UID: id,\n GID: id % @count[:groups],\n UNAME: \"user-#{id}\",\n GNAME: '',\n NAME: \"vm-#{id}\",\n MEMORY: Random.new.rand(2..6)*100000,\n CPU: Random.new.rand(1..100)/100,\n NET_TX: Random.new.rand(2..9)*10000,\n NET_RX: Random.new.rand(2..9)*10000\n }\n }\n vms << Gyoku.xml(vm, {:key_converter => :none})\n end\n\n vms\n end",
"def get_votes\n res = []\n unless session[:drupal_user_id].nil?\n res = Rails.cache.fetch 'votes-from-' + session[:drupal_user_id].to_s do\n u = User.find_by_uid(session[:drupal_user_id])\n unless u.nil?\n r = Vote.where(voter_id: u.id).map { |v| v.voteable_id }\n else\n r = []\n end\n\n r\n end\n end\n\n res\n end",
"def fetch_vm_values\n return if @sb[:options][:vm_mode] == :manual\n if @sb[:options][:chosen_vm]\n @sb[:options][:values] = {}\n vm_options = VimPerformancePlanning.vm_default_options(@sb[:options][:vm_mode])\n VimPerformancePlanning.vm_metric_values(Vm.find(@sb[:options][:chosen_vm]),\n :vm_options => vm_options,\n :range => {\n :days => @sb[:options][:days],\n :end_date => perf_planning_end_date\n },\n :tz => @sb[:options][:tz],\n :time_profile_id => @sb[:options][:time_profile])\n vm_options.each do |k, v|\n next if v.nil?\n @sb[:options][:values][k] = if k == :storage\n (v[:value].to_i / 1.gigabyte).round\n else\n v[:value].to_i.round\n end\n end\n end\n end",
"def setup_user_tips_hash\n @user_tips_hash = Hash.new\n if current_user\n @user_tips = Tip.where(\"user_id = ?\", current_user.id)\n @user_tips.each {|t| @user_tips_hash[t.game_id] = t } \n end\n end",
"def ram_cached\n %x[free -ot].split[12]\n end",
"def vms\n load! if !loaded?\n @vms ||= load_vms!\n end",
"def virtualApps_locked\n raise \"virtualApps_locked: cache lock not held\" unless @cacheLock.sync_locked?\n\n #\n # Not supported in v2.0 or v2.5\n #\n if @v2\n @virtualApps = {}\n @virtualAppsByMor = {}\n end\n\n return(@virtualApps) if @virtualApps\n\n logger.info \"MiqVimInventory.virtualApps_locked: loading VirtualApp cache for #{@connId}\"\n begin\n @cacheLock.sync_lock(:EX) if (unlock = @cacheLock.sync_shared?)\n\n ra = getMoPropMulti(inventoryHash_locked['VirtualApp'], @propMap[:VirtualApp][:props])\n\n @virtualApps = {}\n @virtualAppsByMor = {}\n ra.each do |rpObj|\n addObjHash(:VirtualApp, rpObj)\n end\n ensure\n @cacheLock.sync_unlock if unlock\n end\n logger.info \"MiqVimInventory.virtualApps_locked: loaded VirtualApp cache for #{@connId}\"\n\n @virtualApps\n end",
"def stats(id)\n request(:get, \"/users/#{id}/vm_stats.json\")\n end",
"def get_all_vm_info\n vms = Container.get_all(CLIENT)\n\n return unless vms\n\n vms_info = get_values(vms)\n vms_info\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Returns the total consumption by a user as a VmUsage object
|
def total(user)
  usage = VmUsage.new(0.0, 0, 0)
  @users[user].each do |id, vm|
    usage.cpu += vm.cpu
    usage.memory += vm.memory
    usage.num_vms += 1
  end if @users[user]
usage
end
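
A standalone sketch of the same aggregation (data shapes assumed): each VmUsage entry recorded for a user is folded into a single accumulator by summing cpu and memory and counting the VMs.

VmUsage = Struct.new(:cpu, :memory, :num_vms)

# Assumed per-user cache, as built up by the vms(user) helper above
users = {
  'alice' => {
    1 => VmUsage.new(0.5, 512, 1),
    2 => VmUsage.new(1.0, 1024, 1)
  }
}

usage = VmUsage.new(0.0, 0, 0)
users['alice'].each_value do |vm|
  usage.cpu += vm.cpu
  usage.memory += vm.memory
  usage.num_vms += 1
end

p usage   # => #<struct VmUsage cpu=1.5, memory=1536, num_vms=2>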
|
[
"def get_user_total(user)\n self.get_user_items(user).to_a.sum(&:user_cost)\n end",
"def usage_summary(now=DateTime.now)\n summary = {\n this_month: { secs: 0, hours: 0, overage: {}, ondemand: {}, cost: 0.00 },\n current: [],\n history: [],\n }\n year = now.utc.year\n month = now.utc.month\n thismonth = sprintf(\"%d-%02d\", year, month)\n summary[:this_month][:period] = thismonth\n monthly_usages.order('\"yearmonth\" desc, \"use\" asc').slice(0,48).each do |mu|\n msum = {\n period: mu.yearmonth,\n type: mu.use,\n secs: mu.value.to_i,\n hours: mu.value.fdiv(3600).round(3),\n cost: mu.retail_cost.round(2), # expose only what we charge customers, whether we charge them or not.\n }\n summary[:history].push msum\n if mu.yearmonth == thismonth\n summary[:current].push msum\n end\n end\n\n # calculate current totals based on the User's plan. This determines overages.\n plan_hours = plan.hours\n base_monthly_cost = plan.amount # TODO??\n plan_is_premium = plan.has_premium_transcripts?\n\n # if plan is \"basic\", calculate ondemand premium and overages.\n if !plan_is_premium\n summary[:current].each do |msum|\n\n # if there is premium usage, it must be on-demand, so pass on the msum cost.\n if msum[:type] == MonthlyUsage::PREMIUM_TRANSCRIPTS && msum[:hours] > 0\n summary[:this_month][:ondemand][:cost] = msum[:cost]\n summary[:this_month][:ondemand][:hours] = msum[:hours].round(3)\n summary[:this_month][:cost] += msum[:cost]\n summary[:this_month][:hours] += msum[:hours].round(3)\n summary[:this_month][:secs] += msum[:secs]\n\n # basic plan, basic usage.\n elsif msum[:type] == MonthlyUsage::BASIC_TRANSCRIPTS\n\n # month-to-date hours\n summary[:this_month][:hours] += msum[:hours].round(3)\n summary[:this_month][:secs] += msum[:secs]\n\n # check for overage\n if msum[:hours] > plan_hours\n summary[:this_month][:overage][:hours] = msum[:hours] - plan_hours\n # we do not charge for basic plan overages. instead we just prevent them at upload time.\n #summary[:this_month][:overage][:cost] = (OVERAGE_HOURLY_RATE * summary[:this_month][:overage][:hours]).round(2)\n #summary[:this_month][:cost] += summary[:this_month][:overage][:cost]\n end\n end\n end\n\n # otherwise, plan is premium. sum this month and check for overages only.\n else\n summary[:current].each do |msum|\n #Don't double count usage for users in orgs.\n if !(msum[:type] == MonthlyUsage::PREMIUM_TRANSCRIPT_USAGE && self.organization)\n summary[:this_month][:hours] += msum[:hours].round(3)\n summary[:this_month][:secs] += msum[:secs]\n summary[:this_month][:cost] += msum[:cost]\n end\n if msum[:type] == MonthlyUsage::PREMIUM_TRANSCRIPTS\n if msum[:hours] > plan_hours\n summary[:this_month][:overage][:hours] = msum[:hours] - plan_hours\n summary[:this_month][:overage][:cost] = (OVERAGE_HOURLY_RATE * summary[:this_month][:overage][:hours]).round(2)\n summary[:this_month][:cost] += summary[:this_month][:overage][:cost]\n end\n end\n end\n if summary[:this_month][:overage][:cost]\n # since we had an overage for the month, ignore any specific retail costs for this month,\n # and treat the overage as the total for the month. This is because we don't want to charge 2x\n # if an on-demand retail cost contributed to the overage.\n summary[:this_month][:cost] = summary[:this_month][:overage][:cost]\n end\n end\n\n # return\n summary\n end",
"def total_billable_usage\n @attributes[:total_billable_usage]\n end",
"def user_cost\n @attributes[:user_cost]\n end",
"def balance(user_id)\n @tropo_client.get(\"users/#{user_id}/usage\")\n end",
"def monthly_print_usage_by_user\n return @monthly_print_usage_by_user\n end",
"def dropbox_account_usage\n url = \"https://api.dropboxapi.com/2/users/get_space_usage\"\n\n format = ->(value) { sprintf(\"%.2f\", value) }\n to_gb = ->(value) { format[value.to_f / 1024 / 1024 / 1024] }\n\n # API expects a body of \"null\"\n dropbox_post_json(url, payload: \"null\") do |json|\n used = json[\"used\"]\n max = json.dig(\"allocation\", \"allocated\")\n free = max - used\n used_percent = format[(used.to_f / max.to_f) * 100]\n free_percent = format[100.0 - used_percent.to_f]\n\n {\n max: to_gb[max],\n used: to_gb[used],\n used_percent: used_percent,\n free: to_gb[free],\n free_percent: free_percent,\n }\n end\n end",
"def get_space_usage\n resp = request('/users/get_space_usage')\n SpaceUsage.new(resp)\n end",
"def collect_consumptions\n User.real.find_each do |user|\n metric = UsageMetric.find_or_initialize_by(user: user)\n\n metric.update(\n custom_range_byte_hours: custom_range_consumption(user),\n daily_byte_hours: CloudResource.daily_consumption(user),\n weekly_byte_hours: CloudResource.weekly_consumption(user),\n monthly_byte_hours: CloudResource.monthly_consumption(user),\n yearly_byte_hours: CloudResource.yearly_consumption(user),\n cumulative_byte_hours: CloudResource.cumulative_consumption(user),\n )\n end\n end",
"def percent_user_usage\n if limited?\n @user_usage * 100 / @limit\n else\n 0\n end\n end",
"def check(user, new_vm=nil)\n usage=@usage.total(user)\n user_quota=get(user)\n if new_vm\n usage.cpu+=new_vm.cpu.to_f\n usage.memory+=new_vm.memory.to_i\n usage.num_vms+=1\n end\n \n STDERR.puts [user_quota, usage, new_vm].inspect\n \n (!user_quota[:cpu] || usage.cpu<=user_quota[:cpu]) &&\n (!user_quota[:memory] || usage.memory<=user_quota[:memory]) &&\n (!user_quota[:num_vms] || usage.num_vms<=user_quota[:num_vms])\n end",
"def monthly_print_usage_by_user=(value)\n @monthly_print_usage_by_user = value\n end",
"def MetricTotal(metric)\n\t\tmemoryconfigured = 0.0\n\t\ttotal = 0.0\n\t\t#This gets all the VMs\n\t\tresponse = self.class.get(\"/api/vms/query/\")\n\t\t#This creates an array of VMs \n\t\tvmarray = response['QueryResultRecords']['VMRecord']\n\t\t#This calculate the number of VMs in the virtual data center\n\t\t#For each VM we GET the current metrics and we increment the \"total\" \n\t\t#variable with the value from each VM for that metric \n\t\tvmarray.each do |vm|\n\t\t\t\t#This reads the memory configured for the VM.\n\t\t\t\t#This is needed cause the metric reports % memory usage, not actual memory usage\n\t\t\t\tmemoryconfigured = vm['memoryMB'].to_f.round(1)\n\t\t\t\t#This saves the entire href for the VM\n\t\t\t\tvmhrefall = vm['href']\n\t\t\t\t#This extracts the /api/<vm id>/metrics/current from the href\n\t\t\t\tvmhref = vmhrefall[36..90]\n\t\t\t\t#This gets all info about the VM\n\t\t\t\tcheckifmetricsavailable = self.class.get(\"#{vmhref}\")\n\t\t\t\t#This turns those info in HTTParty:Response format into a string\n\t\t\t\tcheckifmetricsavailabletxt = checkifmetricsavailable.to_s\n\t\t\t\t#This checks if the VM is deployed and the metrics are ready to be queried \n \t\t\t\tif checkifmetricsavailabletxt.include? \"metrics/current\"\n\t\t\t\t\tvmmetrics = self.class.get(\"#{vmhref}/metrics/current\")\n\t\t\t\t\t#This creates an array of all metrics\n\t\t\t\t\tvmmetricsarray = vmmetrics['CurrentUsage']['Metric']\n\t\t\t\t\t#This control is needed cause newly created VMs won't have Metrics available\n\t\t\t\t\t#for a few seconds and if the programs runs at that point it will crash\n\t\t\t\t\t#This is probably no longer needed with the check above (the below didn't catch all conditions\n\t\t\t\t\tif (vmmetricsarray != nil)\n\t\t\t\t\t\tvmmetricsarray.each do |metricsvalue| \n\t\t\t\t\t\t#This sums up the metric we passed to the function \t\t\t\t\t\t\t\n\t\t\t\t\t\tif metricsvalue['name'] == metric\n\t\t\t\t\t\t\t#If it's memory (% metric) we are summing up we need to do the math on actual usage \n\t\t\t\t\t\t\tif metric == 'mem.usage.average'\n\t\t\t\t\t\t\t\tputs memoryconfigured\n\t\t\t\t\t\t\t\tputs metricsvalue['value'].to_f.round(1)\n\t\t\t\t\t\t\t\t#here we do the math to do the sum as well as calculate memory usage from % memoroy usage \n\t\t\t\t\t\t\t\ttotal += memoryconfigured * metricsvalue['value'].to_f.round(1) / 100 \n\t\t\t\t\t\t\t\telse\n\t\t\t\t\t\t\t\t#If it's CPU (native metric) we are summing up we just take the value as is\n\t\t\t\t\t\t\t\tputs metricsvalue['value'].to_f.round(1)\n\t\t\t\t\t\t\t\ttotal += metricsvalue['value'].to_f.round(1)\n\t\t\t\t\t\t\tend\n\t\t\t\t\t\tend\n\t\t\t\t\tend\n\t\t\t\tend\n\t\t\tend \n\t\tend\n\t\tputs metric, total.round(1), Time.now\n\t\treturn total.round(1)\n\tend",
"def total_billable_transfer_usage\n @attributes[:total_billable_transfer_usage]\n end",
"def custom_range_consumption(user)\n ::UsageCollector::CloudResource.consumption(custom_range_start, custom_range_end, user)\n end",
"def user_units\n @user.units\n end",
"def daily_print_usage_by_user\n return @daily_print_usage_by_user\n end",
"def user_summary (new_user_id)\n user_id = new_user_id || session_user.id\n # user_id = session_user.id\n # shares = Share.where(user_id: user_id) can we sum\n total = Share.where(user_id: user_id).sum(:amount_owed_cents)\n total\n end",
"def daily_print_usage_by_user=(value)\n @daily_print_usage_by_user = value\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
GET /hp_searches GET /hp_searches.json
|
def index
@hp_searches = HpSearch.all
end
|
[
"def search\n @q = params[:q]\n @results = Series.external_search(@q)\n\n respond_to do |format|\n format.html # search.html.haml\n format.json { render json: @results.to_json }\n end\n end",
"def index\n @searches = Search.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @searches }\n end\n end",
"def search\n @sites = Site.search(params[:q])\n\n respond_to do |format|\n format.html { \n @pagy, @sites = pagy(@sites.order(:name))\n render :index\n }\n format.json {\n render :index\n }\n end\n end",
"def search\n games_data = BoardGameAtlas::API.search(params[:name])\n render json: { games: games_data.map }\n end",
"def search\n search_query = params[:q]\n # TODO: search comments here\n # This is a collection route, so:\n # - we don't need a resource id\n # - we should return a collection\n head :ok # just return 200 OK without rendering nothing\n end",
"def index\n @search = Hof.search(params[:q])\n @hofs = @search.result\n end",
"def search\n \n search = params[:search]\n miniresume = params[:miniresume]\n location = params[:location]\n\n #thinking_sphinx conditions - \n @adviceposts = Advicepost.search(search,miniresume,location,page: 1, per_page: 25)\n \n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @adviceposts }\n end\n end",
"def index\n term = params[:name]\n puts \"*** index: term = #{term}\"\n @heros = term ? Hero.where(\"name like ?\", \"%#{term}%\") : Hero.all\n puts \"*** found #{@heros.count} matches\"\n render json: @heros\n end",
"def search(query)\n resource.get(params: {query: query}) do |response|\n JSON.parse(response.body)\n end\n end",
"def search\n @physical_hosts = PhysicalHost.search(params[:q]).page params[:page]\n respond_to do |format|\n format.html { render :index}\n format.json { render json: @physical_hosts }\n end\n end",
"def index\n motor_searches = Motor::Search.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @motor_searches }\n end\n end",
"def search(search_url)\n params = { :restrictSearchableAttributes => 'url', :query => search_url, :typoTolerance => false }\n hn_search_url = URI('http://hn.algolia.com/api/v1/search')\n hn_search_url.query = URI.encode_www_form(params)\n response = Net::HTTP.get_response(hn_search_url)\n\n if response.is_a?(Net::HTTPSuccess)\n results = JSON.parse(response.body)\n hits = results['hits']\n\n if hits.length() > 0\n print_hits(hits)\n else\n puts 'could not find a match, you are probably good!'\n end\n else\n # Could not make the request, print out the error.\n response.value()\n end\nend",
"def index\n @reloud_checks = ReloudCheck.search(params[:search])\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @reloud_checks }\n end\n end",
"def search\n expose Challenge.search(@oauth_token, params[:keyword])\n end",
"def search\n recipes = Recipe.find_by_search_text(params[:search]).uniq.sort{|a,b|a.rank <=> b.rank }.reverse\n return HESResponder(recipes)\n end",
"def saved_searches_list\n url = \"#{@sal_url}/api/v2/saved_searches/\"\n pg_clc(url) == 1 ? json_resp_body(url)['results'] : paginator(url, pg_clc(url))\n end",
"def index\n @searcheds = Searched.all\n end",
"def all_searches\n render json: Query.all.most_frequent\n end",
"def getsearchresults\n query=params['q'].gsub(' ','+')\n query=query.gsub('/','%2F')\n if params[:site]=='metacritic'\n doc=Nokogiri::HTML(open(\"http://metacritic.com/search/game/#{query}/results\"))\n doc.encoding='utf-8'\n @results=doc.css(\"li.result\").inner_html\n elsif params[:site]=='gamerankings'\n doc=Nokogiri::HTML(open(\"http://www.gamerankings.com/browse.html?search=#{query}&numrev=3\"))\n @results=doc.at_css(\"div#main_col > div.pod\").inner_html\n end\n respond_to do |format|\n format.js {render :layout => false}\n end\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
POST /hp_searches POST /hp_searches.json
|
def create
@hp_search = HpSearch.new(hp_search_params)
respond_to do |format|
if @hp_search.save
format.html { redirect_to @hp_search, notice: 'Hp search was successfully created.' }
format.json { render :show, status: :created, location: @hp_search }
else
format.html { render :new }
format.json { render json: @hp_search.errors, status: :unprocessable_entity }
end
end
end
|
[
"def create_saved_search(query)\n post(\"/saved_searches/create.json\", :query => query)\n end",
"def json_POST_search(request)\n options = search_options(request)\n data = request.env['rack.input'].read\n JSON.parse(data).map do |topic, lat, lon|\n t = Time.now\n results = search(input, lat, lon, options).to_hash\n results[:time] = Time.now - t\n results\n end.to_json\n end",
"def search\n \n search = params[:search]\n miniresume = params[:miniresume]\n location = params[:location]\n\n #thinking_sphinx conditions - \n @adviceposts = Advicepost.search(search,miniresume,location,page: 1, per_page: 25)\n \n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @adviceposts }\n end\n end",
"def index\n @hp_searches = HpSearch.all\n end",
"def create\n @search = Search.new(search_params)\n\n respond_to do |format|\n if @search.save\n format.html { redirect_to action: 'new', query: @search.query }\n format.json { render :show, status: :created, location: @search }\n else\n format.html { redirect_to action: 'new' }\n format.json { render json: @search.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @simple_search = SimpleSearch.new(simple_search_params)\n get_response(params)\n respond_to do |format|\n format.html { render :index}\n format.json { render :index, status: 200 }\n end\n end",
"def create\n @search_set = SearchSet.new(params[:search_set].slice(:title, :description))\n\n searches = params[:search_set][:searches].select(&:present?).collect { |search_id| Search.find(search_id)}\n @search_set.searches << searches\n\n respond_to do |format|\n if @search_set.save\n format.html { redirect_to @search_set, notice: 'Search set was successfully created.' }\n format.json { render json: @search_set, status: :created, location: @search_set }\n else\n format.html { render action: \"new\" }\n format.json { render json: @search_set.errors, status: :unprocessable_entity }\n end\n end\n end",
"def log_search\n if params[:query].present? && params[:page].present?\n @session.searches.create(query: params[:query], page: params[:page])\n head :ok\n else\n head :bad_request\n end\n end",
"def search_and_save\n if params[:q].nil?\n redirect_to root_path\n else\n @dados = []\n url = URI.encode(\"http://search.twitter.com/search.json?q=#{params[:q]}\")\n call url\n Search.create(query: params[:q])\n end\n end",
"def search\n if params[:query] && !params[:query].blank? # Check a query has been sent\n q = params[:query].split.join(\" \") # Set the query to a variable and remove leading and trailing whitespace between words\n q = q.sub( \" \", \"* \" ) # Add * after each word to make it a wildcard search\n \n # Define search string that queries only listed attributes of the index and weights them accordingly using ^(weight)\n search_string = \"title:(#{q}*) OR description:(#{q}*) OR error_message_descriptions:(#{q}*)\"\\\n \" OR category_names:(#{q}*) OR tag_names:(#{q}*)\"\\\n \" OR solution_descriptions:(#{q}*) OR post_comments:(#{q}*) OR solutions_comments:(#{q}*)\"\n fetch = [:title, :updated_at, :username, :category_names, :tag_names, :solutions_size, :comments_size, :score]\n if params[:cfacet] && !params[:tfacet]\n @posts = post_query(q, fetch, true, {'category' => params[:cfacet].to_s})\n elsif params[:tfacet] && !params[:cfacet]\n @posts = post_query(q, fetch, true, {'tag' => params[:tfacet].to_s})\n elsif params[:cfacet] && params[:tfacet]\n @posts = post_query(q, fetch, true, {'category' => params[:cfacet].to_s, 'tag' => params[:tfacet].to_s})\n else\n @posts = post_query(q, fetch, true)\n end \n @posts.blank? ? @category_facets = {} : @category_facets = get_facets(@posts, \"category\")\n @posts.blank? ? @tag_facets = {} : @tag_facets = get_facets(@posts, \"tag\")\n @content_header = \"Search Results for query: #{params[:query].strip}\"\n else\n @content_header = \"Search Results for query: \"\n @posts = [].paginate(:page => params[:page])\n end \n render('list')\n end",
"def index\n SearchIndex.create(shout: shout, data: body)\n end",
"def create\n @haiku_search = HaikuSearch.new(params[:haiku_search])\n\n respond_to do |format|\n if @haiku_search.save\n format.html { redirect_to(@haiku_search, :notice => 'Haiku search was successfully created.') }\n format.xml { render :xml => @haiku_search, :status => :created, :location => @haiku_search }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @haiku_search.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def create\n @namesearch = Namesearch.new(params[:namesearch])\n\n respond_to do |format|\n if @namesearch.save\n format.html { redirect_to @namesearch, notice: 'Namesearch was successfully created.' }\n format.json { render json: @namesearch, status: :created, location: @namesearch }\n else\n format.html { render action: \"new\" }\n format.json { render json: @namesearch.errors, status: :unprocessable_entity }\n end\n end\n end",
"def index\n @searches = Search.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @searches }\n end\n end",
"def search(data)\n resource_uri = \"#{@resource_uri}/search\"\n return @client.post(resource_uri, data)\n end",
"def create # actually creates the new search query\n search = DictionarySearch.new(params[:dictionary_search])\n term = search.title\n search_language = search.language.to_i \n if search_language != 0\n language = ComplexScripts::Language.find(search.language)\n full_text_supported = (language.code=='en')\n else\n full_text_supported = false\n end\n \n options = Hash.new\n options[:type] = search.type\n if search.type == 'browse'\n options[:language] = search.language\n else\n conditions_string = Util.search_condition_string(search.type, 'title', full_text_supported)\n term = \"%#{term}%\" if !full_text_supported\n if search_language == 0\n conditions_array = [conditions_string, term]\n else\n conditions_array = [conditions_string + \" AND language_id = ?\", term, search.language]\n end\n options[:conditions] = conditions_array\n end\n session[:search_options] = options\n redirect_to dictionary_searches_url\n end",
"def create\n $SEARCHES[params[:id].to_i]['search_status'] = params[:status]\n\n # We get the result of the search\n if params[:status] == 'complete'\n uri = URI(params[:result_info][:url])\n resp = ConnectHelper.request_to_connect(uri, Net::HTTP::Get)\n $SEARCHES[params[:search_id].to_i]['candidate_ids'] = JSON.parse(resp.body)['candidate_ids']\n end\n end",
"def create\n @flexible_search = FlexibleSearch.new(flexible_search_params)\n @flexible_search.save\n\n respond_to do |format|\n if @flexible_search.save\n format.html { redirect_to @flexible_search, notice: 'Flexible search was successfully created.' }\n format.json { render :show, status: :created, location: @flexible_search }\n else\n format.html { render :new }\n format.json { render json: @flexible_search.errors, status: :unprocessable_entity }\n end\n end\n end",
"def search\n @q = params[:q]\n @results = Series.external_search(@q)\n\n respond_to do |format|\n format.html # search.html.haml\n format.json { render json: @results.to_json }\n end\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
PATCH/PUT /hp_searches/1 PATCH/PUT /hp_searches/1.json
|
def update
respond_to do |format|
if @hp_search.update(hp_search_params)
format.html { redirect_to @hp_search, notice: 'Hp search was successfully updated.' }
format.json { render :show, status: :ok, location: @hp_search }
else
format.html { render :edit }
format.json { render json: @hp_search.errors, status: :unprocessable_entity }
end
end
end
|
[
"def update\n respond_to do |format|\n if @advance_search.update(advance_search_params)\n format.html { redirect_to @advance_search, notice: 'Advance search was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @advance_search.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @search.update(search_params)\n format.html { redirect_to :root }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @search.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @saved_search = SavedSearch.find(params[:id])\n\n respond_to do |format|\n @saved_search.name = params[:name]\n @saved_search.query = params[:query]\n if @saved_search.save\n format.html { redirect_to :channels, :notice => 'Your channel was successfully saved.' }\n format.json { head :no_content }\n else\n format.html { render :action => \"edit\" }\n format.json { render :json => @saved_search.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def update\n \n respond_to do |format|\n if @search.update(search_params)\n format.html { redirect_to @search, notice: 'Search was successfully updated.' }\n format.json { head :no_content }\n \n else\n format.html { render action: 'edit' }\n format.json { render json: @search.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @searched.update(searched_params)\n format.html { redirect_to @searched, notice: 'Searched was successfully updated.' }\n format.json { render :show, status: :ok, location: @searched }\n else\n format.html { render :edit }\n format.json { render json: @searched.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @ifs_search.update(ifs_search_params)\n format.html { redirect_to @ifs_searches_path, notice: \"Ifs search was successfully updated.\" }\n format.json { render :show, status: :ok, location: @ifs_search }\n else\n format.html { render :edit }\n format.json { render json: @ifs_search.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n serialized_params = serialize_preset_search_params\n respond_to do |format|\n if @preset_search.update(serialized_params)\n format.html { redirect_to @preset_search, notice: 'Stored search was successfully updated.' }\n format.json { render :show, status: :ok, location: @preset_search }\n else\n format.html { render :edit }\n format.json { render json: @preset_search.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @flexible_search.update(flexible_search_params)\n format.html { redirect_to @flexible_search, notice: 'Flexible search was successfully updated.' }\n format.json { render :show, status: :ok, location: @flexible_search }\n else\n format.html { render :edit }\n format.json { render json: @flexible_search.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @hot_search = HotSearch.find(params[:id])\n\n respond_to do |format|\n if @hot_search.update_attributes(params[:hot_search])\n format.html { redirect_to(@hot_search, :notice => 'Hot search was successfully updated.') }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @hot_search.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def update\n @namesearch = Namesearch.find(params[:id])\n\n respond_to do |format|\n if @namesearch.update_attributes(params[:namesearch])\n format.html { redirect_to @namesearch, notice: 'Namesearch was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @namesearch.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update_search(id, params = {})\n put(\"/searches/#{id}\", params)\n end",
"def update\n respond_to do |format|\n if @leaf_spot_imm_search.update(leaf_spot_imm_search_params)\n format.html { redirect_to @leaf_spot_imm_search, notice: 'Leaf spot imm search was successfully updated.' }\n format.json { render :show, status: :ok, location: @leaf_spot_imm_search }\n else\n format.html { render :edit }\n format.json { render json: @leaf_spot_imm_search.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @search_concept = SearchConcept.find(params[:id])\n\n respond_to do |format|\n if @search_concept.update_attributes(params[:search_concept])\n format.html { redirect_to @search_concept, notice: 'Search concept was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @search_concept.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @yelp_search.update(yelp_search_params)\n format.html { redirect_to @yelp_search, notice: 'Yelp search was successfully updated.' }\n format.json { render :show, status: :ok, location: @yelp_search }\n else\n format.html { render :edit }\n format.json { render json: @yelp_search.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @recommend_recommend_hindex = Recommend::RecommendHindex.find(params[:id])\n\n respond_to do |format|\n if @recommend_recommend_hindex.update_attributes(params[:recommend_recommend_hindex])\n format.html { redirect_to @recommend_recommend_hindex, notice: 'Recommend hindex was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @recommend_recommend_hindex.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @search_query.update(search_query_params)\n format.html { redirect_to @search_query, notice: 'Search query was successfully updated.' }\n format.json { render :show, status: :ok, location: @search_query }\n else\n format.html { render :edit }\n format.json { render json: @search_query.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @search = current_search\n @search.step += 1\n\n respond_to do |format|\n if @search.update_attributes(params[:search])\n format.html { redirect_to new_search_path }\n # format.json { head :no_content }\n else\n format.html { render \"searches/steps/step#{@search.step - 1}\" }\n # format.json { render json: @search.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @person_search = PersonSearch.find(params[:id])\n\n respond_to do |format|\n if @person_search.update_attributes(params[:person_search])\n format.html { redirect_to @person_search, notice: 'Person search was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @person_search.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @haiku_search = HaikuSearch.find(params[:id])\n\n respond_to do |format|\n if @haiku_search.update_attributes(params[:haiku_search])\n format.html { redirect_to(@haiku_search, :notice => 'Haiku search was successfully updated.') }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @haiku_search.errors, :status => :unprocessable_entity }\n end\n end\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
DELETE /hp_searches/1 DELETE /hp_searches/1.json
|
def destroy
@hp_search.destroy
respond_to do |format|
format.html { redirect_to hp_searches_url, notice: 'Hp search was successfully destroyed.' }
format.json { head :no_content }
end
end
|
[
"def destroy\n @advance_search.destroy\n respond_to do |format|\n format.html { redirect_to advance_searches_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @search.destroy\n\n respond_to do |format|\n format.html { redirect_to searches_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @search.destroy\n respond_to do |format|\n format.html { redirect_to searches_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @visit_search = VisitSearch.find(params[:id])\n @visit_search.destroy\n\n respond_to do |format|\n format.html { redirect_to visit_searches_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @saved_search = SavedSearch.find(params[:id])\n @saved_search.destroy\n\n respond_to do |format|\n format.html { redirect_to saved_searches_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @saved_search = SavedSearch.find(params[:id])\n @saved_search.destroy\n\n respond_to do |format|\n format.html { head :no_content }\n format.json { head :no_content }\n end\n end",
"def destroy\n @live_search.destroy\n respond_to do |format|\n format.html { redirect_to live_searches_url}\n format.json { head :no_content }\n end\n end",
"def destroy\n @hot_search = HotSearch.find(params[:id])\n @hot_search.destroy\n\n respond_to do |format|\n format.html { redirect_to(hot_searches_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n # @flexible_search = FlexibleSearch.find(params[:id])\n @flexible_search.destroy\n respond_to do |format|\n format.html { redirect_to flexible_searches_url, notice: 'Flexible search was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @leaf_spot_imm_search.destroy\n respond_to do |format|\n format.html { redirect_to leaf_spot_imm_searches_url, notice: 'Leaf spot imm search was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @tire_search = TireSearch.find(params[:id])\n @tire_search.destroy\n\n respond_to do |format|\n format.html { redirect_to '/saved_searches' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @recommend_recommend_hindex = Recommend::RecommendHindex.find(params[:id])\n @recommend_recommend_hindex.destroy\n\n respond_to do |format|\n format.html { redirect_to recommend_recommend_hindices_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @milddew_imm_search.destroy\n respond_to do |format|\n format.html { redirect_to milddew_imm_searches_url, notice: 'Milddew imm search was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @api_v1_base_search.destroy\n respond_to do |format|\n format.html { redirect_to api_v1_base_searches_url, notice: 'Base search was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @json.destroy\n\n head :no_content\n end",
"def destroy\n @haiku_search = HaikuSearch.find(params[:id])\n @haiku_search.destroy\n\n respond_to do |format|\n format.html { redirect_to(haiku_searches_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @person_search = PersonSearch.find(params[:id])\n @person_search.destroy\n\n respond_to do |format|\n format.html { redirect_to person_searches_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @namesearch = Namesearch.find(params[:id])\n @namesearch.destroy\n\n respond_to do |format|\n format.html { redirect_to namesearches_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @hit = Hit.find(params[:id])\n @hit.destroy\n\n respond_to do |format|\n format.html { redirect_to hits_url }\n format.json { head :no_content }\n end\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
one_task.rake
  one_task task will run
another_task.rake
  another_task task will run
my_tasks_all_mine.rake
  mine:task1 task will run
  mine:task2 task has a duplicate (will not run)
  mine:task3 task has a duplicate (will not run)
  mine:task4 task will run
other_tasks_run_all.rake
  other_task1_run_me task will run
  other_task2_run_me task has a duplicate (will not run)
  other_task3_run_me task has a duplicate (will not run)
  other_task_not_run_yet task will run
other_tasks_mixed_duplicates.rake
  other_task2_run_me task has a duplicate (will not run)
  other_task3_run_me task has a duplicate (will not run)
task2_duplicate.rake
  mine:task2 task has a duplicate (will not run)
task4_duplicate.rake
  mine:task4 task has a duplicate (will not run)
|
def create_rake_files(base_dir)
one_task_fn = File.join(base_dir, 'one_task.rake')
make_tasks_in_file(['one_task'], one_task_fn) if ok_to_create?(one_task_fn)
another_task_fn = File.join(base_dir, 'another_task.rake')
make_tasks_in_file(['another_task'], another_task_fn) if ok_to_create?(another_task_fn)
my_tasks_mine_fn = File.join(base_dir, 'my_tasks_all_mine.rake')
make_tasks_in_file(['task1', 'task2', 'task3', 'task4'], my_tasks_mine_fn, namespace: 'mine') if ok_to_create?(my_tasks_mine_fn)
tasks_run_all_fn = File.join(base_dir, 'other_tasks_run_all.rake')
make_tasks_in_file(['other_task1_run_me', 'other_task2_run_me', 'other_task3_run_me', 'other_task_not_run_yet'], tasks_run_all_fn) if ok_to_create?(tasks_run_all_fn)
tasks_mixed_duplicates_fn = File.join(base_dir, 'other_tasks_mixed_duplicates.rake')
make_tasks_in_file(['other_task2_run_me', 'other_task3_run_me'], tasks_mixed_duplicates_fn) if ok_to_create?(tasks_mixed_duplicates_fn)
task2_duplicate_fn = File.join(base_dir, 'task2_duplicate.rake')
make_tasks_in_file(['task2'], task2_duplicate_fn, namespace: 'mine') if ok_to_create?(task2_duplicate_fn)
task4_duplicate_fn = File.join(base_dir, 'task4_duplicate.rake')
make_tasks_in_file(['task4'], task4_duplicate_fn, namespace: 'mine') if ok_to_create?(task4_duplicate_fn)
end
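
Hypothetical sketches of the two helpers the fixture builder relies on (the real implementations live elsewhere and may differ): ok_to_create? guards against overwriting an existing file, and make_tasks_in_file writes trivial, optionally namespaced tasks into a .rake file.

def ok_to_create?(filename)
  !File.exist?(filename)
end

def make_tasks_in_file(task_names, filename, namespace: nil)
  body = task_names.map do |name|
    "task :#{name} do\n  puts '#{name} ran'\nend\n"
  end.join("\n")
  # Wrap all tasks in a namespace block when one is given
  body = "namespace :#{namespace} do\n#{body.gsub(/^/, '  ')}end\n" if namespace
  File.write(filename, body)
end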
|
[
"def all_tasks_to_run\n self.all_tasks - all_tasks_previously_run - all_tasks_duplicates\n end",
"def all_evaluated_tasks_in_files(given_dir)\n eval_tasks = []\n\n [SOMETASK1, SOMETASK2, SOMETASK3, SOMETASK4, SOMETASK5].each do |sometask|\n eval_tasks << OneTimeTasker::EvaluatedRakeTask.new(scoped(sometask), File.join(q1_dir(given_dir), SOME_TASKS_RAKEFILE))\n end\n\n eval_tasks << OneTimeTasker::EvaluatedRakeTask.new(scoped(GOODTASK), File.join(blorf_dir(given_dir), GOOD_TASK2_RAKEFILE))\n eval_tasks << OneTimeTasker::EvaluatedRakeTask.new(scoped(SET_WHEN_APPROVED_TASK), File.join(blorf_dir(given_dir), SET_WHEN_APPR2_RAKEFILE))\n eval_tasks << OneTimeTasker::EvaluatedRakeTask.new(scoped(SIMPLETASK), File.join(blorf_dir(given_dir), SIMPLE_TASK_RAKEFILE))\n\n eval_tasks << OneTimeTasker::EvaluatedRakeTask.new(scoped(SET_WHEN_APPROVED_TASK), File.join(blorf2_dir(given_dir), SET_WHEN_APPR_RAKEFILE))\n\n [SIMPLETASK, SOMETASK5].each do |blorf2_simplemore_task|\n eval_tasks << OneTimeTasker::EvaluatedRakeTask.new(scoped(blorf2_simplemore_task), File.join(blorf2_dir(given_dir), SIMPLE_AND_SOME5_RAKEFILE))\n end\n\n eval_tasks << OneTimeTasker::EvaluatedRakeTask.new(scoped(SIMPLETASK), File.join(given_dir, SIMPLE_TASK_RAKEFILE))\n eval_tasks << OneTimeTasker::EvaluatedRakeTask.new(scoped(RUNTHISTASK), File.join(given_dir, RUN_THIS_RAKEFILE))\n\n eval_tasks\n end",
"def safe_rake_tasks(task)\n begin\n log \"#{task} started at #{Time.now}\"\n eval \"Rake::Task['#{task}'].reenable\"\n eval \"Rake::Task['#{task}'].invoke\"\n log \"#{task} completed successfully at #{Time.now}\"\n rescue Exception => e\n log \"#{task} failed at #{Time.now}\", true\n end\n end",
"def set_and_log_task_as_duplicate(duplicated_task, tasks_with_same_name)\n\n dup_filname = duplicated_task.filename\n\n # Get all of the other tasks that have this task name and set them as being a duplicate of this one\n the_other_dup_tasks = tasks_with_same_name.reject { |other_task| other_task == duplicated_task }\n # set these other tasks as having this one as a duplicate\n the_other_dup_tasks.each { |other_task| other_task.add_duplicate(dup_filname) }\n\n log_as_duplicate(duplicated_task)\n\n duplicated_task\n end",
"def run!(*tasks_to_run)\n tasks_to_run.flatten!\n\n tasks_to_run.map! do |ttr| \n unless ret = self.__tasks.find {|t| t.__name == ttr } \n seq = self.__sequences.find {|t| t.__name == ttr }\n ret = seq.__task_names.map {|tn| self.__tasks.find {|t| t.__name == tn }}.compact if seq\n end\n ret\n end\n tasks_to_run.flatten!\n tasks_to_run.compact!\n\n ret = []\n tasks_to_run.each do |task|\n ret << task.__build_commands(self.__tasks)\n Runner.execute!(:task => task, :tasks => self.__tasks, :servers => self.__servers, :silent => self.__options[:silent], :verbose => self.__options[:verbose])\n end\n\n $stdout << \"\\033[0m\"\n ret.flatten # return this for tests\n end",
"def rakefiles_with_tasks_to_run\n\n rakefiles_with_tasks = new_hash_of_eval_rakefiles\n\n # This isn't efficient, but it's clear:\n all_tasks_to_run.each do |task_to_run|\n rakefilename = task_to_run.filename\n ev_rakefile_to_run = self.all_rakefiles[rakefilename]\n ev_rakefile_to_run.tasks_to_run << task_to_run\n rakefiles_with_tasks[rakefilename] = ev_rakefile_to_run\n end\n\n rakefiles_with_tasks\n end",
"def set_and_log_duplicate_tasks(evaluated_tasks)\n\n return [] if evaluated_tasks.empty?\n\n # get all of the task_names that are duplicates (TODO ruby 2.7: replace this with .tally)\n duplicated_names = evaluated_tasks.group_by(&:name).select { |_name, tasks | tasks.size > 1 }.keys\n\n # Guard condition: no duplicate names, so just return\n return [] if duplicated_names.empty?\n\n # get the duplicated tasks for each name; return all of them\n duplicated_names.map{|dup_name| duplicated_tasks_for_name(dup_name, evaluated_tasks) }.flatten\n end",
"def repeated_task_executions()\n return @repeated_task_execs if @repeated_task_execs\n result = {}\n tasks = {}\n @test_cases.each do |tc|\n if tc.automation_run\n tc.repeat_task_uuids.each do |rt|\n #puts \"#{rt.inspect}\"\n rt = [rt] if !rt.kind_of?(Array)\n tc_id = tc #tc.uuid\n result[tc_id] = {} if !result[tc_id]\n result[tc_id][rt] = [] if !result[tc_id][rt]\n rt.each do |repeated_task|\n #execs = tc.task_executions(repeated_task)\n #tasks[repeated_task] = Task.find(\"uuid\"=>repeated_task)[0] if !tasks[repeated_task]\n tasks[repeated_task] = @automation.get_task(repeated_task, true)\n #puts \"===> #{tasks[repeated_task]}\"\n execs = TaskExecution.find(\n :task_id => tasks[repeated_task].id, \n :automation_run_id => tc.automation_run.id\n ).to_a\n execs.sort! { |a,b|\n a.start_time <=> b.start_time\n }\n execs.each_with_index do |exec,idx|\n if result[tc_id][rt].size <= idx\n result[tc_id][rt] << []\n end\n result[tc_id][rt][idx] << exec\n end\n end\n end\n end\n end\n @repeated_task_execs = result\n return result\n end",
"def add_hardcoded_duplicates(task)\n task.add_duplicate('t1 duplicate 1')\n task.add_duplicate('t1 duplicate 2')\n end",
"def rest_tasks(name, file, png_file, svg_file)\n\n desc \"generate all files for ./#{file}\"\n task name => png_file\n task name => svg_file\n\n if @viewer\n\n show_name = \"view_#{name}\"\n desc \"#{@viewer} ./#{png_file}\"\n task show_name => png_file do\n sh \"#{@viewer} #{png_file}\"\n end\n\n watch_name = \"watch_#{name}\"\n desc \"watch #{@viewer} ./#{png_file}\"\n task watch_name do\n # sh \"#{@viewer} #{png_file}\"\n sh \"bash ./watcher.sh #{@viewer} #{png_file} #{name}\"\n end\n\n\n end\n\n # append tasks to default task\n task :all => png_file\n task :all => svg_file\n\nend",
"def next_task()\n @tasks.map {|task| [task[0] * task[1].priority(), task[1]] }.\n sort {|a, b| b[0] <=> a[0]}.\n each do |task_array|\n debug(\"Running #{task_array[1].class.to_s} which has priority #{task_array[0]}\")\n result = task_array[1].run()\n clear_screen()\n break if result\n end\n end",
"def multitask(args, &block)\n Rake::MultiTask.define_task(args, &block)\nend",
"def alias_task(new_name, old_name) \n original_task = Rake.application.tasks_hash[old_name.to_s]\n copied_task = original_task.dup\n Rake.application.tasks_hash[new_name.to_s] = copied_task\n Rake.application.tasks_hash[old_name.to_s] = original_task\n copied_task.name = new_name\n copied_task.clone_some_inside_vars\nend",
"def next_task(tasks=@@DISTANCE_TASKS+@@INCLADE_TASKS, save=true)\n tasks.find do |t|\n if metadata[\"run_#{t}\"]==false or\n (!is_clade? and @@INCLADE_TASKS.include?(t) and\n metadata[\"run_#{t}\"]!=true)\n false\n else\n add_result(t, save).nil?\n end\n end\n end",
"def next_task(tasks = @@DISTANCE_TASKS+@@INCLADE_TASKS, save = true)\n tasks.find do |t|\n if metadata[\"run_#{t}\"] == false or\n (!is_clade? and @@INCLADE_TASKS.include?(t) and\n metadata[\"run_#{t}\"] != true)\n false\n else\n add_result(t, save).nil?\n end\n end\n end",
"def make_tasks\n make_clean_task\n make_wix_folder_task\n make_copy_file_tasks\n make_sourcery_wxs_file_task\n make_sourcery_wixobj_file_task\n make_product_wxs_file_task\n make_product_wixobj_file_task\n make_msi_file_task\n make_msi_task\n make_test_task\n end",
"def taskthread\r\n\t\tself.tasks.shuffle! # tasks in random order\r\n \r\n\t\t# may run multiple tasks at once (this may cause weird things to happen with\r\n\t\t# threading, but I don't care; you shouldn't trust demons anyway)\r\n\t \r\n\t\t# if bool(int(random.random() * 4)):\r\n\t\tThread.new(&:taskthread) unless rand(4).zero?\r\n \r\n \r\n\t\twhile self.active and (!$kill)\r\n\t\t\tself.tasks.select(&:active).each do |task| #for task in [t for t in self.tasks if t.active]:for task in [t for t in self.tasks if t.active]:\r\n\t\t\t\tbreak unless self.active #if not self.active: break\r\n\t\t\t\ttask.run()\r\n\t\t\t\t#task.active = not bool(int(random.random()*4)) # 1 in 4 chance a task will be repeated\r\n\t\t\t\ttask.active = rand(4).zero? # 1 in 4 chance a task will be repeated\r\n\t\t\tend\r\n\t\t\tsleep(0.05)\r\n\t\t\tself.active = false if self.tasks.select(&:active).empty? #if not [t for t in self.tasks if t.active]:\r\n\t\tend\r\n\tend",
"def run_tasks()\n self.task_queue.each do |task|\n\n self.task_list << task\n trigger_event(:start,:task)\n\n result = self.send(task[:name],*(task[:args]))\n if result[:status] == :failed\n break\n end\n\n\n self.task_list.last[:result]=result\n trigger_event(:end,:task)\n end\n end",
"def rubaidh_run_rake(*tasks)\n rake = fetch(:rake, 'rake')\n rails_env = fetch(:rails_env, 'production')\n\n tasks.each do |task|\n run \"cd #{latest_release}; #{rake} RAILS_ENV=#{rails_env} #{task}\"\n end\nend"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
If the file exists, ask whether it is OK to overwrite it; return true only if the user says it is OK to overwrite. If the file does not exist, return true (it is OK to create it).
|
def ok_to_create?(filepath)
File.exist?(filepath) ? prompt_to_overwrite?(filepath) : true
end
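
prompt_to_overwrite? is referenced here but not shown; a plausible sketch of it (the prompt wording and the y/N convention are assumptions) would ask on standard input and treat a leading 'y' as consent:

# Hypothetical helper: ask whether an existing file may be overwritten.
def prompt_to_overwrite?(filepath)
  print "#{filepath} already exists. Overwrite? [y/N] "
  gets.to_s.strip.downcase.start_with?('y')
end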
|
[
"def write_okay?(file)\n if File.exist?(file)\n if $FORCE\n FileUtils.rm_r(file)\n else\n return nil.status(:already_exists, file)\n end\n end\n true\n end",
"def overwrite?(path)\n if File.exist?(path)\n return yes? \"A file already exists at '#{ path }'. Do you want to overwrite? [y/n]\"\n end\n true\n end",
"def write_prep outfile\n return true unless File.exists? outfile\n\n # OK, file exists. See if we can clobber it\n if clobber?\n message { sprintf \"Overwriting existing file '%s'\", outfile }\n return true\n end\n\n message { sprintf \"Not overwriting existing file '%s'\", outfile }\n false\n end",
"def overwrite_if_exists?\n if File.exist?(@local_configuration_file)\n warning \"Configuration file already exists in #{@local_configuration_file}.\"\n warning \"Would you like to overwrite?\"\n exit unless yes?\n end\n end",
"def overwrite?\r\n result = FileSystemEntry.overwrite_allowed? || @overwrite_once\r\n @overwrite_once = false\r\n return result\r\n end",
"def can_write_file?(file)\n force? || ! File.exists?(file)\n end",
"def user_allows_overwrite?(file, opts = {})\n if File.exist?(File.expand_path(file)) && !opts[:force]\n print I18n.t(:overwrite) % file\n answer = HighLine::SystemExtensions.get_character.chr\n puts answer\n return answer =~ /^y/i\n else\n return true\n end\n end",
"def check_destination\n if File.exists?(@dest)\n overwrite = nil\n while overwrite.nil?\n print \"WARNING: File #{@dest} already exists! Overwrite it? [YES|no]: \"\n answer = gets.downcase.strip\n if /^(|y|yes)$/ =~ answer\n overwrite = true\n elsif /^(n|no)$/ =~ answer\n overwrite = false\n end\n end\n if !overwrite\n puts \"Aborted!\"\n exit\n end\n end\n end",
"def verify_overwriting\n overwrite = ask_nicely\n\n while overwrite.empty?\n overwrite = ask_nicely\n end\n\n overwrite == 'y' ? FileUtils.rm_rf(Frank.export.path) : exit\n end",
"def write_ready?(file_path)\n if File.exist?(file_path)\n @logger.debug \"File [#{file_path}] exists already\"\n unless @overwrite\n @logger.warn 'Overwrite NOT allowed. File is not NOT write ready'\n return false\n end\n @logger.info \"Delete file [#{file_path}]\"\n\n # TODO: This method should not have any side-effects\n File.delete file_path\n end\n @logger.debug \"Path [#{file_path}] is write ready\"\n true\n end",
"def warning new_name\n if File.exists? new_name\n puts \"Are you sure you want to overwrite the file. Y or N?\"\n answer = (gets.chomp).downcase\n if answer == y\n exit\n end\n end\nend",
"def overwrite?\n @overwrite = new_resource.overwrite if @overwrite.nil?\n !!@overwrite\n end",
"def overwrite_file?(file)\n return response_log[file] if response_log.has_key? file\n\n response_log[file] = true\n\n unless overwrite_all || force\n if skip_all\n response_log[file] = false\n elsif File.exist? file\n response_log[file] = prompt_user_to_overwrite file\n end\n end\n\n response_log[file]\n end",
"def file_exists?(file)\n false\n end",
"def original_file_exists?\n !original_file_paths.empty?\n end",
"def destination_file_exist?\n File.exist?(final_destination_path)\n end",
"def overwrite_allowed?\r\n @@overwrite_allowed\r\n end",
"def force_file_collision?( dst )\n dst = dst.sub(%r/#{site}\\/?/, '')\n print \"overwrite #{dst}? [(Y)es (n)o (q)uit] \"\n case gets\n when %r/q/i then abort 'user asked to quit'\n when %r/n/i then :skip\n when %r/y/i then :force\n when %r/\\s*/ then :force\n else force_file_collision?(dst) end\n rescue\n retry\n end",
"def confirm_or_create_file(sftp, remote_file, local_file, local_path)\r\n\t\t# Check to see if file exists\r\n \t\tbegin\r\n \t\t\treturn sftp.stat(remote_file)\r\n \t\trescue Net::SFTP::Operations::StatusException => e\r\n \t\t\traise unless e.code == 2\r\n \t\t\t# file doesn't exist on remote, so create it\r\n \t\t\tputs \"Create - \" + local_file.sub(local_path, '')\r\n \t\t\tsftp.put_file(local_file, remote_file)\r\n \t\t\tsftp.setstat(remote_file, :permissions => @file_perm)\r\n \t\t\treturn nil\r\n \t\tend\r\n\tend"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Make rake tasks for each of the tasks in tasknames, in the file named filepath. Makes all directories needed for filepath if they don't already exist.
|
def make_tasks_in_file(tasknames = [], filepath = '.', task_body = "\n", namespace: '')
indent = ""
filedir = File.dirname(filepath)
FileUtils.mkdir_p(filedir) unless Dir.exist?(filedir)
File.open(filepath, 'w') do |f|
unless namespace.empty?
indent = " "
f.puts namespace_start(namespace)
end
tasknames.each do |taskname|
f.puts simple_rake_task(taskname, indent: indent, task_body: task_body)
end
f.puts namespace_end unless namespace.empty?
end
filepath
end
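
For example (the path is illustrative), two tasks wrapped in the 'mine' namespace:

# Writes lib/tasks/example.rake containing mine:alpha and mine:beta,
# creating lib/tasks/ first if it does not exist.
make_tasks_in_file(['alpha', 'beta'], 'lib/tasks/example.rake', namespace: 'mine')

The namespace_start and namespace_end helpers are not shown here; they are assumed to emit the opening "namespace :mine do" line and the matching closing "end" line.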
|
[
"def make_tasks_in_file(tasknames = [], filepath = '.', task_body = \"\\n\")\n filedir = File.dirname(filepath)\n FileUtils.mkdir_p(filedir) unless Dir.exist?(filedir)\n\n File.open(filepath, 'w') do |f|\n tasknames.each do | taskname |\n f.puts simple_rake_task(taskname, task_body)\n end\n end\n\n filepath\n end",
"def create_rake_files(base_dir)\n\n one_task_fn = File.join(base_dir, 'one_task.rake')\n make_tasks_in_file(['one_task'], one_task_fn) if ok_to_create?(one_task_fn)\n\n another_task_fn = File.join(base_dir, 'another_task.rake')\n make_tasks_in_file(['another_task'], another_task_fn) if ok_to_create?(another_task_fn)\n\n my_tasks_mine_fn = File.join(base_dir, 'my_tasks_all_mine.rake')\n make_tasks_in_file(['task1', 'task2', 'task3', 'task4'], my_tasks_mine_fn, namespace: 'mine') if ok_to_create?(my_tasks_mine_fn)\n\n tasks_run_all_fn = File.join(base_dir, 'other_tasks_run_all.rake')\n make_tasks_in_file(['other_task1_run_me', 'other_task2_run_me', 'other_task3_run_me', 'other_task_not_run_yet'], tasks_run_all_fn) if ok_to_create?(tasks_run_all_fn)\n\n tasks_mixed_duplicates_fn = File.join(base_dir, 'other_tasks_mixed_duplicates.rake')\n make_tasks_in_file(['other_task2_run_me', 'other_task3_run_me'], tasks_mixed_duplicates_fn) if ok_to_create?(tasks_mixed_duplicates_fn)\n\n task2_duplicate_fn = File.join(base_dir, 'task2_duplicate.rake')\n make_tasks_in_file(['task2'], task2_duplicate_fn, namespace: 'mine') if ok_to_create?(task2_duplicate_fn)\n\n task4_duplicate_fn = File.join(base_dir, 'task4_duplicate.rake')\n make_tasks_in_file(['task4'], task4_duplicate_fn, namespace: 'mine') if ok_to_create?(task4_duplicate_fn)\n end",
"def make_tasks\n make_clean_task\n make_wix_folder_task\n make_copy_file_tasks\n make_sourcery_wxs_file_task\n make_sourcery_wixobj_file_task\n make_product_wxs_file_task\n make_product_wixobj_file_task\n make_msi_file_task\n make_msi_task\n make_test_task\n end",
"def generate_rake_tasks\n @rake_tasks = outputs.map do |output, inputs|\n additional_paths = []\n inputs.each do |input|\n\n create_file_task(input.fullpath).dynamic do\n additional_paths += additional_dependencies(input)\n end\n end\n additional_paths.each { |path| create_file_task(path) }\n\n create_file_task(output.fullpath, inputs.map(&:fullpath)) do\n output.create { generate_output(inputs, output) }\n end\n end\n end",
"def generate_rake_tasks\n @rake_tasks = outputs.map do |output, inputs|\n dependencies = []\n\n inputs.each do |input|\n dependencies << input.fullpath\n dependencies += input_dependencies(input)\n end\n\n dependencies.each { |path| create_file_task(path) }\n\n create_file_task(output.fullpath, dependencies) do\n output.create { generate_output(inputs, output) }\n end\n end\n end",
"def add_rakefile_and_tasks(rakefilename, task_names)\n\n # creates a new EvaluatedRakeFile entry if needed\n ev_rakefile = self.all_rakefiles[rakefilename]\n ev_rakefile.add_task_names(task_names)\n self.all_tasks.concat(ev_rakefile.all_tasks)\n\n ev_rakefile\n end",
"def make_simple_rakefiles(directory, num = 1, start_num: 0)\n files_created = []\n num.times do |i|\n task_num = i + start_num\n fname = File.join(directory, \"test#{task_num}.rake\")\n files_created << fname\n File.open(fname, 'w') do |f|\n f.puts simple_rake_task(\"task#{task_num}\")\n end\n end\n files_created\n end",
"def import_tasks file_name\n raise IOError, \"File not found: #{file_name}\" unless File.exists?(file_name)\n\n tasks = {}\n File.open(file_name, 'r') do |f|\n # Write array of task hashes.\n tasks = YAML.load(f)\n end\n\n existing_tasks = current_task_names\n\n tasks.each do |name, data|\n if existing_tasks.include?(name)\n modify_task(data, name)\n else\n create_task(data)\n end\n end\n end",
"def create_copy_file_tasks(source_files, source_root, dest_root, invoking_task)\n source_files.each do |source|\n target = source.pathmap(\"%{#{source_root},#{dest_root}}p\")\n directory File.dirname(target)\n file target => [File.dirname(target), source] do |t|\n cp source, target\n end\n task invoking_task => target\n end\nend",
"def generate_tasks\n template \"flex_tasks.rake\", \"lib/tasks/flex_tasks.rake\"\n end",
"def run_rake(*tasks)\n tasks.each do |task|\n run \"cd #{latest_release}; rake RAILS_ENV=#{environment} #{task}\"\n end\nend",
"def build_task(name, dependencies=[], params={})\n path = aws_dir(\".#{name}-built\")\n\n file path => dependencies do\n begin\n yield if block_given?\n touch path\n rescue => e\n HipChat.log \"<b>#{name}</b> FAILED!\", color: 'red', notify: 1\n HipChat.log \"/quote #{e}\\n#{CDO.backtrace e}\", message_format: 'text'\n raise $!, $!.message, []\n end\n end\n\n path\nend",
"def make_copy_file_tasks\n @files.each do |source, destination|\n next if source == destination\n next if Rake::FileTask.task_defined? destination\n type = File.directory?(source) ? 'folder' : 'file'\n task = Rake::FileTask.define_task destination do |t|\n folder = File.dirname(t.name)\n FileUtils.mkpath folder unless File.directory? folder\n FileUtils.copy source, t.name\n end\n task.comment = \"Create the #{destination} #{type}\"\n task.enhance ['wix']\n if Rake::FileTask.task_defined? source\n task.enhance [source]\n end\n end\n end",
"def define_tasks(path)\n output = Rake.chunks_dir + \"/\" + path\n define_split_file_task(path, output)\n SplitTask.define_common_tasks\n SplitTask.connect_common_tasks(output)\n end",
"def rubaidh_run_rake(*tasks)\n rake = fetch(:rake, 'rake')\n rails_env = fetch(:rails_env, 'production')\n\n tasks.each do |task|\n run \"cd #{latest_release}; #{rake} RAILS_ENV=#{rails_env} #{task}\"\n end\nend",
"def tasks_file(name)\n File.expand_path(File.join(@path, %w[ application tasks ], name))\n end",
"def createTask(project, target)\n task \"#{project}.#{target}\" do\n print \"#######################################################\\n\"\n invokeRake project, target\n end\nend",
"def touch_prereqs(task, env_string, dependencies=nil, already_touched=nil)\n task = task.to_s\n dependencies = list_dependencies(task, env_string) unless dependencies\n already_touched = {} unless already_touched\n \n dependencies[task].each do |subtask|\n if dependencies.has_key?(subtask)\n touch_prereqs(subtask, env_string, dependencies, already_touched)\n end\n full_path = File.expand_path(subtask, ENV['OUT'])\n # All file tasks must contain one of these characters.\n if subtask =~ /\\.|\\// and !already_touched[full_path]\n $stderr.puts \"DEBUG: touch #{subtask}\" if ENV['DEBUG']\n FileUtils.touch(full_path)\n already_touched[full_path] = true\n sleep 1\n end\n end\nend",
"def rest_tasks(name, file, png_file, svg_file)\n\n desc \"generate all files for ./#{file}\"\n task name => png_file\n task name => svg_file\n\n if @viewer\n\n show_name = \"view_#{name}\"\n desc \"#{@viewer} ./#{png_file}\"\n task show_name => png_file do\n sh \"#{@viewer} #{png_file}\"\n end\n\n watch_name = \"watch_#{name}\"\n desc \"watch #{@viewer} ./#{png_file}\"\n task watch_name do\n # sh \"#{@viewer} #{png_file}\"\n sh \"bash ./watcher.sh #{@viewer} #{png_file} #{name}\"\n end\n\n\n end\n\n # append tasks to default task\n task :all => png_file\n task :all => svg_file\n\nend"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Code for a simple rake task. The body of the task is given by :task_body.
|
def simple_rake_task(task_name = 'test_task', indent: '', task_body: "\n")
"\n" + indent +
"desc 'task named #{task_name}'\n" +
indent + "task :#{task_name} do\n" +
indent + " " + task_body +
indent + "end\n\n"
end
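
For instance, simple_rake_task('task1', indent: '  ') returns the following snippet, surrounded by a leading and a trailing blank line (the body line holds only whitespace because task_body defaults to "\n"):

  desc 'task named task1'
  task :task1 do

  end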
|
[
"def simple_rake_task(task_name = 'test_task', task_body = \"\\n\")\n \"namespace :shf do\\n\" +\n \" namespace :test do\\n\" +\n \" desc 'task named #{task_name}'\\n\" +\n \" task :#{task_name} do\\n\" +\n task_body +\n \" end\\n\" +\n \" end\\n \" +\n \"end\\n\"\n end",
"def description\n\t\t\"This just does a generic (untracked) task!\"\n\tend",
"def invoke_task(task_string); end",
"def createSimpleTask _obj, _args\n \"_obj createSimpleTask _args;\" \n end",
"def task(name, options={}, &block)\n raise ArgumentError, \"expected a block\" unless block\n\n if @next_description\n options = options.merge(:desc => @next_description)\n @next_description = nil\n end\n\n actor.define_task(name, options, &block)\n end",
"def add_a_task\n\n end",
"def do_task(task)\n case task.type\n when 'create'\n logger.info \"create message to #{task.AM_URN}\" \n @am_list[task.AM_URN].create_resource(task.resource_uuid)\n when 'release' then logger.info 'release message'\n end\n end",
"def run taskname\n Rake::Task[\"#{@flow_id}:#{taskname}\"].invoke\n end",
"def print_task(task)\n print \"==> \".info, task.bold, \"\\n\"\nend",
"def taskNull \n \"taskNull\" \n end",
"def create_task(options={})\n\t\t\tTask.create({\n\t\t\t\ttitle: \"title example\",\n\t\t\t\tdifficult: 1,\n\t\t\t\tdescription: \"description example\"*20\n\t\t\t}.merge(options))\n\t\tend",
"def explain_true(task); end",
"def create_task\n return nil unless @service\n @task_definition = @service.NewTask(0)\n task_registration\n task_prinicipals\n task_settings\n task_triggers\n task_action\n register_task\nend",
"def task(name, description=nil, &block)\n puts \"adding task :#{name}\"\n in_root(\"lib/tasks\") do |folder|\n File.open(\"#{folder}/application.rake\", \"a+\") do |f|\n if block_given?\n f.write(code_for(block))\n else\n f.write(data)\n end\n end\n end\n end",
"def task name, options={}, &block\n task = Task.new name, options, &block\n TodoRunner.registry[name] = task\n end",
"def project_task name, &block\r\n FreeBASIC::ProjectTask.new name, &block\r\nend",
"def task(&block) \n task = TaskRunnable.new\n task.proc = Proc.new(block)\n task\n end",
"def default_task_name; end",
"def task(body, priority: T.unsafe(nil), timeout: T.unsafe(nil), **keyword_args); end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Sets the input & output paths for the given script build phase.
|
def set_input_output_paths(phase, input_paths_by_config, output_paths_by_config)
if input_output_paths_use_filelist?(phase)
[input_paths_by_config, output_paths_by_config].each do |hash|
hash.each do |file_list, files|
generator = Generator::FileList.new(files)
Xcode::PodsProjectGenerator::TargetInstallerHelper.update_changed_file(generator, file_list.file_list_path)
end
end
phase.input_paths = nil
phase.output_paths = nil
phase.input_file_list_paths = input_paths_by_config.each_key.map(&:file_list_relative_path).uniq
phase.output_file_list_paths = output_paths_by_config.each_key.map(&:file_list_relative_path).uniq
else
input_paths = input_paths_by_config.values.flatten(1).uniq
output_paths = output_paths_by_config.values.flatten(1).uniq
TargetIntegrator.validate_input_output_path_limit(input_paths, output_paths)
phase.input_paths = input_paths
phase.output_paths = output_paths
phase.input_file_list_paths = nil
phase.output_file_list_paths = nil
end
end
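
The keys of input_paths_by_config and output_paths_by_config must respond to file_list_path and file_list_relative_path (CocoaPods passes XCFileListConfigKey instances, as in the copy-resources and copy-dSYMs phases shown nearby); a minimal illustration of the expected shape, with made-up paths:

# Illustrative only: any object exposing these two readers can serve as a key.
ConfigKey = Struct.new(:file_list_path, :file_list_relative_path)

key = ConfigKey.new(
  'Pods/Target Support Files/Pods-App/Pods-App-frameworks-Debug-input-files.xcfilelist',
  '${PODS_ROOT}/Target Support Files/Pods-App/Pods-App-frameworks-Debug-input-files.xcfilelist'
)
input_paths_by_config = { key => ['${PODS_ROOT}/Target Support Files/Pods-App/Pods-App-frameworks.sh'] }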
|
[
"def create_or_update_copy_xcframeworks_script_phase_to_target(native_target, script_path, input_paths_by_config = {}, output_paths_by_config = {})\n phase = TargetIntegrator.create_or_update_shell_script_build_phase(native_target, BUILD_PHASE_PREFIX + COPY_XCFRAMEWORKS_PHASE_NAME)\n phase.shell_script = %(\"#{script_path}\"\\n)\n TargetIntegrator.set_input_output_paths(phase, input_paths_by_config, output_paths_by_config)\n reorder_script_phase(native_target, phase, :before_compile)\n end",
"def add_copy_resources_script_phase\n unless target.includes_resources?\n native_targets.each do |native_target|\n TargetIntegrator.remove_copy_resources_script_phase_from_target(native_target)\n end\n return\n end\n\n script_path = target.copy_resources_script_relative_path\n input_paths_by_config = {}\n output_paths_by_config = {}\n if use_input_output_paths\n target.resource_paths_by_config.each do |config, resource_paths|\n input_paths_key = XCFileListConfigKey.new(target.copy_resources_script_input_files_path(config),\n target.copy_resources_script_input_files_relative_path)\n input_paths_by_config[input_paths_key] = [script_path] + resource_paths\n\n output_paths_key = XCFileListConfigKey.new(target.copy_resources_script_output_files_path(config),\n target.copy_resources_script_output_files_relative_path)\n output_paths_by_config[output_paths_key] = TargetIntegrator.resource_output_paths(resource_paths)\n end\n end\n\n native_targets.each do |native_target|\n # Static library targets cannot include resources. Skip this phase from being added instead.\n next if native_target.symbol_type == :static_library\n TargetIntegrator.create_or_update_copy_resources_script_phase_to_target(native_target, script_path,\n input_paths_by_config,\n output_paths_by_config)\n end\n end",
"def set_script_path path\n script_processor.set_path path\n end",
"def add_copy_resources_script_phase\n phase_name = \"Copy Pods Resources\"\n native_targets.each do |native_target|\n phase = native_target.shell_script_build_phases.select { |bp| bp.name == phase_name }.first ||\n native_target.new_shell_script_build_phase(phase_name)\n path = target.copy_resources_script_relative_path\n phase.shell_script = %{\"#{path}\"\\n}\n phase.show_env_vars_in_log = '0'\n end\n end",
"def add_copy_dsyms_script_phase(native_target)\n script_path = \"${PODS_ROOT}/#{target.copy_dsyms_script_path.relative_path_from(target.sandbox.root)}\"\n dsym_paths = PodTargetInstaller.dsym_paths(target)\n bcsymbolmap_paths = PodTargetInstaller.bcsymbolmap_paths(target)\n\n if dsym_paths.empty? && bcsymbolmap_paths.empty?\n script_phase = native_target.shell_script_build_phases.find do |bp|\n bp.name && bp.name.end_with?(UserProjectIntegrator::TargetIntegrator::COPY_DSYM_FILES_PHASE_NAME)\n end\n native_target.build_phases.delete(script_phase) if script_phase.present?\n return\n end\n\n phase_name = UserProjectIntegrator::TargetIntegrator::BUILD_PHASE_PREFIX + UserProjectIntegrator::TargetIntegrator::COPY_DSYM_FILES_PHASE_NAME\n phase = UserProjectIntegrator::TargetIntegrator.create_or_update_shell_script_build_phase(native_target, phase_name)\n phase.shell_script = %(\"#{script_path}\"\\n)\n\n input_paths_by_config = {}\n output_paths_by_config = {}\n if use_input_output_paths?\n input_file_list_path = target.copy_dsyms_script_input_files_path\n input_file_list_relative_path = \"${PODS_ROOT}/#{input_file_list_path.relative_path_from(target.sandbox.root)}\"\n input_paths_key = UserProjectIntegrator::TargetIntegrator::XCFileListConfigKey.new(input_file_list_path, input_file_list_relative_path)\n input_paths = input_paths_by_config[input_paths_key] = []\n input_paths.concat([dsym_paths, *bcsymbolmap_paths].flatten.compact)\n\n output_file_list_path = target.copy_dsyms_script_output_files_path\n output_file_list_relative_path = \"${PODS_ROOT}/#{output_file_list_path.relative_path_from(target.sandbox.root)}\"\n output_paths_key = UserProjectIntegrator::TargetIntegrator::XCFileListConfigKey.new(output_file_list_path, output_file_list_relative_path)\n output_paths = output_paths_by_config[output_paths_key] = []\n\n dsym_output_paths = dsym_paths.map { |dsym_path| \"${DWARF_DSYM_FOLDER_PATH}/#{File.basename(dsym_path)}\" }\n bcsymbolmap_output_paths = bcsymbolmap_paths.map { |bcsymbolmap_path| \"${DWARF_DSYM_FOLDER_PATH}/#{File.basename(bcsymbolmap_path)}\" }\n output_paths.concat([dsym_output_paths, *bcsymbolmap_output_paths].flatten.compact)\n end\n\n UserProjectIntegrator::TargetIntegrator.set_input_output_paths(phase, input_paths_by_config, output_paths_by_config)\n end",
"def initialize(params)\n\t\tsuper\n\t\tinputs = ensureArray(requireParam(params, :inputs))\n\t\t@cmd = requireParam(params, :cmd)\n\t\toutputs = ensureArray(requireParam(params, :output)).map {|filepath| if filepath.relative? then canonicalizeFilepath(filepath, Dir.pwd()) else filepath end}\n\t\t\n\t\tinputs.each {|filepath| addPrereq filepath}\n\t\toutputs.each {|filepath| addTarget filepath; BuildEnv::setEntityBuilt filepath}\n\t\toutputs.each {|filepath| BuildEnv::addGeneratedFile filepath}\n\tend",
"def output_path=(path)\n @output_path = path\n end",
"def set_run_script_to_always_run_when_no_input_or_output_files_exist(project:)\n project.targets.each do |target|\n run_script_build_phases = target.build_phases.filter { |phase| phase.is_a?(Xcodeproj::Project::Object::PBXShellScriptBuildPhase) }\n cocoapods_run_script_build_phases = run_script_build_phases.filter { |phase| phase.name.start_with?(\"[CP\") }\n cocoapods_run_script_build_phases.each do |run_script|\n next unless (run_script.input_paths || []).empty? && (run_script.output_paths || []).empty?\n run_script.always_out_of_date = \"1\"\n end\n end\n project.save\nend",
"def setup_output_dir\n FileUtils.cp_r(::TDoc::TEMPLATE_PATH.join('assets'), out_dir)\n\n @json_index.generate\n @json_index.generate_gzipped\n end",
"def run_loadpath=(paths)\n\n end",
"def setup_source_files\n project.sources.each do |src|\n # Figure out where stuff should come from and go to\n source_file = src\n object_file = objectsify src\n compile_task object_file, source_file\n end#project.sources.each\n end",
"def save\n output_path = @options['output_path']\n staging_path = @options['staging_path']\n\n @assets.each do |asset|\n directory = File.join(@source, staging_path, output_path)\n write_asset_file(directory, asset)\n\n # Store output path of saved file\n asset.output_path = output_path\n end\n end",
"def script_path=(script_path)\n @script_path = script_path\n @modified = true\n end",
"def copy_source_files_to_scratch\n source_directories.each do |dir|\n Origen.file_handler.resolve_files(dir) do |file|\n subdir = file.relative_path_from(Pathname.new(dir)).dirname.to_s\n cpydir = \"#{ungenerated_dir}/#{subdir}\"\n FileUtils.mkdir_p(cpydir) unless File.exist?(cpydir)\n FileUtils.copy(file, cpydir)\n end\n end\n end",
"def setup\n setup_filters\n generate_rake_tasks\n record_input_files\n end",
"def output_path\n @output_path ||= 'build'\n end",
"def build!\n test_git!\n\n file_list = Dir.glob(\"#{@source}*\").sort # Pull the file list before creating the target directory\n\n setup_target\n\n add_runner\n\n file_list.each do |infile_name|\n rewrite_animation_frame infile_name\n create_commit infile_name\n end\n end",
"def source_map_path=(_arg0); end",
"def fix_paths!\n return if Settings.emr_data_root.blank?\n unless input_paths.blank?\n @input_paths = input_paths.map{|path| (path =~ ABSOLUTE_URI) ? path : File.join(Settings.emr_data_root, path) }\n end\n unless output_path.blank?\n @output_path = [output_path].map{|path| (path =~ ABSOLUTE_URI) ? path : File.join(Settings.emr_data_root, path) }\n end\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Delete an 'Embed Pods Frameworks' Script Build Phase if present
|
def remove_embed_frameworks_script_phase_from_target(native_target)
remove_script_phase_from_target(native_target, EMBED_FRAMEWORK_PHASE_NAME)
end
|
[
"def remove_copy_xcframeworks_script_phase_from_target(native_target)\n remove_script_phase_from_target(native_target, COPY_XCFRAMEWORKS_PHASE_NAME)\n end",
"def create_embed_frameworks_phase(project, t)\n \n t.build_phases.delete_if { |phase| \n phase.to_s == 'Embed Frameworks'\n }\n\n embed_frameworks_build_phase = project.new(\n Xcodeproj::Project::Object::PBXCopyFilesBuildPhase\n )\n\n embed_frameworks_build_phase.name = 'Embed Frameworks'\n embed_frameworks_build_phase.symbol_dst_subfolder_spec = :frameworks\n t.build_phases << embed_frameworks_build_phase\n return embed_frameworks_build_phase\nend",
"def remove_embed_frameworks_script_phase_from_embedded_targets\n return unless target.requires_host_target?\n native_targets.each do |native_target|\n if AggregateTarget::EMBED_FRAMEWORKS_IN_HOST_TARGET_TYPES.include? native_target.symbol_type\n TargetIntegrator.remove_embed_frameworks_script_phase_from_target(native_target)\n end\n end\n end",
"def remove_script_phase_from_target(native_target, phase_name)\n build_phase = native_target.shell_script_build_phases.find { |bp| bp.name && bp.name.end_with?(phase_name) }\n return unless build_phase.present?\n native_target.build_phases.delete(build_phase)\n end",
"def before_destroy\n # cwd: utunes_app\n logger.info(\"=======> before_destroy invoked!\")\n\n version_str = sprintf(\"%.2d\", version )\n bundle_title = \"hc12_v#{version_str}\"\n \n bundle_folder = \"lib/bundles\"\n bundle_name=\"build_\" + bundle_title\n bundle_fq_name = bundle_folder + \"/\" + bundle_name\n \n logger.info(\"rm -R #{bundle_fq_name}\")\n logger.info( %x[rm -R #{bundle_fq_name}] )\n \n end",
"def remove_copy_resources_script_phase_from_target(native_target)\n build_phase = native_target.shell_script_build_phases.find { |bp| bp.name && bp.name.end_with?(COPY_PODS_RESOURCES_PHASE_NAME) }\n return unless build_phase.present?\n native_target.build_phases.delete(build_phase)\n end",
"def delete\n check_config(require_destination: true)\n cartage.display \"Removing packages from #{name}...\"\n delete_file Pathname(\"#{cartage.final_name}-release-hashref.txt\")\n delete_file cartage.final_release_metadata_json\n cartage.plugins.request_map(:build_package, :package_name).each do |name|\n delete_file name\n end\n end",
"def remove_framework_from_xcodeproj(framework_name)\n\n # Delete framework files\n source_dir_path = File.dirname(source_project_path)\n result_dir_path = File.join(source_dir_path, framework_name)\n \n if File.exist?(result_dir_path)\n FileUtils.rm_r(result_dir_path)\n end\n\n # Remove framework target from xcodeproj\n project = Xcodeproj::Project.open(source_project_path)\n\n target_to_remove = project.targets.select { |target| target.name == framework_name }.first\n\n if not target_to_remove.nil? \n # Build phases\n target_to_remove.build_phases.each { |phase| \n phase.clear \n phase.clear_build_files\n phase.remove_from_project\n }\n\n if not target_to_remove.build_configuration_list.nil? \n target_to_remove.build_configuration_list.remove_from_project\n end\n\n target_to_remove.remove_from_project\n end\n\n # Remove framework files group\n project.groups.select { |group| group.path == framework_name or group.name == framework_name }.each { |framework_group| \n framework_group.clear\n framework_group.remove_from_project\n }\n\n project.files.each { |file| \n if file.path == framework_name + \".framework\"\n file.remove_from_project\n end\n }\n\n project.frameworks_group.groups.each { |framework_group| \n if framework_group.name == 'iOS'\n framework_group.files.each { |file|\n if file.name == 'Foundation.framework'\n file.remove_from_project\n framework_group.clear\n framework_group.remove_from_project\n end\n }\n end\n }\n\n project.objects.select { |object| object.isa == \"PBXBuildFile\" }.each { |e| \n if e.file_ref.nil?\n e.remove_from_project\n end\n }\n\n project.save\n end",
"def uninstall_ruby\n directory ::File.join(options['prefix'], 'builds', new_resource.name) do\n action :delete\n end\n end",
"def remove_playground_from_workspace(playground_name, framework_name, workspace_path)\n \n # Delete playground package\n dir_path = File.dirname(workspace_path)\n result_playgorund_path = File.join(dir_path, playground_name + \".playground\")\n \n if File.exist?(result_playgorund_path)\n FileUtils.rm_r(result_playgorund_path)\n end\n\n # Remove playground from workspace\n workspace = Xcodeproj::Workspace.new_from_xcworkspace(workspace_path)\n node_to_delete = workspace.file_references.select { |file_ref| file_ref.path == result_playgorund_path }.map { |file_ref| file_ref.to_node }.first\n\n if not node_to_delete.nil?\n workspace.document.root.each_element_with_attribute('location', node_to_delete.attributes[\"location\"]) {|e| workspace.document.root.delete_element(e)}\n workspace.save_as(workspace_path)\n end\n end",
"def cleanUpDeploymentTargets(installer)\n installer.pods_project.targets.each do |target|\n target.build_configurations.each do |config|\n config.build_settings.delete 'IPHONEOS_DEPLOYMENT_TARGET'\n end\n end\nend",
"def remove_static_framework_duplicate_linkage(static_framework_pods)\n puts \"Removing duplicate linkage of static frameworks\"\n\n Dir.glob(File.join(PODS_TARGET_SUPPORT_FILES_DIR, \"Pods-*\")).each do |path|\n pod_target = path.split('-', -1).last\n\n static_framework_pods.each do |target, pods|\n next if pod_target == target\n frameworks = pods.map { |pod| identify_frameworks(pod) }.flatten\n\n Dir.glob(File.join(path, \"*.xcconfig\")).each do |xcconfig|\n lines = File.readlines(xcconfig)\n\n if other_ldflags_index = lines.find_index { |l| l.start_with?('OTHER_LDFLAGS') }\n other_ldflags = lines[other_ldflags_index]\n\n frameworks.each do |framework|\n other_ldflags.gsub!(\"-framework \\\"#{framework}\\\"\", '')\n end\n\n File.open(xcconfig, 'w') do |fd|\n fd.write(lines.join)\n end\n end\n end\n end\n end\nend",
"def set_run_script_to_always_run_when_no_input_or_output_files_exist(project:)\n project.targets.each do |target|\n run_script_build_phases = target.build_phases.filter { |phase| phase.is_a?(Xcodeproj::Project::Object::PBXShellScriptBuildPhase) }\n cocoapods_run_script_build_phases = run_script_build_phases.filter { |phase| phase.name.start_with?(\"[CP\") }\n cocoapods_run_script_build_phases.each do |run_script|\n next unless (run_script.input_paths || []).empty? && (run_script.output_paths || []).empty?\n run_script.always_out_of_date = \"1\"\n end\n end\n project.save\nend",
"def destroy\n @prebuilt.destroy\n respond_to do |format|\n format.html { redirect_to prebuilts_url, notice: 'Prebuilt was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def delete_build_files\n %{find #{gem_home}/*/ext/ -maxdepth 1 -mindepth 1 -type d | xargs -L1 bash -c 'if [ -e $0/Makefile ]; then make -C $0 clean; fi'}\n end",
"def create_app_target_embed_frameworks_script(app_spec)\n path = target.embed_frameworks_script_path_for_spec(app_spec)\n framework_paths_by_config = target.user_build_configurations.each_with_object({}) do |(config_name, config), paths_by_config|\n paths_by_config[config_name] = target.dependent_targets_for_app_spec(app_spec, :configuration => config).flat_map do |pod_target|\n spec_paths_to_include = pod_target.library_specs.map(&:name)\n spec_paths_to_include << app_spec.name if pod_target == target\n pod_target.framework_paths.values_at(*spec_paths_to_include).flatten.compact.uniq\n end\n end\n xcframeworks_by_config = target.user_build_configurations.each_with_object({}) do |(config_name, config), paths_by_config|\n paths_by_config[config_name] = target.dependent_targets_for_app_spec(app_spec, :configuration => config).flat_map do |pod_target|\n spec_paths_to_include = pod_target.library_specs.map(&:name)\n spec_paths_to_include << app_spec.name if pod_target == target\n pod_target.xcframeworks.values_at(*spec_paths_to_include).flatten.compact.uniq\n end\n end\n\n unless framework_paths_by_config.each_value.all?(&:empty?) && xcframeworks_by_config.each_value.all?(&:empty?)\n generator = Generator::EmbedFrameworksScript.new(framework_paths_by_config, xcframeworks_by_config)\n update_changed_file(generator, path)\n add_file_to_support_group(path)\n end\n end",
"def delete_release_assets\n CompileExtensionStatus.call(\n extension: extension, \n worker: 'DestroyAssetsWorker', \n job_id: DestroyAssetsWorker.perform_async(self.id)\n )\n end",
"def remove_static_build\n if File.exist?(static_build_path)\n FileUtils.rm_r(static_build_path)\n end\n end",
"def delete_launchd_script(date)\n File.delete(launchd_script_filename(date))\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Adds a shell script build phase responsible for copying the xcframework slice to the intermediate build directory.
|
def create_or_update_copy_xcframeworks_script_phase_to_target(native_target, script_path, input_paths_by_config = {}, output_paths_by_config = {})
phase = TargetIntegrator.create_or_update_shell_script_build_phase(native_target, BUILD_PHASE_PREFIX + COPY_XCFRAMEWORKS_PHASE_NAME)
phase.shell_script = %("#{script_path}"\n)
TargetIntegrator.set_input_output_paths(phase, input_paths_by_config, output_paths_by_config)
reorder_script_phase(native_target, phase, :before_compile)
end
|
[
"def add_carthage_copy_phase(target)\n shell_script_name = 'Carthage copy-frameworks Run Script'\n target_names = target.shell_script_build_phases.map(&:name)\n unless target_names.include?(shell_script_name)\n shell_script = target.new_shell_script_build_phase shell_script_name\n shell_script.shell_path = '/bin/bash'\n shell_script.shell_script = '/usr/local/bin/carthage copy-frameworks'\n shell_script.input_paths = [CARTHAGE_FRAMEWORK_PATH]\n end\n end",
"def create_copy_xcframeworks_script\n path = target.copy_xcframeworks_script_path\n generator = Generator::CopyXCFrameworksScript.new(target.xcframeworks.values.flatten, sandbox.root, target.platform)\n update_changed_file(generator, path)\n add_file_to_support_group(path)\n end",
"def add_copy_resources_script_phase\n phase_name = \"Copy Pods Resources\"\n native_targets.each do |native_target|\n phase = native_target.shell_script_build_phases.select { |bp| bp.name == phase_name }.first ||\n native_target.new_shell_script_build_phase(phase_name)\n path = target.copy_resources_script_relative_path\n phase.shell_script = %{\"#{path}\"\\n}\n phase.show_env_vars_in_log = '0'\n end\n end",
"def remove_copy_xcframeworks_script_phase_from_target(native_target)\n remove_script_phase_from_target(native_target, COPY_XCFRAMEWORKS_PHASE_NAME)\n end",
"def copy_xcframework(path, framework, dir = '')\n FileUtils.mkdir_p \"../../build/#{dir}\"\n FileUtils.rm_rf \"../../build/#{dir}/#{framework}.xcframework\"\n\n source = \"#{path}/#{framework}.xcframework\"\n if not Dir.exist? source\n raise \"Missing XCFramework to test at '#{source}'\"\n end\n sh 'cp', '-cR', source, \"../../build/#{dir}\"\nend",
"def add_copy_dsyms_script_phase(native_target)\n script_path = \"${PODS_ROOT}/#{target.copy_dsyms_script_path.relative_path_from(target.sandbox.root)}\"\n dsym_paths = PodTargetInstaller.dsym_paths(target)\n bcsymbolmap_paths = PodTargetInstaller.bcsymbolmap_paths(target)\n\n if dsym_paths.empty? && bcsymbolmap_paths.empty?\n script_phase = native_target.shell_script_build_phases.find do |bp|\n bp.name && bp.name.end_with?(UserProjectIntegrator::TargetIntegrator::COPY_DSYM_FILES_PHASE_NAME)\n end\n native_target.build_phases.delete(script_phase) if script_phase.present?\n return\n end\n\n phase_name = UserProjectIntegrator::TargetIntegrator::BUILD_PHASE_PREFIX + UserProjectIntegrator::TargetIntegrator::COPY_DSYM_FILES_PHASE_NAME\n phase = UserProjectIntegrator::TargetIntegrator.create_or_update_shell_script_build_phase(native_target, phase_name)\n phase.shell_script = %(\"#{script_path}\"\\n)\n\n input_paths_by_config = {}\n output_paths_by_config = {}\n if use_input_output_paths?\n input_file_list_path = target.copy_dsyms_script_input_files_path\n input_file_list_relative_path = \"${PODS_ROOT}/#{input_file_list_path.relative_path_from(target.sandbox.root)}\"\n input_paths_key = UserProjectIntegrator::TargetIntegrator::XCFileListConfigKey.new(input_file_list_path, input_file_list_relative_path)\n input_paths = input_paths_by_config[input_paths_key] = []\n input_paths.concat([dsym_paths, *bcsymbolmap_paths].flatten.compact)\n\n output_file_list_path = target.copy_dsyms_script_output_files_path\n output_file_list_relative_path = \"${PODS_ROOT}/#{output_file_list_path.relative_path_from(target.sandbox.root)}\"\n output_paths_key = UserProjectIntegrator::TargetIntegrator::XCFileListConfigKey.new(output_file_list_path, output_file_list_relative_path)\n output_paths = output_paths_by_config[output_paths_key] = []\n\n dsym_output_paths = dsym_paths.map { |dsym_path| \"${DWARF_DSYM_FOLDER_PATH}/#{File.basename(dsym_path)}\" }\n bcsymbolmap_output_paths = bcsymbolmap_paths.map { |bcsymbolmap_path| \"${DWARF_DSYM_FOLDER_PATH}/#{File.basename(bcsymbolmap_path)}\" }\n output_paths.concat([dsym_output_paths, *bcsymbolmap_output_paths].flatten.compact)\n end\n\n UserProjectIntegrator::TargetIntegrator.set_input_output_paths(phase, input_paths_by_config, output_paths_by_config)\n end",
"def add_copy_resources_script_phase\n unless target.includes_resources?\n native_targets.each do |native_target|\n TargetIntegrator.remove_copy_resources_script_phase_from_target(native_target)\n end\n return\n end\n\n script_path = target.copy_resources_script_relative_path\n input_paths_by_config = {}\n output_paths_by_config = {}\n if use_input_output_paths\n target.resource_paths_by_config.each do |config, resource_paths|\n input_paths_key = XCFileListConfigKey.new(target.copy_resources_script_input_files_path(config),\n target.copy_resources_script_input_files_relative_path)\n input_paths_by_config[input_paths_key] = [script_path] + resource_paths\n\n output_paths_key = XCFileListConfigKey.new(target.copy_resources_script_output_files_path(config),\n target.copy_resources_script_output_files_relative_path)\n output_paths_by_config[output_paths_key] = TargetIntegrator.resource_output_paths(resource_paths)\n end\n end\n\n native_targets.each do |native_target|\n # Static library targets cannot include resources. Skip this phase from being added instead.\n next if native_target.symbol_type == :static_library\n TargetIntegrator.create_or_update_copy_resources_script_phase_to_target(native_target, script_path,\n input_paths_by_config,\n output_paths_by_config)\n end\n end",
"def add_missing_copy_phase!(dry_run: false)\n # Check if upgrade is needed\n # If fastlane copy files build phase exists already, we don't need any more changes to the Xcode project\n phase_copy_sign = self.fastlane_runner_target.copy_files_build_phases.select { |phase_copy| phase_copy.name == \"FastlaneRunnerCopySigned\" }.first\n\n old_phase_copy_sign = self.fastlane_runner_target.shell_script_build_phases.select { |phase_copy| phase_copy.shell_script == \"cd \\\"${SRCROOT}\\\"\\ncd ../..\\ncp \\\"${TARGET_BUILD_DIR}/${EXECUTABLE_PATH}\\\" .\\n\" }.first\n\n return true if dry_run && phase_copy_sign.nil?\n\n return false if dry_run\n\n # Proceed to upgrade\n old_phase_copy_sign.remove_from_project unless old_phase_copy_sign.nil?\n\n unless phase_copy_sign\n # Create a copy files build phase\n phase_copy_sign = self.fastlane_runner_target.new_copy_files_build_phase(\"FastlaneRunnerCopySigned\")\n phase_copy_sign.dst_path = \"$SRCROOT/../..\"\n phase_copy_sign.dst_subfolder_spec = \"0\"\n phase_copy_sign.run_only_for_deployment_postprocessing = \"0\"\n targetBinaryReference = self.fastlane_runner_target.product_reference\n phase_copy_sign.add_file_reference(targetBinaryReference)\n\n # Set \"Code sign on copy\" flag on Xcode for fastlane_runner_target\n targetBinaryReference.build_files.each { |target_binary_build_file_reference|\n target_binary_build_file_reference.settings = { \"ATTRIBUTES\": [\"CodeSignOnCopy\"] }\n }\n end\n\n target_project.save\n end",
"def create_embed_frameworks_phase(project, t)\n \n t.build_phases.delete_if { |phase| \n phase.to_s == 'Embed Frameworks'\n }\n\n embed_frameworks_build_phase = project.new(\n Xcodeproj::Project::Object::PBXCopyFilesBuildPhase\n )\n\n embed_frameworks_build_phase.name = 'Embed Frameworks'\n embed_frameworks_build_phase.symbol_dst_subfolder_spec = :frameworks\n t.build_phases << embed_frameworks_build_phase\n return embed_frameworks_build_phase\nend",
"def add_earlgrey_copy_files_script(user_project, test_target)\n user_project.targets.each do |target|\n earlgrey_copy_files_phase_name = 'EarlGrey Copy Files'\n if target.name == test_target\n earlgrey_copy_files_exists = false\n target.copy_files_build_phases.each do |copy_files_phase|\n if copy_files_phase.name = earlgrey_copy_files_phase_name\n earlgrey_copy_files_exists = true\n end\n end\n\n if earlgrey_copy_files_exists == false\n new_copy_files_phase = target.new_copy_files_build_phase(earlgrey_copy_files_phase_name)\n new_copy_files_phase.dst_path = '$(TEST_HOST)/../'\n new_copy_files_phase.dst_subfolder_spec = '0'\n file_ref =\n user_project.products_group.new_file('${SRCROOT}/Pods/EarlGrey/EarlGrey-1.0.0/EarlGrey.framework')\n file_ref.source_tree = 'SRCROOT'\n build_file = new_copy_files_phase.add_file_reference(file_ref, true)\n build_file.settings = { 'ATTRIBUTES' => ['CodeSignOnCopy'] }\n user_project.save()\n end\n end\n end\nend",
"def install_flutter_application_pod(flutter_application_path)\n app_framework_dir = File.join(__dir__, 'App.framework')\n app_framework_dylib = File.join(app_framework_dir, 'App')\n if !File.exist?(app_framework_dylib)\n # Fake an App.framework to have something to link against if the xcode backend script has not run yet.\n # CocoaPods will not embed the framework on pod install (before any build phases can run) if the dylib does not exist.\n # Create a dummy dylib.\n FileUtils.mkdir_p(app_framework_dir)\n `echo \"static const int Moo = 88;\" | xcrun clang -x c -dynamiclib -o \"#{app_framework_dylib}\" -`\n end\n\n # Keep pod and script phase paths relative so they can be checked into source control.\n # Process will be run from project directory.\n current_directory_pathname = Pathname.new __dir__\n project_directory_pathname = Pathname.new Dir.pwd\n relative = current_directory_pathname.relative_path_from project_directory_pathname\n pod 'plant_species_recognition', :path => relative.to_s, :inhibit_warnings => true\n\n flutter_export_environment_path = File.join('${SRCROOT}', relative, 'flutter_export_environment.sh');\n script_phase :name => 'Run Flutter Build Script',\n :script => \"set -e\\nset -u\\nsource \\\"#{flutter_export_environment_path}\\\"\\n\\\"$FLUTTER_ROOT\\\"/packages/flutter_tools/bin/xcode_backend.sh build\",\n :input_files => [\n File.join('${SRCROOT}', flutter_application_path, '.metadata'),\n File.join('${SRCROOT}', relative, 'App.framework', 'App'),\n File.join('${SRCROOT}', relative, 'engine', 'Flutter.framework', 'Flutter'),\n flutter_export_environment_path\n ],\n :execution_position => :before_compile\nend",
"def bundle_framework(framework_name)\n framework_id, framework_values = object_for_name(framework_name)\n \n # create a new file wrapper for in the copy build phase\n framework_in_build_phase_id = generate_object_id\n framework_in_build_phase_values = {\n 'isa' => 'PBXBuildFile',\n 'fileRef' => framework_id\n }\n add_object(framework_in_build_phase_id, framework_in_build_phase_values)\n \n # get or define the Copy Frameworks build phase\n build_phase = object_for_name('Copy Frameworks')\n if build_phase.nil?\n build_phase_id, build_phase_values = new_framework_copy_build_phase\n # add the new build phase to the objects\n add_object(build_phase_id, build_phase_values)\n \n # add the new build phase to the project target\n add_build_phase_to_project_target(build_phase_id)\n else\n build_phase_id, build_phase_values = build_phase\n end\n # add the framework to the build phase\n add_object_to_build_phase(framework_in_build_phase_id, build_phase_id)\n end",
"def create_or_update_shell_script_build_phase(native_target, script_phase_name, show_env_vars_in_log = '0')\n build_phases = native_target.build_phases.grep(Xcodeproj::Project::Object::PBXShellScriptBuildPhase)\n build_phases.find { |phase| phase.name && phase.name.end_with?(script_phase_name) }.tap { |p| p.name = script_phase_name if p } ||\n native_target.project.new(Xcodeproj::Project::Object::PBXShellScriptBuildPhase).tap do |phase|\n UI.message(\"Adding Build Phase '#{script_phase_name}' to project.\") do\n phase.name = script_phase_name\n unless show_env_vars_in_log.nil?\n phase.show_env_vars_in_log = show_env_vars_in_log\n end\n native_target.build_phases << phase\n end\n end\n end",
"def wrap_xcodebuild\n require 'fileutils'\n @wrapped_xcodebuild_path ||= File.join(Gym::ROOT, \"lib/assets/wrap_xcodebuild/xcbuild-safe.sh\")\n end",
"def wrap_xcodebuild\n require 'fileutils'\n @wrapped_xcodebuild_path ||= File.join(Helper.gem_path(\"gym\"), \"lib/assets/wrap_xcodebuild/xcbuild-safe.sh\")\n end",
"def deploy\n system \"macruby_deploy --embed --gem hotcocoa #{deploy_options} #{bundle_root}\"\n end",
"def new_framework_copy_build_phase\n [generate_object_id, NEW_COPY_FRAMEWORKS_BUILD_PHASE]\n end",
"def build_using_xcode\n validations\n\n framework_args = @framework_paths\n .map { |path| \"-framework #{path}\" }\n .join(\" \")\n\n FileUtils.mkdir_p(@parent_directory)\n output_path = File.join(@parent_directory, \"#{@framework_name}.xcframework\")\n output_args = \"-output #{output_path}\"\n\n logfile = Tempfile.new(['xcframework', '.log'])\n\n cmd = \"xcodebuild -create-xcframework #{framework_args} #{output_args}\"\n\n system(\"#{cmd} >#{logfile.path} 2>&1\") ||\n raise(BuildUsingXcodeFailure.new(File.read(logfile).strip))\n ensure\n if logfile\n logfile.close\n logfile.delete\n end\n end",
"def create_app_target_embed_frameworks_script(app_spec)\n path = target.embed_frameworks_script_path_for_spec(app_spec)\n framework_paths_by_config = target.user_build_configurations.each_with_object({}) do |(config_name, config), paths_by_config|\n paths_by_config[config_name] = target.dependent_targets_for_app_spec(app_spec, :configuration => config).flat_map do |pod_target|\n spec_paths_to_include = pod_target.library_specs.map(&:name)\n spec_paths_to_include << app_spec.name if pod_target == target\n pod_target.framework_paths.values_at(*spec_paths_to_include).flatten.compact.uniq\n end\n end\n xcframeworks_by_config = target.user_build_configurations.each_with_object({}) do |(config_name, config), paths_by_config|\n paths_by_config[config_name] = target.dependent_targets_for_app_spec(app_spec, :configuration => config).flat_map do |pod_target|\n spec_paths_to_include = pod_target.library_specs.map(&:name)\n spec_paths_to_include << app_spec.name if pod_target == target\n pod_target.xcframeworks.values_at(*spec_paths_to_include).flatten.compact.uniq\n end\n end\n\n unless framework_paths_by_config.each_value.all?(&:empty?) && xcframeworks_by_config.each_value.all?(&:empty?)\n generator = Generator::EmbedFrameworksScript.new(framework_paths_by_config, xcframeworks_by_config)\n update_changed_file(generator, path)\n add_file_to_support_group(path)\n end\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
Delete a 'Copy XCFrameworks' Script Build Phase if present
|
def remove_copy_xcframeworks_script_phase_from_target(native_target)
remove_script_phase_from_target(native_target, COPY_XCFRAMEWORKS_PHASE_NAME)
end
|
[
"def remove_framework_from_xcodeproj(framework_name)\n\n # Delete framework files\n source_dir_path = File.dirname(source_project_path)\n result_dir_path = File.join(source_dir_path, framework_name)\n \n if File.exist?(result_dir_path)\n FileUtils.rm_r(result_dir_path)\n end\n\n # Remove framework target from xcodeproj\n project = Xcodeproj::Project.open(source_project_path)\n\n target_to_remove = project.targets.select { |target| target.name == framework_name }.first\n\n if not target_to_remove.nil? \n # Build phases\n target_to_remove.build_phases.each { |phase| \n phase.clear \n phase.clear_build_files\n phase.remove_from_project\n }\n\n if not target_to_remove.build_configuration_list.nil? \n target_to_remove.build_configuration_list.remove_from_project\n end\n\n target_to_remove.remove_from_project\n end\n\n # Remove framework files group\n project.groups.select { |group| group.path == framework_name or group.name == framework_name }.each { |framework_group| \n framework_group.clear\n framework_group.remove_from_project\n }\n\n project.files.each { |file| \n if file.path == framework_name + \".framework\"\n file.remove_from_project\n end\n }\n\n project.frameworks_group.groups.each { |framework_group| \n if framework_group.name == 'iOS'\n framework_group.files.each { |file|\n if file.name == 'Foundation.framework'\n file.remove_from_project\n framework_group.clear\n framework_group.remove_from_project\n end\n }\n end\n }\n\n project.objects.select { |object| object.isa == \"PBXBuildFile\" }.each { |e| \n if e.file_ref.nil?\n e.remove_from_project\n end\n }\n\n project.save\n end",
"def create_embed_frameworks_phase(project, t)\n \n t.build_phases.delete_if { |phase| \n phase.to_s == 'Embed Frameworks'\n }\n\n embed_frameworks_build_phase = project.new(\n Xcodeproj::Project::Object::PBXCopyFilesBuildPhase\n )\n\n embed_frameworks_build_phase.name = 'Embed Frameworks'\n embed_frameworks_build_phase.symbol_dst_subfolder_spec = :frameworks\n t.build_phases << embed_frameworks_build_phase\n return embed_frameworks_build_phase\nend",
"def create_copy_xcframeworks_script\n path = target.copy_xcframeworks_script_path\n generator = Generator::CopyXCFrameworksScript.new(target.xcframeworks.values.flatten, sandbox.root, target.platform)\n update_changed_file(generator, path)\n add_file_to_support_group(path)\n end",
"def remove_copy_resources_script_phase_from_target(native_target)\n build_phase = native_target.shell_script_build_phases.find { |bp| bp.name && bp.name.end_with?(COPY_PODS_RESOURCES_PHASE_NAME) }\n return unless build_phase.present?\n native_target.build_phases.delete(build_phase)\n end",
"def copy_xcframework(path, framework, dir = '')\n FileUtils.mkdir_p \"../../build/#{dir}\"\n FileUtils.rm_rf \"../../build/#{dir}/#{framework}.xcframework\"\n\n source = \"#{path}/#{framework}.xcframework\"\n if not Dir.exist? source\n raise \"Missing XCFramework to test at '#{source}'\"\n end\n sh 'cp', '-cR', source, \"../../build/#{dir}\"\nend",
"def remove_playground_from_workspace(playground_name, framework_name, workspace_path)\n \n # Delete playground package\n dir_path = File.dirname(workspace_path)\n result_playgorund_path = File.join(dir_path, playground_name + \".playground\")\n \n if File.exist?(result_playgorund_path)\n FileUtils.rm_r(result_playgorund_path)\n end\n\n # Remove playground from workspace\n workspace = Xcodeproj::Workspace.new_from_xcworkspace(workspace_path)\n node_to_delete = workspace.file_references.select { |file_ref| file_ref.path == result_playgorund_path }.map { |file_ref| file_ref.to_node }.first\n\n if not node_to_delete.nil?\n workspace.document.root.each_element_with_attribute('location', node_to_delete.attributes[\"location\"]) {|e| workspace.document.root.delete_element(e)}\n workspace.save_as(workspace_path)\n end\n end",
"def remove_embed_frameworks_script_phase_from_target(native_target)\n remove_script_phase_from_target(native_target, EMBED_FRAMEWORK_PHASE_NAME)\n end",
"def uninstall_ruby\n directory ::File.join(options['prefix'], 'builds', new_resource.name) do\n action :delete\n end\n end",
"def add_carthage_copy_phase(target)\n shell_script_name = 'Carthage copy-frameworks Run Script'\n target_names = target.shell_script_build_phases.map(&:name)\n unless target_names.include?(shell_script_name)\n shell_script = target.new_shell_script_build_phase shell_script_name\n shell_script.shell_path = '/bin/bash'\n shell_script.shell_script = '/usr/local/bin/carthage copy-frameworks'\n shell_script.input_paths = [CARTHAGE_FRAMEWORK_PATH]\n end\n end",
"def delete\n check_config(require_destination: true)\n cartage.display \"Removing packages from #{name}...\"\n delete_file Pathname(\"#{cartage.final_name}-release-hashref.txt\")\n delete_file cartage.final_release_metadata_json\n cartage.plugins.request_map(:build_package, :package_name).each do |name|\n delete_file name\n end\n end",
"def delete_build_files\n %{find #{gem_home}/*/ext/ -maxdepth 1 -mindepth 1 -type d | xargs -L1 bash -c 'if [ -e $0/Makefile ]; then make -C $0 clean; fi'}\n end",
"def remove_embed_frameworks_script_phase_from_embedded_targets\n return unless target.requires_host_target?\n native_targets.each do |native_target|\n if AggregateTarget::EMBED_FRAMEWORKS_IN_HOST_TARGET_TYPES.include? native_target.symbol_type\n TargetIntegrator.remove_embed_frameworks_script_phase_from_target(native_target)\n end\n end\n end",
"def add_missing_copy_phase!(dry_run: false)\n # Check if upgrade is needed\n # If fastlane copy files build phase exists already, we don't need any more changes to the Xcode project\n phase_copy_sign = self.fastlane_runner_target.copy_files_build_phases.select { |phase_copy| phase_copy.name == \"FastlaneRunnerCopySigned\" }.first\n\n old_phase_copy_sign = self.fastlane_runner_target.shell_script_build_phases.select { |phase_copy| phase_copy.shell_script == \"cd \\\"${SRCROOT}\\\"\\ncd ../..\\ncp \\\"${TARGET_BUILD_DIR}/${EXECUTABLE_PATH}\\\" .\\n\" }.first\n\n return true if dry_run && phase_copy_sign.nil?\n\n return false if dry_run\n\n # Proceed to upgrade\n old_phase_copy_sign.remove_from_project unless old_phase_copy_sign.nil?\n\n unless phase_copy_sign\n # Create a copy files build phase\n phase_copy_sign = self.fastlane_runner_target.new_copy_files_build_phase(\"FastlaneRunnerCopySigned\")\n phase_copy_sign.dst_path = \"$SRCROOT/../..\"\n phase_copy_sign.dst_subfolder_spec = \"0\"\n phase_copy_sign.run_only_for_deployment_postprocessing = \"0\"\n targetBinaryReference = self.fastlane_runner_target.product_reference\n phase_copy_sign.add_file_reference(targetBinaryReference)\n\n # Set \"Code sign on copy\" flag on Xcode for fastlane_runner_target\n targetBinaryReference.build_files.each { |target_binary_build_file_reference|\n target_binary_build_file_reference.settings = { \"ATTRIBUTES\": [\"CodeSignOnCopy\"] }\n }\n end\n\n target_project.save\n end",
"def set_run_script_to_always_run_when_no_input_or_output_files_exist(project:)\n project.targets.each do |target|\n run_script_build_phases = target.build_phases.filter { |phase| phase.is_a?(Xcodeproj::Project::Object::PBXShellScriptBuildPhase) }\n cocoapods_run_script_build_phases = run_script_build_phases.filter { |phase| phase.name.start_with?(\"[CP\") }\n cocoapods_run_script_build_phases.each do |run_script|\n next unless (run_script.input_paths || []).empty? && (run_script.output_paths || []).empty?\n run_script.always_out_of_date = \"1\"\n end\n end\n project.save\nend",
"def remove_static_framework_duplicate_linkage(static_framework_pods)\n puts \"Removing duplicate linkage of static frameworks\"\n\n Dir.glob(File.join(PODS_TARGET_SUPPORT_FILES_DIR, \"Pods-*\")).each do |path|\n pod_target = path.split('-', -1).last\n\n static_framework_pods.each do |target, pods|\n next if pod_target == target\n frameworks = pods.map { |pod| identify_frameworks(pod) }.flatten\n\n Dir.glob(File.join(path, \"*.xcconfig\")).each do |xcconfig|\n lines = File.readlines(xcconfig)\n\n if other_ldflags_index = lines.find_index { |l| l.start_with?('OTHER_LDFLAGS') }\n other_ldflags = lines[other_ldflags_index]\n\n frameworks.each do |framework|\n other_ldflags.gsub!(\"-framework \\\"#{framework}\\\"\", '')\n end\n\n File.open(xcconfig, 'w') do |fd|\n fd.write(lines.join)\n end\n end\n end\n end\n end\nend",
"def remove_script_phase_from_target(native_target, phase_name)\n build_phase = native_target.shell_script_build_phases.find { |bp| bp.name && bp.name.end_with?(phase_name) }\n return unless build_phase.present?\n native_target.build_phases.delete(build_phase)\n end",
"def before_destroy\n # cwd: utunes_app\n logger.info(\"=======> before_destroy invoked!\")\n\n version_str = sprintf(\"%.2d\", version )\n bundle_title = \"hc12_v#{version_str}\"\n \n bundle_folder = \"lib/bundles\"\n bundle_name=\"build_\" + bundle_title\n bundle_fq_name = bundle_folder + \"/\" + bundle_name\n \n logger.info(\"rm -R #{bundle_fq_name}\")\n logger.info( %x[rm -R #{bundle_fq_name}] )\n \n end",
"def cleanUpDeploymentTargets(installer)\n installer.pods_project.targets.each do |target|\n target.build_configurations.each do |config|\n config.build_settings.delete 'IPHONEOS_DEPLOYMENT_TARGET'\n end\n end\nend",
"def delete_build_files\n %{find #{gem_home}/*/ext/ -maxdepth 1 -mindepth 1 -type d | xargs -I% make -C % clean}\n end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
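Editor's note: the sketch below is not part of the dataset row above; it only illustrates how the row's positive document could be driven against a plain Xcodeproj project. The phase-name constant, the project path, and the re-implemented helpers are assumptions for illustration (in CocoaPods these helpers live on the TargetIntegrator, and generated phase names carry a '[CP] ' prefix, which is why the lookup matches on end_with? rather than equality).

require 'xcodeproj'

# Assumed value; the real constant is defined by whichever integrator created the phase.
COPY_XCFRAMEWORKS_PHASE_NAME = '[CP] Copy XCFrameworks'

# Mirrors the positive document; plain truthiness replaces `present?` so the
# sketch does not need ActiveSupport.
def remove_script_phase_from_target(native_target, phase_name)
  build_phase = native_target.shell_script_build_phases.find { |bp| bp.name && bp.name.end_with?(phase_name) }
  return unless build_phase
  native_target.build_phases.delete(build_phase)
end

def remove_copy_xcframeworks_script_phase_from_target(native_target)
  remove_script_phase_from_target(native_target, COPY_XCFRAMEWORKS_PHASE_NAME)
end

project = Xcodeproj::Project.open('App.xcodeproj') # hypothetical path
project.native_targets.each { |t| remove_copy_xcframeworks_script_phase_from_target(t) }
project.save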
|
Removes a script phase from a native target by name
|
def remove_script_phase_from_target(native_target, phase_name)
build_phase = native_target.shell_script_build_phases.find { |bp| bp.name && bp.name.end_with?(phase_name) }
return unless build_phase.present?
native_target.build_phases.delete(build_phase)
end
|
[
"def remove_copy_xcframeworks_script_phase_from_target(native_target)\n remove_script_phase_from_target(native_target, COPY_XCFRAMEWORKS_PHASE_NAME)\n end",
"def remove_embed_frameworks_script_phase_from_target(native_target)\n remove_script_phase_from_target(native_target, EMBED_FRAMEWORK_PHASE_NAME)\n end",
"def remove_copy_resources_script_phase_from_target(native_target)\n build_phase = native_target.shell_script_build_phases.find { |bp| bp.name && bp.name.end_with?(COPY_PODS_RESOURCES_PHASE_NAME) }\n return unless build_phase.present?\n native_target.build_phases.delete(build_phase)\n end",
"def unpin_script(script); end",
"def unpin_script(script)\n devtools.runtime.evaluate(expression: script.remove)\n devtools.page.remove_script_to_evaluate_on_new_document(identifier: script.devtools_identifier)\n pinned_scripts.delete(script)\n end",
"def remove_embed_frameworks_script_phase_from_embedded_targets\n return unless target.requires_host_target?\n native_targets.each do |native_target|\n if AggregateTarget::EMBED_FRAMEWORKS_IN_HOST_TARGET_TYPES.include? native_target.symbol_type\n TargetIntegrator.remove_embed_frameworks_script_phase_from_target(native_target)\n end\n end\n end",
"def remove_target(target)\n @cache[\"targets\"].delete(target)\n end",
"def remove_command(name)\n @embedded_commands.delete(name.to_sym)\n end",
"def delete_script(script)\n send_command('DELETESCRIPT', sieve_name(script))\n end",
"def remove()\n CCProcess.start(\"sdk-manage --tooling --remove '#{@name}'\", (_ :removing_tooling) + \" #{@name}\", 60*15)\n @@toolings.delete(@name)\n end",
"def remove(plugin); end",
"def unload name\n raise \"Cannot unload: No such script #{name}\" unless @scripts.has_key? name\n\n @scripts[name].die if @scripts[name].respond_to? \"die\"\n\n @scripts[name].unregister_script\n @scripts[name].unregister_commands\n @scripts[name].unregister_events\n\n @scripts.delete name\n end",
"def unload name\n raise \"Cannot unload: No such script #{name}\" unless @scripts.key? name\n\n @scripts[name].die if @scripts[name].respond_to? \"die\"\n\n Events.dispatch_for @scripts[name], :unloading\n\n Events.delete_for @scripts[name]\n URL.delete_for @scripts[name] if defined? MODULE_LOADED_URL_HANDLER\n RegexHandlerManager.delete_for @scripts[name] if defined? MODULE_LOADED_REGEX_HANDLER\n\n @scripts[name].unregister_script\n @scripts[name].unregister_commands\n @scripts[name].unregister_events\n\n @scripts.delete name\n end",
"def remove_invocation(*names); end",
"def remove_checkout_source(name)\n root_name = Specification.root_name(name)\n checkout_sources.delete(root_name)\n end",
"def uninstall_early_script(directives)\n uninstall_script(directives, directive_name: :early_script)\n end",
"def undefine plugin_name\n undefine_plugin_class_methods plugin_name\n Nark::Plugin.remove_trigger plugin_name\n Nark::Plugin.send :remove_const, plugin_name.to_s.camelize.to_sym\n end",
"def create_or_update_shell_script_build_phase(native_target, script_phase_name, show_env_vars_in_log = '0')\n build_phases = native_target.build_phases.grep(Xcodeproj::Project::Object::PBXShellScriptBuildPhase)\n build_phases.find { |phase| phase.name && phase.name.end_with?(script_phase_name) }.tap { |p| p.name = script_phase_name if p } ||\n native_target.project.new(Xcodeproj::Project::Object::PBXShellScriptBuildPhase).tap do |phase|\n UI.message(\"Adding Build Phase '#{script_phase_name}' to project.\") do\n phase.name = script_phase_name\n unless show_env_vars_in_log.nil?\n phase.show_env_vars_in_log = show_env_vars_in_log\n end\n native_target.build_phases << phase\n end\n end\n end",
"def remove_hook_for(*names); end"
] |
{
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
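Editor's note: again not part of the row above — a minimal round-trip sketch showing why the positive document matches the phase name with end_with? instead of equality: integrators typically prefix the phases they generate (CocoaPods uses '[CP] '), so removal keys on the suffix. The prefix, phase name, and project path below are assumed for illustration.

require 'xcodeproj'

PREFIX = '[CP] ' # assumed prefix mirroring the CocoaPods convention

project = Xcodeproj::Project.open('App.xcodeproj') # hypothetical path
target  = project.native_targets.first

# Create a named shell-script phase the way an integrator would.
phase = target.new_shell_script_build_phase("#{PREFIX}Copy XCFrameworks")
phase.shell_script = 'echo "copy xcframeworks placeholder"'

# Remove it again by suffix, as in the positive document.
stale = target.shell_script_build_phases.find { |bp| bp.name && bp.name.end_with?('Copy XCFrameworks') }
target.build_phases.delete(stale) if stale
project.save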
|