Columns:
  query      stringlengths    9 to 9.05k
  document   stringlengths    10 to 222k
  negatives  sequencelengths  19 to 20
  metadata   dict
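Each row pairs a natural-language query with one positive code document and a list of 19-20 hard-negative code snippets, plus a metadata dict describing the training objective. Below is a minimal sketch of loading and inspecting a row with the Hugging Face `datasets` library; the repository id `user/code-retrieval-triplets` is a placeholder, not the actual dataset name:

from datasets import load_dataset

# Hypothetical repository id; substitute the real dataset path.
ds = load_dataset("user/code-retrieval-triplets", split="train")

row = ds[0]
print(row["query"])            # natural-language description of the code
print(row["document"][:200])   # the positive (matching) code snippet
print(len(row["negatives"]))   # 19-20 hard-negative code snippets per row
print(row["metadata"])         # objective config, e.g. triplet of (query, document, negatives)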
Set appointment's end time; Create new appointment and render result
def appointment_end_time(end_time):
    session.attributes['end_time'] = str(end_time)
    form = AppointmentForm(session.attributes)
    form.submit()
    return render_result(form)
[ "def appointment_time(begin_time):\n\n session.attributes['begin_time'] = str(begin_time)\n msg = render_template('end_date')\n return question(msg)", "def appointment_end_date(end_date):\n\n session.attributes['end_date'] = str(end_date)\n msg = render_template('end_time')\n return question(msg)", "def main_calendar_appointer(start_datetime,end_datetime, doctor_num, patient_num):\r\n service = token_decider(doctor_num)\r\n calendar_summary = \"Patient Appointment\"\r\n id = id_checker(service,calendar_summary)\r\n\r\n time_start = \"{}-{}-{}T{}:{}:00\".format(start_datetime.year, start_datetime.month, start_datetime.day, start_datetime.hour, start_datetime.minute)\r\n time_end = \"{}-{}-{}T{}:{}:00\".format(end_datetime.year, end_datetime.month, end_datetime.day, end_datetime.hour, end_datetime.minute)\r\n print(time_start)\r\n print(time_end)\r\n\r\n event = {\r\n 'summary': 'Patient appointment',\r\n 'location': 'SmartOffice',\r\n 'description': 'Medical appointment with patient no.{}'.format(patient_num),\r\n 'start': {\r\n 'dateTime': time_start,\r\n 'timeZone': 'Australia/Melbourne',\r\n },\r\n 'end': {\r\n 'dateTime': time_end,\r\n 'timeZone': 'Australia/Melbourne',\r\n }\r\n }\r\n event_= event\r\n\r\n event = service.events().insert(calendarId=id, body=event).execute()\r\n print('Event created: {}'.format(event.get('htmlLink')))\r\n\r\n # Print out latest 10 events\r\n event_checker(id,service)\r\n return event_", "def create_appointment():\n\n msg = render_template('date')\n return question(msg)", "def editAppointment(request, pk):\n user = request.user\n appointment = Appointment.objects.filter(id=pk)[0]\n\n if user.enduser.isDoctor and (appointment.doctorID == user.enduser.id):\n pass\n elif user.enduser.isSecretary:\n pass\n elif user.enduser.isNurse and (appointment.begins - timezone.now() < datetime.timedelta(days = 7)):\n pass\n else:\n return render(request, 'cal/invalid.html')\n\n form = editAppointmentForm( initial={'length' : appointment.length, 'location' : appointment.location,\n 'date' : datetime.date(appointment.begins.year, appointment.begins.month, appointment.begins.day),\n 'begins' : datetime.time(appointment.begins.hour, appointment.begins.minute)})\n if request.method == \"POST\":\n form = editAppointmentForm(request.POST)\n if form.is_valid():\n begins=form.cleaned_data['begins']\n date=form.cleaned_data['date']\n fullTime = datetime.datetime(date.year, date.month, date.day, begins.hour, begins.minute)\n length=form.cleaned_data['length']\n location=getHospitalID(user.username)\n\n appointment.begins = fullTime\n appointment.length = length\n appointment.location = location\n appointment.day = fullTime.day\n appointment.month = fullTime.month\n appointment.year = fullTime.year\n appointment.save()\n return redirect('/appointments/' + str(appointment.id))\n return render(request, 'cal/editAppointment.html', {'appointment' : appointment, 'form' : form,\n 'doctor' : endUser.objects.filter(id=appointment.doctorID)[0],\n 'patient' : endUser.objects.filter(id=appointment.patientID)[0],\n 'user' : user})", "def makeAppointment(request):\n user = request.user\n if user.enduser.isPatient:\n return render(request, 'cal/invalid.html')\n elif user.enduser.isDoctor:\n pass\n elif user.enduser.isSecretary:\n pass\n elif user.enduser.isNurse:\n pass\n patients = endUser.objects.filter(isPatient=True)\n doctors = endUser.objects.filter(isDoctor=True)\n\n if request.method == 'POST':\n form = makeAppointmentForm(request.POST)\n if form.is_valid():\n 
begins=form.cleaned_data['begins']\n date=form.cleaned_data['date']\n fullTime = datetime.datetime(date.year, date.month, date.day, begins.hour, begins.minute)\n length=form.cleaned_data['length']\n location=getHospitalID(user.username)\n patientID=form.cleaned_data['patientID']\n doctorID=form.cleaned_data['doctorID']\n\n\n ap = Appointment.objects.create(begins=fullTime, patientID=patientID, doctorID=doctorID, length=length,\n location=location, day=fullTime.day, month=fullTime.month, year = fullTime.year)\n ap.save()\n return redirect('/cal')\n else:\n return render(request, 'cal/makeAppointment.html', {'form':form, 'patients':patients, 'doctors':doctors})\n\n form = makeAppointmentForm()\n context = {\n 'form' : form,\n 'patients' : patients,\n 'doctors' : doctors\n }\n return render(request, 'cal/makeAppointment.html', context)", "def appointment_date(begin_date):\n\n session.attributes['begin_date'] = str(begin_date)\n qs = render_template('time')\n return question(qs)", "def test_post_appointments(self, _mock_email, _mock_event):\n add_user()\n db.session.commit()\n user = User.query.first()\n user_public_id = user.public_id\n add_event(user.id, create_availability())\n db.session.commit()\n event = Event.query.first()\n event_url = event.url\n start = '2020-03-20T08:30:00Z'\n comments = \"I don't know about this appointment man...\"\n name = 'Little Timmy'\n email = 'little@timmy.com'\n\n route = f'/users/{user_public_id}/events/{event_url}/appointments'\n request = create_appointment_json(start=start,\n comments=comments,\n name=name,\n email=email)\n response = self.api.post(route,\n data=request,\n content_type='application/json')\n\n data = json.loads(response.data.decode())\n self.assertEqual(response.status_code, 201)\n self.assertEqual(data['message'], 'success')\n\n appointment = db.session.query(Appointment).\\\n filter(User.public_id == user_public_id,\n Event.url == event_url,\n Appointment.start == start).\\\n first()\n self.assertEqual(appointment.comments, comments)\n\n participant = appointment.participants[0]\n self.assertEqual(participant.name, name)\n self.assertEqual(participant.email, email)", "def add_apmt(self):\n print('')\n month = input('What month? ')\n day = input('What day? ')\n hour = input('What hour? ')\n minute = input('What minute')\n message = input('What is the appointment? 
')\n self.my_planner.add_appointment(month, day, hour, minute, message)\n print('')\n return True", "def create_appt(data):\r\n appt = objectify.Element(\"appointment\")\r\n appt.begin = data[\"begin\"]\r\n appt.uid = data[\"uid\"]\r\n appt.alarmTime = data[\"alarmTime\"]\r\n appt.state = data[\"state\"]\r\n appt.location = data[\"location\"]\r\n appt.duration = data[\"duration\"]\r\n appt.subject = data[\"subject\"]\r\n return appt", "def book_new_appointment(self, play_init=True, client=None):\n\n print 'booking new appt'\n # Ask the user when they would like the appointment\n if play_init:\n\n self.speech.play_audio(join(self.audio_vocab_dir, 'appointment_management/when_would_appt.mp3'))\n\n # Start to listen for user speech\n user_speech = self.speech.listen(method_before_speech_analysis=self.speech.play_audio,\n args=join(self.audio_vocab_dir, 'appointment_management/okay_1_second_check_time_available.mp3'))\n\n print \"Obtained requested booking date\"\n\n month, day, year, time_of_day = self.translator.get_date_time(user_speech)\n\n def is_valid_id(m, d, y, t):\n\n if m != None and d != None and y != None and t == None:\n return False, 'time'\n\n elif m == None or d == None or y == None or t == None:\n return False, None\n time_minutes = 30 if '0.5' in str(time_of_day) else 0\n dt = datetime.datetime(int(year), m + 1, day, int(time_of_day), time_minutes)\n timestamp = time.mktime(dt.timetuple())\n if timestamp < time.time():\n print \"This time is in the past\"\n return False, None\n return True, None\n\n print month, day, year, time_of_day\n\n if is_valid_id(month, day, year, time_of_day)[0] == False:\n if is_valid_id(month, day, year, time_of_day)[1] == 'time':\n self.speech.play_audio([join(self.audio_vocab_dir, 'new_profile/enhanced/time_of_day.mp3'),\n join(self.audio_vocab_dir, 'new_profile/enhanced/sorry_i_couldnt_get_your.mp3')])\n else:\n self.speech.play_audio(join(self.audio_vocab_dir, 'personal/didnt_understand.mp3'))\n self.speech.play_audio(join(self.audio_vocab_dir, 'appointment_management/when_would_appt.mp3'))\n self.book_new_appointment(False, client=client)\n\n elif self._check_slot_availability(month, day, year, time_of_day)[0] is True:\n # Time slot is free\n self._book_appointment_in_db(month, day, year, time_of_day, client=client)\n # Will return its own check. 
Just need to do confirmation\n self._booking_confirmation(month, day, year, time_of_day, client=client)\n return 'successfully booked'\n else:\n # Cannot book appointment\n cannot_book = join(self.speech.audio_vocab_dir, 'appointment_management/time_slot_already_booked.mp3')\n print \"Time slot already booked\"\n self.speech.microphone_mgr.commands.put({'Command':'Start', 'Wav_path':cannot_book})\n self.speech.play_audio(cannot_book)\n\n self.book_new_appointment(False, client=client)", "def set_available_time_slot():\n if request.content_type != 'application/json':\n error = json.dumps({'error': 'Invalid Content Type'})\n return make_response(error, 400, InterviewCalendarApi.HEADERS)\n\n data = request.json\n # For Temporary purpose, stored in flat file database\n with open(InterviewCalendarApi.DB_FILE, \"a+\") as fd:\n record = \"%s|%s|%s|%s\\n\" %(data[\"Category\"], data[\"Name\"],\n data[\"Email\"], \",\".join(data[\"AvailablityDateTime\"]))\n fd.write(record)\n msg = json.dumps({\"Status\": \"Success\"})\n return make_response(msg, 200, InterviewCalendarApi.HEADERS)", "def add_end_tournament(self):\n self.end_date = datetime.now()\n return convert_date(self.end_date)", "def set_end_time(td, end_time):\n\n td.setEndTime(end_time)", "def add_appointment(self, appointment):\n\n if len(self.appointments) < self.max_appointments:\n\n uuid = uuid4().hex\n self.appointments[uuid] = {\"locator\": appointment.locator, \"end_time\": appointment.end_time}\n\n if appointment.locator in self.locator_uuid_map:\n self.locator_uuid_map[appointment.locator].append(uuid)\n\n else:\n self.locator_uuid_map[appointment.locator] = [uuid]\n\n self.db_manager.store_watcher_appointment(uuid, appointment.to_json())\n self.db_manager.create_append_locator_map(appointment.locator, uuid)\n\n appointment_added = True\n signature = Cryptographer.sign(appointment.serialize(), self.signing_key)\n\n logger.info(\"New appointment accepted\", locator=appointment.locator)\n\n else:\n appointment_added = False\n signature = None\n\n logger.info(\"Maximum appointments reached, appointment rejected\", locator=appointment.locator)\n\n return appointment_added, signature", "def create_appointment():\n\n form = AppointmentForm()\n\n if form.validate_on_submit():\n\n appointment = Appointment(\n title = form.title.data,\n description = form.description.data,\n location = form.location.data,\n start = form.start.data,\n client = form.client.data,\n user = current_user\n )\n\n try:\n db.session.add(appointment)\n db.session.commit()\n\n flash('Successfully created the appointment.')\n\n return redirect(url_for('appointment.read_appointments'))\n except:\n flash('Error creating the appointment')\n\n return render_template('appointments/form.html.j2', form=form, title='Create appointment')", "def create_meeting(self, request, *args, **kwargs):\n from datetime import datetime, timezone, timedelta\n from django.db.models import Min, F\n from core.users.models import HospitalUser\n from core.users.models import DoctorWorkSchedule\n\n meetings = Meeting.objects.filter(\n user_id=request.user.pk,\n end_time__gte=datetime.now(timezone.utc)\n )\n if meetings:\n return Response({'error': 'You has already created meeting room'}, status=status.HTTP_400_BAD_REQUEST)\n meetings = Meeting.objects.filter(\n end_time__gte=datetime.now(timezone.utc)\n )\n meetings = meetings.values('doctor_id').annotate(\n last_meeting=Min(F('end_time'))\n ).order_by('last_meeting')\n all_doctors = HospitalUser.objects.filter(\n role=\"doctor\"\n )\n 
tmp_schedules = {}\n for doctor in all_doctors:\n tmp_schedules.update(\n {doctor.pk: {\"data\": None, \"doctor_id\": doctor.pk}}\n )\n for meeting in meetings:\n tmp_schedules.update(\n {meeting['doctor_pk']: {\"data\": meeting, \"doctor_id\": meeting['doctor_pk'], }}\n )\n available_schedules = []\n for tmp_schedule in tmp_schedules.values():\n if tmp_schedule[\"data\"]:\n schedules = DoctorWorkSchedule.objects.filter(user__pk=tmp_schedule['doctor_id'], end_time__gte=tmp_schedule['data']['last_meeting']).order_by('end_time')\n for schedule in schedules:\n if tmp_schedule['data']['start_time'] < datetime.now(timezone.utc) and datetime.now(timezone.utc) + timedelta(minutes=MEETING_DURATION) < tmp_schedule['data']['end_time']:\n start_time = datetime.now(timezone.utc)\n end_time = datetime.now(timezone.utc) + timedelta(minutes=MEETING_DURATION)\n available_schedules.append({\"doctor\": schedule.user, \"start_time\": start_time, \"end_time\": end_time})\n elif tmp_schedule['data']['start_time'] > datetime.now(timezone.utc) and tmp_schedule['data']['start_time'] + timedelta(minutes=MEETING_DURATION) < tmp_schedule['data']['end_time']:\n start_time = tmp_schedule['data']['start_time']\n end_time = tmp_schedule['data']['start_time'] + timedelta(minutes=MEETING_DURATION)\n available_schedules.append({\"doctor\": schedule.user, \"start_time\": start_time, \"end_time\": end_time})\n else:\n schedules = DoctorWorkSchedule.objects.filter(user__pk=tmp_schedule['doctor_id'], end_time__gte=datetime.now(timezone.utc)).order_by('end_time')\n for schedule in schedules:\n if schedule.start_time < datetime.now(timezone.utc) and datetime.now(timezone.utc) + timedelta(minutes=MEETING_DURATION) < schedule.end_time:\n start_time = datetime.now(timezone.utc)\n end_time = datetime.now(timezone.utc) + timedelta(minutes=MEETING_DURATION)\n available_schedules.append({\"doctor\": schedule.user, \"start_time\": start_time, \"end_time\": end_time})\n elif schedule.start_time > datetime.now(timezone.utc) and schedule.start_time + timedelta(minutes=MEETING_DURATION) < schedule.end_time:\n start_time = schedule.start_time\n end_time = schedule.start_time + timedelta(minutes=MEETING_DURATION)\n available_schedules.append({\"doctor\": schedule.user, \"start_time\": start_time, \"end_time\": end_time})\n\n if not available_schedules:\n return Response({'error': 'All doctors are buzy'}, status=status.HTTP_400_BAD_REQUEST)\n\n available_schedules = sorted(available_schedules, key=lambda x: x['start_time'])\n\n #get parameters\n data = self.request.data\n user_id = request.user.pk\n \n client = ZoomClient(ZOOM_API_KEY, ZOOM_API_SECRET)\n\n user_list_response = client.user.list()\n user_list = json.loads(user_list_response.content.decode(\"utf-8\"))\n\n for user in user_list['users']:\n zoom_user_id = user['id']\n\n data['user_id'] = zoom_user_id\n data['start_time'] = start_time\n data['duration'] = MEETING_DURATION\n zoom_meeting_data = data\n\n try:\n #create meeting on zoomus\n response = client.meeting.create(**zoom_meeting_data)\n zoom_meeting = json.loads(client.meeting.create(**zoom_meeting_data).content.decode(\"utf-8\"))\n print(zoom_meeting)\n #create meeting in database\n zoom_meeting['zoomus_meeting_id'] = zoom_meeting.pop('id')\n # zoom_meeting['unit_id'] = unit_id\n zoom_meeting['user_id'] = user_id\n zoom_meeting['end_time'] = end_time\n zoom_meeting.pop('type')\n zoom_meeting.pop('settings')\n \n print(Meeting.objects.all())\n meeting = Meeting.objects.create(**zoom_meeting)\n print(\"meeting\", meeting)\n 
except Exception as error:\n print(error)\n return Response({'error': self.CREATE_ERROR}, status=status.HTTP_400_BAD_REQUEST)\n\n data = MeetingSerializer(instance=meeting).data\n return Response(data, status=status.HTTP_201_CREATED)", "def test_update_appointment(self):\n add_user()\n db.session.commit()\n user = User.query.first()\n user_public_id = user.public_id\n add_event(user.id, create_availability())\n db.session.commit()\n event = Event.query.first()\n event_url = event.url\n start = dt.datetime.now(dt.timezone.utc) + dt.timedelta(days=30)\n add_appointment(event_id=event.id,\n participants=[create_participant()],\n start=start,\n status=True)\n db.session.commit()\n\n route = f'/users/{user_public_id}/events/{event_url}/appointments/' \\\n f'{start.isoformat()}'\n status = False\n response = self.api.patch(route,\n data=json.dumps({'status': status}),\n content_type='application/json')\n\n self.assertEqual(response.status_code, 200)\n\n data = json.loads(response.data.decode())\n self.assertEqual(data['message'], 'success')\n\n appointment = Appointment.query.first()\n self.assertEqual(appointment.status, status)", "def create_patient_appointment():\n if request.method == 'POST':\n patient_email = request.form['patient_email']\n doctor_email = request.form['doctor_email']\n date = request.form['date']\n time = request.form['time']\n\n response = requests.post(server_url + 'patient/create_appointment', json={\n 'patient_email': patient_email,\n 'doctor_email': doctor_email,\n 'date': date,\n 'time': time\n })\n\n response = response.json()\n\n if response.get('Status') == \"DOCTOR_HAS_AN_APPOINTMENT_SELECTED_TIME_SLOT\":\n return render_template('patients/appointment_failed.html')\n elif response.get('Status') == \"DOCTOR_IS_NOT_AVAILABLE_AT_THAT_TIME\":\n return render_template('patients/appointment_failed.html')\n elif response.get('Status') == \"INVALID_PATIENT_EMAIL\":\n return render_template('patients/appointment_failed.html')\n elif response.get('Status') == \"INVALID_DOCTOR_EMAIL\":\n return render_template('patients/appointment_failed.html')\n else:\n referer = request.referrer\n return redirect(referer, code=302)\n else:\n return render_template('patients/dashboard.html')" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Significant duration model by Abrahamson and Silva (1996), "Empirical ground motion models", report prepared for Brookhaven National Laboratory. Input
def abrahamson_silva_ds_1999(magnitude=7.0, distance=10.0, soil=True, duration_type='DS575H'):
    # map the duration_type to integer key
    dur_map = {'DS575H': 0, 'DS575V': 1, 'DS595H': 2, 'DS595V': 3}
    dur_tag = dur_map.get(duration_type.upper(), None)
    if dur_tag is None:
        print("SignificantDurationModel.abrahamson_silva_ds_1999: duration_type='DS575H','DS575V','DS595H','DS595V'?")
        return None, None
    # modeling coefficients
    beta = [3.2, 3.2, 3.2, 3.2]
    b1 = [5.204, 4.610, 5.204, 4.610]
    b2 = [0.851, 1.536, 0.851, 1.536]
    m_star = [6, 6, 6, 6]
    c1 = [0.805, 1.076, 0.805, 1.076]
    c2 = [0.063, 0.107, 0.063, 0.107]
    rc = [10, 10, 10, 10]
    Drat = [0.000, 0.000, 0.845, 0.646]
    sigma = [0.55, 0.46, 0.49, 0.45]
    # median
    if distance > rc[dur_tag]:
        ds_median = np.exp(np.log((np.exp(b1[dur_tag] + b2[dur_tag] *
                                          (magnitude - m_star[dur_tag])) /
                                   (10 ** (1.5 * magnitude + 16.05))) ** (-1 / 3) /
                                  (4.9e6 * beta[dur_tag]) +
                                  soil * c1[dur_tag] +
                                  c2[dur_tag] * (distance - rc[dur_tag])) + Drat[dur_tag])
    else:
        ds_median = np.exp(np.log((np.exp(b1[dur_tag] + b2[dur_tag] *
                                          (magnitude - m_star[dur_tag])) /
                                   (10 ** (1.5 * magnitude + 16.05))) ** (-1 / 3) /
                                  (4.9e6 * beta[dur_tag]) +
                                  soil * c1[dur_tag]) + Drat[dur_tag])
    # sigma
    ds_sigma = sigma[dur_tag]
    # return
    return np.log(ds_median), ds_sigma
[ "def residence_time(self):\r\n if self.model == \"exponential\":\r\n return self.model_params[1] / np.log(2)\r\n elif self.model == \"biexponential\":\r\n fraction1 = self.model_params[1]\r\n return (\r\n (fraction1 * self.model_params[2])\r\n + ((1.0 - fraction1) * self.model_params[3])\r\n ) / np.log(2)", "def model(params, t):\n Mc, E51, Re, Me = params\n\n kappa = 0.2\n\n # The time to peak luminosity, from Nakar & Piro 2014\n tp_day = 0.9 * (kappa/0.34)**(0.5) * \\\n E51**(-0.25) * (Mc)**(0.17) * (Me/0.01)**0.57\n tp = tp_day * 86400\n\n # The velocity in the extended material (cm/s)\n ve = 2E9 * E51**0.5 * Mc**(-0.35) * (Me/0.01)**(-0.15)\n\n # the expansion timescale in seconds\n te = Re/ve\n\n # The amount of energy passed into the extended material (erg)\n Ee = 4E49 * E51 * Mc**(-0.7) * (Me/0.01)**0.7\n\n # The light curve\n t_sec = t*86400\n L = (te*Ee/tp**2) * np.exp(-(t_sec*(t_sec+2*te))/(2*tp**2))\n\n # Return time in days\n return t, L\n\n\n # set parameters\n B = 1E14\n Pms = 20\n\n tm = 4 * B14**(-2) * Pms**2\n Lm = (Em/tm)/(1+t/tm)**2", "def time_dep_sens(self, exp_age, inf_age):\r\n # Convert exposure/infection age to days\r\n days_exp = exp_age/self.n_time_day\r\n days_inf = inf_age/self.n_time_day\r\n\r\n # Uses Chang et al's approximation (2020, Health Care Management Science)\r\n # of Kucirka et al's results (2020, Annals of Internal Medicine)\r\n if self.time_dep_type=='K':\r\n # Combine exposed + infectious time since model is for days since exposure\r\n days_since_exp = days_exp + days_inf - 1/self.n_time_day\r\n # Compute sensitivity\r\n if days_since_exp == 0:\r\n sens = 0\r\n elif days_since_exp <=21:\r\n sens = sigmoid(-29.966+37.713*np.log(days_since_exp)-14.452*np.power(np.log(days_since_exp), 2)+1.721*np.power(np.log(days_since_exp), 3))\r\n else:\r\n sens = sigmoid(6.878-2.436*np.log(days_since_exp))\r\n return sens\r\n\r\n # Uses nasal swab results from Wikramaratna et al (2020 Euro Surveillance)\r\n else:\r\n # From Wikramaratna supplemental materials with two entries prepended\r\n # First two entries are mirror of third and fourth entries and are designed\r\n # to account for time before symptom onset\r\n sens = [0.945897051, 0.956035396, 0.964345916, 0.956035396, 0.945897051,\r\n 0.933584589, 0.918713483, 0.900871215, 0.879635065, 0.854598785,\r\n 0.8254113590000001, 0.791827558, 0.753761722, 0.711341394, 0.664950473,\r\n 0.615247128, 0.563146104, 0.509754778, 0.456274387, 0.40390623000000003,\r\n 0.35374840399999996, 0.30670959900000005, 0.26345573099999997,\r\n 0.22439299899999998, 0.18968316500000004, 0.159281275, 0.132984471,\r\n 0.11048262799999997, 0.091403491, 0.07535069699999997, 0.061931225000000034,\r\n 0.05077403700000005, 0.041540205, 0.033927078000000055]\r\n sens_dict = dict(zip(range(len(sens)), sens))\r\n\r\n if days_inf!=0:\r\n if np.round(days_inf-1) in sens_dict.keys():\r\n return sens_dict[np.round(days_inf-1)]\r\n else:\r\n # Covers the edge case where someone is infectious for a really long time\r\n return 0\r\n else:\r\n # No probability of detection when exposed under this model\r\n return 0", "def calculate_time_of_simulation(self, model):\n return 1.0 * u.erg / self.calculate_luminosity_inner(model)", "def main():\n\n #\n # Generate waveform\n #\n\n print 'generating waveoform...'\n waveform = pmns_utils.Waveform('shen_135135_lessvisc')\n\n # Pick some extrinsic parameters\n ext_params = ExtParams(distance=1, ra=0.0, dec=0.0, polarization=0.0,\n inclination=0.0, phase=0.0, geocent_peak_time=0.0+5.0)\n\n # Construct the time 
series for these params\n waveform.make_wf_timeseries(theta=ext_params.inclination,\n phi=ext_params.phase)\n\n #\n # Generate IFO data\n #\n det1_data = DetData(waveform=waveform, ext_params=ext_params)\n\n from scipy import signal\n import pylab as pl\n\n pl.figure()\n pl.plot(det1_data.td_response.sample_times,det1_data.td_response.data)\n pl.plot(det1_data.td_signal.sample_times,det1_data.td_signal.data)\n\n pl.figure()\n f,p = signal.welch(det1_data.td_response.data, fs=1./det1_data.delta_t,\n nperseg=512)\n pl.loglog(f,np.sqrt(p))\n\n f,p = signal.welch(det1_data.td_signal.data, fs=1./det1_data.delta_t,\n nperseg=512)\n pl.loglog(f,np.sqrt(p))\n pl.ylim(1e-25,1e-21)\n pl.show()", "def radiation_measurement_analysis():\n import pint\n ureg = pint.UnitRegistry()\n\n mrem_h = ureg.parse_units('mrem') / ureg.hour\n m = ureg.parse_units('meters')\n s = ureg.parse_units('seconds')\n\n # Measurements of background radiation\n bg_dist = ureg.parse_expression('10 m') # estimate of how far away we are wrt background\n background_rows = [\n dict(vid=1, distance=bg_dist, rad=0.023 * mrem_h, capture_time=0.0 * s),\n dict(vid=1, distance=bg_dist, rad=0.022 * mrem_h, capture_time=0.0 * s),\n dict(vid=1, distance=bg_dist, rad=0.023 * mrem_h, capture_time=4.0 * s),\n dict(vid=1, distance=bg_dist, rad=0.021 * mrem_h, capture_time=5.0 * s),\n dict(vid=1, distance=bg_dist, rad=0.023 * mrem_h, capture_time=11.0 * s),\n dict(vid=1, distance=bg_dist, rad=0.023 * mrem_h, capture_time=16.0 * s),\n dict(vid=1, distance=bg_dist, rad=0.024 * mrem_h, capture_time=20.0 * s),\n ]\n\n # Measurements of sample radiation\n esp_dist = ureg.parse_expression('1 inch').to(m) / 2 # estimate of how far we are from the sample when very close\n dist0_rows = [\n dict(vid=2, distance=esp_dist, rad=0.060 * mrem_h, capture_time=0.0 * s),\n dict(vid=2, distance=esp_dist, rad=0.061 * mrem_h, capture_time=3.0 * s),\n dict(vid=2, distance=esp_dist, rad=0.060 * mrem_h, capture_time=5.0 * s),\n dict(vid=2, distance=esp_dist, rad=0.059 * mrem_h, capture_time=9.0 * s),\n dict(vid=2, distance=esp_dist, rad=0.060 * mrem_h, capture_time=10.0 * s),\n dict(vid=2, distance=esp_dist, rad=0.059 * mrem_h, capture_time=11.0 * s),\n dict(vid=2, distance=esp_dist, rad=0.057 * mrem_h, capture_time=12.0 * s),\n dict(vid=2, distance=esp_dist, rad=0.058 * mrem_h, capture_time=13.0 * s),\n dict(vid=2, distance=esp_dist, rad=0.059 * mrem_h, capture_time=14.0 * s),\n dict(vid=2, distance=esp_dist, rad=0.060 * mrem_h, capture_time=15.0 * s),\n dict(vid=2, distance=esp_dist, rad=0.061 * mrem_h, capture_time=16.0 * s),\n dict(vid=2, distance=esp_dist, rad=0.062 * mrem_h, capture_time=18.0 * s),\n dict(vid=2, distance=esp_dist, rad=0.062 * mrem_h, capture_time=18.0 * s),\n dict(vid=2, distance=esp_dist, rad=0.064 * mrem_h, capture_time=20.0 * s),\n dict(vid=2, distance=esp_dist, rad=0.065 * mrem_h, capture_time=22.0 * s),\n dict(vid=2, distance=esp_dist, rad=0.066 * mrem_h, capture_time=23.0 * s),\n dict(vid=2, distance=esp_dist, rad=0.065 * mrem_h, capture_time=24.0 * s),\n dict(vid=2, distance=esp_dist, rad=0.063 * mrem_h, capture_time=25.0 * s),\n dict(vid=2, distance=esp_dist, rad=0.065 * mrem_h, capture_time=26.0 * s),\n dict(vid=2, distance=esp_dist, rad=0.064 * mrem_h, capture_time=27.0 * s),\n dict(vid=2, distance=esp_dist, rad=0.064 * mrem_h, capture_time=27.0 * s),\n dict(vid=2, distance=esp_dist, rad=0.065 * mrem_h, capture_time=28.0 * s),\n dict(vid=2, distance=esp_dist, rad=0.063 * mrem_h, capture_time=30.0 * s),\n ]\n\n dist0_v2_rows = [\n 
dict(vid=3, distance=esp_dist, rad=0.012 * mrem_h, capture_time=0.0 * s),\n dict(vid=3, distance=esp_dist, rad=0.011 * mrem_h, capture_time=1.0 * s),\n dict(vid=3, distance=esp_dist, rad=0.013 * mrem_h, capture_time=8.0 * s),\n dict(vid=3, distance=esp_dist, rad=0.013 * mrem_h, capture_time=9.0 * s),\n ]\n\n close_rows = [\n dict(vid=4, distance=0.5 * m, rad=0.013 * mrem_h, capture_time=0.0 * s),\n dict(vid=4, distance=0.5 * m, rad=0.014 * mrem_h, capture_time=5.0 * s),\n dict(vid=4, distance=0.5 * m, rad=0.012 * mrem_h, capture_time=7.0 * s),\n dict(vid=4, distance=0.5 * m, rad=0.011 * mrem_h, capture_time=15.0 * s),\n dict(vid=4, distance=0.5 * m, rad=0.012 * mrem_h, capture_time=16.0 * s),\n ]\n\n mid_rows = [\n dict(vid=5, distance=1.0 * m, rad=0.014 * mrem_h, capture_time=0.0 * s),\n dict(vid=5, distance=1.0 * m, rad=0.015 * mrem_h, capture_time=5.0 * s),\n dict(vid=5, distance=1.0 * m, rad=0.013 * mrem_h, capture_time=10.0 * s),\n ]\n\n far_rows = [\n dict(vid=6, distance=2.0 * m, rad=0.023 * mrem_h, capture_time=0.0 * s),\n dict(vid=6, distance=2.0 * m, rad=0.025 * mrem_h, capture_time=0.1 * s),\n ]\n\n # guess_dist = ureg.parse_expression('0.3 m') # estimate of how far away we are wrt background\n # guess_rows = [\n # dict(vid=9, distance=guess_dist, rad=0.030 * mrem_h, capture_time=0.0 * s),\n # dict(vid=9, distance=guess_dist, rad=0.041 * mrem_h, capture_time=2.0 * s),\n # dict(vid=9, distance=guess_dist, rad=0.051 * mrem_h, capture_time=3.0 * s),\n # ]\n\n rows = dist0_rows + background_rows + dist0_v2_rows + close_rows + mid_rows + far_rows\n # rows += guess_rows\n\n import pandas as pd\n import numpy as np\n table = pd.DataFrame(rows)\n\n # Ensure comparable units\n units = {\n 'rad': mrem_h,\n 'distance': m,\n 'capture_time': s,\n }\n for key, unit in units.items():\n table[key] = table[key].apply(lambda c: c.to(unit).m)\n table['rad'] = table['rad'].astype(float)\n table['distance'] = table['distance'].astype(float)\n\n # Weight each measurement based on the amount of time the measurement was\n # sustained in the video.\n average_rad_rows = []\n for vid, group in table.groupby('vid'):\n from statsmodels.stats.weightstats import DescrStatsW\n weights = (-1 * group['capture_time'].diff(periods=-1).fillna(0)) / group['capture_time'].iloc[-1]\n table.loc[group.index, 'weight'] = weights\n values = group['rad']\n weighted_stats = DescrStatsW(values, weights=weights, ddof=0)\n dists = group['distance'].unique()\n assert len(dists) == 1\n average_rad_rows.append({\n 'vid': vid,\n 'distance': dists[0],\n 'rad_mean': weighted_stats.mean,\n 'rad_std': weighted_stats.std,\n })\n stats_table = pd.DataFrame(average_rad_rows)\n\n bg_row = stats_table.loc[stats_table['distance'].argmax()]\n fg_row = stats_table.loc[stats_table['distance'].argmin()]\n\n # -------------------\n ADD_DUMMY_VALUES = 0\n if ADD_DUMMY_VALUES:\n # Hack: because we don't have enough samples we can fudge the value\n # knowning that the value should be the background radiation in the\n # limit.\n\n dummy_measurements = []\n extra_support = 1\n for idx in range(3, 3 + extra_support):\n dummy_row = {\n 'vid': -idx,\n 'distance': bg_row['distance'] + idx,\n 'rad_mean': bg_row['rad_mean'],\n 'rad_std': 0.01,\n }\n dummy_measurements.append(dummy_row)\n\n # also add an extra value close to the sample\n rad_bg = bg_row['rad_mean']\n rad_above_bg = fg_row['rad_mean'] - rad_bg\n dummy_row = {\n 'vid': -1,\n 'distance': fg_row['distance'] / 2,\n 'rad_mean': rad_bg + (rad_above_bg * 4),\n 'rad_std': 0.5,\n }\n 
dummy_measurements.append(dummy_row)\n\n # dummy_row = {\n # 'vid': -2,\n # 'distance': fg_row['distance'] / 4,\n # 'rad_mean': rad_bg + (rad_above_bg * 16),\n # }\n # dummy_measurements.append(dummy_row)\n\n dummy_stats = pd.DataFrame(dummy_measurements)\n dummy_stats['weight'] = 0.5\n stats_table['weight'] = 1.0\n stats_table2 = pd.concat([stats_table, dummy_stats]).reset_index(drop=True).sort_values('distance')\n else:\n stats_table2 = stats_table\n # -------------------\n\n import scipy\n scipy.optimize.curve_fit\n\n # Because we know the radiation should follow an inverse square law wrt to\n # distance, we can fit a polynomial of degree 2 (parabola) to interpolate /\n # extrapolate the **inverse** values.\n x = stats_table2['distance'].values\n y = stats_table2['rad_mean'].values\n s = stats_table2['rad_std'].values\n\n # Model the squared falloff directly\n def invsquare(x, a, b):\n return a * (1 / (0.01 + x ** 2)) + b\n # bg_row['rad_mean']\n # Use curve_fit to constrain the first coefficient to be zero\n try:\n coef = scipy.optimize.curve_fit(invsquare, x, y, sigma=s, method='trf')[0]\n except Exception as ex:\n coef = None\n print(f'ex={ex}')\n\n # Also fit one to the raw weighted points as a sanity check\n # inv_poly2 = Polynomial.fit(table['distance'], 1 / table['rad'], w=table['weight'], deg=2)\n\n import kwplot\n sns = kwplot.autosns()\n plt = kwplot.autoplt()\n # ax = sns.boxplot(data=table, x='distance', y='rad', width=0.1)\n\n # Add in points to show each observation\n ax = sns.relplot(x=\"distance\", y=\"rad\", data=table, size=4, color=\".3\",\n linewidth=0, alpha=0.5, palette='deep')\n\n ax = plt.gca()\n ax.set_xlabel('distance from sample ({})'.format(str(units['distance'])))\n ax.set_ylabel('radiation dosage ({})'.format(str(units['rad'])))\n\n max_meters = 10\n\n extrap_x = np.linspace(0, max_meters, 1000)\n if coef is not None:\n extrap_y1 = invsquare(extrap_x, *coef)\n # extrap_y2 = 1 / inv_poly2(extrap_x)\n ax.plot(stats_table2['distance'].values, stats_table2['rad_mean'].values, 'rx')\n ax.plot(stats_table['distance'].values, stats_table['rad_mean'].values, 'bo')\n ax.plot(extrap_x, extrap_y1, '--')\n ax.set_ylim(0.001, 0.1)\n ax.set_yscale('log')\n # ax.plot(extrap_x, extrap_y2, '--')", "def calculate_ucsc_exposure_time(vmag, precision, elevation, seeing, bmv, decker=\"W\"):\n vmag = np.array(vmag)\n precision = np.array(precision)\n bmv = np.array(bmv)\n precision = np.array(precision)\n\t\t\n\t# Now lets calculate the exposure times\n\t\n\t# Desired I2 counts for precision\n i2counts = getI2_K(precision)\n mstars = np.where(bmv > 1.2)\n if len(mstars) > 0:\n i2counts[mstars] = getI2_M(precision[mstars])\n\n # minimum I2 counts so exposures are not rejected by P. 
Butler's DRP\n mini2_idx = np.where(i2counts < MIN_I2)\n if len(mini2_idx) > 0:\n i2counts[mini2_idx] = MIN_I2\n\t\n\t# Exposure Meter counts to reach desired I2 counts\n exp_counts = getEXPMeter(i2counts, bmv)\n #\texp_counts = 1e9\n\t# Exposure time to reach desired I2 counts\n exp_time = getEXPTime(i2counts, vmag, bmv, elevation, seeing, decker=decker)\n\t\n return exp_time, exp_counts, i2counts", "def analyze(video, write_output=True, label=False, rate=False, model=None):\r\n # Initiate an empty list of tracked waves, ultimately recognized\r\n # waves, and a log of all tracked waves in each frame.\r\n tracked_waves = []\r\n recognized_waves = []\r\n wave_log = []\r\n ratings = []\r\n\r\n # Initialize frame counters.\r\n frame_num = 1\r\n num_frames = int(video.get(cv2.CAP_PROP_FRAME_COUNT))\r\n fps = int(video.get(cv2.CAP_PROP_FPS))\r\n\r\n # If an output video is to be made:\r\n if write_output is True:\r\n out = mwt_io.create_video_writer(video)\r\n\r\n # Initiate a timer for program performance:\r\n time_start = time.time()\r\n\r\n\r\n # The main loop is here:\r\n while True:\r\n\r\n # Write status update to stdio.\r\n status_update(frame_num, num_frames)\r\n\r\n # Read frames until end of clip.\r\n successful_read, original_frame = video.read()\r\n if not successful_read:\r\n break\r\n\r\n # Preprocess frames.\r\n analysis_frame = mwt_preprocessing.preprocess(original_frame)\r\n \r\n # Detect all sections.\r\n sections = mwt_detection.detect_sections(analysis_frame,\r\n frame_num,\r\n original_frame)\r\n\r\n # Track all waves in tracked_waves.\r\n mwt_tracking.track(tracked_waves,\r\n analysis_frame,\r\n frame_num,\r\n num_frames,\r\n original_frame)\r\n \r\n\r\n # Write tracked wave stats to wave_log.\r\n for wave in tracked_waves:\r\n wave_log.append((frame_num, wave.name, wave.mass, wave.max_mass,\r\n wave.displacement, wave.max_displacement,\r\n wave.birth, wave.death, wave.recognized,\r\n wave.centroid))\r\n\r\n # Remove dead waves from tracked_waves.\r\n dead_recognized_waves = [wave for wave in tracked_waves \r\n if wave.death is not None\r\n and wave.recognized is True]\r\n recognized_waves.extend(dead_recognized_waves)\r\n\r\n # Label the dead waves, if label flag was specified\r\n if label:\r\n mwt_label.label(dead_recognized_waves, fps, dead=True)\r\n\r\n # Rate the dead waves, if rate flag was specified\r\n if rate:\r\n mwt_rate.rate(ratings, dead_recognized_waves, model)\r\n\r\n tracked_waves = [wave for wave in tracked_waves if wave.death is None]\r\n\r\n # Remove duplicate waves, keeping earliest wave.\r\n tracked_waves.sort(key=lambda x: x.birth, reverse=True)\r\n for wave in tracked_waves:\r\n other_waves = [wav for wav in tracked_waves if not wav == wave]\r\n if mwt_tracking.will_be_merged(wave, other_waves):\r\n wave.death = frame_num\r\n tracked_waves = [wave for wave in tracked_waves if wave.death is None]\r\n tracked_waves.sort(key=lambda x: x.birth, reverse=False)\r\n\r\n # Check sections for any new potential waves and add to\r\n # tracked_waves.\r\n for section in sections:\r\n if not mwt_tracking.will_be_merged(section, tracked_waves):\r\n tracked_waves.append(section)\r\n\r\n # Label all current waves if label flag was specified\r\n if label:\r\n mwt_label.label(tracked_waves, fps)\r\n\r\n # Rate all current waves if rate flag was specified\r\n if rate:\r\n mwt_rate.rate(ratings, tracked_waves, model)\r\n\r\n # analysis_frame = cv2.cvtColor(analysis_frame, cv2.COLOR_GRAY2RGB)\r\n\r\n if write_output is True:\r\n # Draw detection boxes on original frame 
for visualization.\r\n original_frame = mwt_io.draw(\r\n tracked_waves,\r\n original_frame,\r\n #1)\r\n 1/mwt_preprocessing.RESIZE_FACTOR)\r\n\r\n # Write frame to output video.\r\n # out.write(original_frame)\r\n #out.write(analysis_frame)\r\n\r\n # Increment the frame count.\r\n frame_num += 1\r\n\r\n # Stop timer here and calc performance.\r\n time_elapsed = (time.time() - time_start)\r\n performance = (num_frames / time_elapsed)\r\n\r\n if rate:\r\n final_rating = mwt_rate.get_final_rating(ratings)\r\n print (\"Final rating for this video: {}\".format(final_rating))\r\n\r\n # Provide update to user here.\r\n if recognized_waves is not None:\r\n print (\"{} wave(s) recognized.\".format(len(recognized_waves)))\r\n print (\"Program performance: %0.1f frames per second.\" %performance)\r\n for i, wave in enumerate(recognized_waves):\r\n print (\"Wave #{}: ID: {}, Birth: {}, Death: {},\" \\\r\n + \" Max Displacement: {}, Max Mass: {}\".format(\r\n i+1, wave.name, wave.birth, wave.death,\r\n wave.max_displacement, wave.max_mass))\r\n else:\r\n print (\"No waves recognized.\")\r\n\r\n # Clean-up resources.\r\n if write_output is True:\r\n out.release()\r\n\r\n return recognized_waves, wave_log, performance", "def determine_incident_wave_values():\n v_1 = v00*((gi+1.0)*p0+(gi-1.0)*p1 + 2.0 * gi * p00_infty) / ((gi+1.0)*p1 + (gi-1.0)*p0 + 2.0 * gi * p00_infty)\n r_1 = 1.0/ v_1\n\n ui = v00 * np.sqrt((p1-p0)/(v00-v_1)) ! incident shock speed\n\n u1 = ui * (v00-v_1) / v00 ! piston speed\n\n c00 = np.sqrt(gi*(p0 + p00_infty) / r00) ! sound speed in unshocked gas\n c11 = np.sqrt(gt * (p0 + p0_infty) / r0) ! sound speed in unshocked material\n rm0 = ui / c00", "def main():\n max_v = np.amax(vLF1) * 3.6 # in km/h\n tmax_v = tLF1[np.argmax(vLF1)] # in seconds\n final_v = vLF2[-1] # in m/s\n total_time = tLF2[-1] # in seconds\n\n print('The maximum velocity is {:f} km/h'.format(max_v))\n print('The time when max velocity is reached is {:f} s'.format(tmax_v))\n print('The final velocity on the ground is {:f} m/s'.format(final_v))\n print('The total duration of the jump is {:f} s'.format(total_time))\n\n plt.subplot(2, 1, 1)\n plt.plot(tLF1, zLF1, label='Phase 1')\n plt.plot(tLF2, zLF2, label='Phase 2', color='red', linewidth=2.0)\n plt.title('Simulation of Felix Baumgartner\\'s 2012 Jump')\n plt.ylabel('Altitude (m)')\n plt.grid('on')\n plt.legend()\n\n plt.subplot(2, 1, 2)\n plt.plot(tLF1, vLF1, label='Phase 1')\n plt.plot(tLF2, vLF2, label='Phase 2', color='red', linewidth=2.0)\n plt.ylabel('Velocity (m/s) [Down]')\n plt.xlabel('Time (s)')\n plt.grid('on')\n plt.legend()\n\n plt.savefig('skydive.pdf')\n plt.show()\n\n return (max_v, tmax_v, final_v, total_time)", "def __init__(self):\n self.name = \"Clinical outcome model\"\n self.thrombectomy_time_no_effect = 8 * 60\n self.thrombolysis_time_no_effect = 6.3 * 60\n self.maximum_permitted_time_to_thrombectomy = 360\n self.maximum_permitted_time_to_thrombolysis = 270", "def vlass_stars(duration, n_beams):\n n_pointings = duration//4.2\n n_observed = n_pointings*n_beams\n return n_observed", "def frames(velocity=286.7, datestring=\"2005/11/01/00:00:00\",\n ra=\"05:35:28.105\", dec=\"-069.16.10.99\", equinox=\"J2000\", \n observatory=\"ALMA\", prec=4, verbose=True, myme='', myqa='',\n restFreq=345.79599, veltype='optical'):\n localme = False\n localqa = False\n if (myme == ''):\n myme = createCasaTool(metool)\n localme = True\n if (myqa == ''):\n myqa = createCasaTool(qatool)\n localqa = True\n if (dec.find(':') >= 0):\n dec = dec.replace(':','.')\n 
position = myme.direction(equinox, ra, dec)\n obstime = myme.epoch('TAI', datestring)\n\n if (veltype.lower().find('opt') == 0):\n velOpt = create_casa_quantity(myqa,velocity,\"km/s\")\n dopp = myme.doppler(\"OPTICAL\",velOpt)\n # CASA doesn't do Helio, but difference to Bary is hopefully small\n rvelOpt = myme.toradialvelocity(\"BARY\",dopp)\n elif (veltype.lower().find('rad') == 0):\n rvelOpt = myme.radialvelocity('LSRK',str(velocity)+'km/s')\n else:\n print \"veltype must be 'rad'io or 'opt'ical\"\n return\n\n myme.doframe(position)\n myme.doframe(myme.observatory(observatory))\n myme.doframe(obstime)\n myme.showframe()\n\n rvelRad = myme.measure(rvelOpt,'LSRK')\n doppRad = myme.todoppler(\"RADIO\",rvelRad) \n restFreq = parseFrequencyArgumentToGHz(restFreq)\n freqRad = myme.tofrequency('LSRK',doppRad,me.frequency('rest',str(restFreq)+'GHz'))\n\n lsrk = qa.tos(rvelRad['m0'],prec=prec)\n rvelTop = myme.measure(rvelOpt,'TOPO')\n doppTop = myme.todoppler(\"RADIO\",rvelTop) \n freqTop = myme.tofrequency('TOPO',doppTop,me.frequency('rest',str(restFreq)+'GHz'))\n if (localme):\n myme.done()\n if (localqa):\n myqa.done()\n topo = qa.tos(rvelTop['m0'],prec=prec)\n velocityDifference = 0.001*(rvelRad['m0']['value']-rvelTop['m0']['value'])\n frequencyDifference = freqRad['m0']['value'] - freqTop['m0']['value']\n return(freqTop['m0']['value'], velocityDifference, frequencyDifference)", "def muontrg_efficiencies():\r\n# to be updated with new numbers:\r\n ejpsi_trg = Jpsi_trg\r\n ejpsi_trg.add_relative_error(0.03) # TISTOS Justine 140711\r\n ebsmm_trg.add_relative_error(0.02) # Extra lines Justine 140711\r\n ebsmm_trg = BmmE_trg\r\n ebsmm_trg.add_relative_error(0.03) # TISTOS Justine 140711\r\n ebsmm_trg.add_relative_error(0.025) # Extra lines Justine 120711\r\n return ejpsi_trg,ebsmm_trg", "def run_model_cma(parameters, forcing, path_to_shapefile, path_to_dem, path_to_nlcd, end_year):\n\n \n \n model = BMI_HBVmountain(forcing, path_to_shapefile, path_to_dem, path_to_nlcd)\n config_file = model.setup(forcing_netcdf=forcing, bare_parameters= Parameters(parameters[8], parameters[6], 0, 0, parameters[4],\n parameters[1], parameters[2], parameters[3], parameters[7], parameters[0]),\n forest_parameters=Parameters(parameters[11], parameters[6], 0, parameters[9], parameters[4],\n parameters[1], parameters[2], parameters[3], parameters[10], parameters[0]),\n grass_parameters= Parameters(parameters[14], parameters[6], 0, parameters[12], parameters[4],\n parameters[1], parameters[2], parameters[3], parameters[13], parameters[0]),\n rip_parameters= Parameters(parameters[17], parameters[6], 0, parameters[15], parameters[18],\n parameters[1], parameters[2], parameters[3], parameters[16], parameters[0]),\n slow_parameters= Slow_Paramters(parameters[5], parameters[19]))\n model.initialize(config_file)\n\n\n\n\n Discharge = []\n timestamp = []\n while (model.get_value_ptr('Current_Date') < (datetime.date(end_year, 12, 31))): \n model.update()\n timestamp.append(model.get_value_ptr('Current_Date'))\n Discharge.append(model.get_value_ptr('Discharge'))\n\n simulated_discharge_df = pd.DataFrame(\n {'streamflow': Discharge},\n index=pd.to_datetime(timestamp)\n )\n \n precipitation = generate_forcing_from_NETCDF(forcing).prec\n model.finalize()\n return simulated_discharge_df.streamflow, precipitation", "def printLatestMeasurement(self): \n data = self.tristimulus[len(self.tristimulus)-1]\n x = data[0]\n y = data[1]\n L = data[2]\n print\"(x,y) = ({0:.4f}, {1:.4f}), L = {2:.4f} cd/m2 ({3:.4f} fL)\".format( x, y, L, 
0.291863508*L)", "def computeTm(self):\n #first step is finding the derivative series of the well\n x = self.temperatures\n if self.fluorescence == None:\n self.Tm = None\n return\n y = self.fluorescence\n \n xdiff = np.diff(x)\n dydx = -np.diff(y)/xdiff\n #the derivative series, has one less index since there is one fewer differences than points\n seriesDeriv = pandas.Series(dydx, x[:-1])\n \n #now that we have the derivative series, we can find the Tm\n lowestPoint = 0\n lowestPointIndex = None\n \n #gets number of signchanges between max and min of the curve, used to determin if the curve\n #is complex or not\n lowestPoint2 = 1\n lowestIndex2 = None\n highestPoint = 0\n highestIndex = None\n previous = None\n for i, value in enumerate(self.fluorescence[:-1]):\n if value > highestPoint:\n highestPoint = value\n highestIndex = i\n if highestIndex == 0 :\n highestPoint = 0\n highestIndex = None\n for i, value in enumerate(self.fluorescence[:-1]):\n if value<lowestPoint2:\n lowestPoint2 = value\n lowestIndex2 = i\n for i, value in enumerate(self.fluorescence[:-1]):\n if i < lowestIndex2:\n continue\n if value > highestPoint:\n highestPoint = value\n highestIndex = i\n else:\n for i, value in enumerate(self.fluorescence[:-1]):\n if i > highestIndex:\n break\n if value<lowestPoint2:\n lowestPoint2 = value\n lowestIndex2 = i\n signChange = False\n for ind in seriesDeriv.index[lowestIndex2+1:highestIndex]:\n \n if previous:\n if seriesDeriv[ind] + SIGN_CHANGE_THRESH < 0 and previous - SIGN_CHANGE_THRESH > 0:\n signChange = True\n if seriesDeriv[ind] - SIGN_CHANGE_THRESH > 0 and previous + SIGN_CHANGE_THRESH < 0:\n signChange = True\n # if seriesDeriv[ind] == 0:\n # signChangeCount += 1\n previous = seriesDeriv[ind]\n\n \n #finding the lowest point and its index on the derivative series\n #only search for Tm up to 90degrees, since last part is hard to predict\n #and often gives false positives\n ignoreNum = int(len(seriesDeriv.index)*0.125)\n for ind in seriesDeriv.index[:-ignoreNum]:\n if seriesDeriv[ind]<lowestPoint:\n lowestPoint = seriesDeriv[ind]\n lowestPointIndex = ind\n \n #TODO working, tms not steep enough added to complex\n #if the slope is not steep enough, tm remains saved, but curve is grouped with the\n #complex curves (now known as the unreliable group)\n #if lowestPoint > -0.000001 / (normalisationFactor / saturation max point of all curves thing):\n # print self.name, 'lowestpoint too small', lowestPoint\n # self.complex = True\n\n #if lowest point is the first index, then no curve fit is required\n if lowestPointIndex == seriesDeriv.index[0]:\n tm = lowestPointIndex\n self.Tm = tm\n \n #set complex to true if curve was complex\n if signChange:\n self.complex = True\n return\n \n #could not find any Tm\n if lowestPointIndex == None:\n self.Tm = None\n \n #if no tm, the curve hopefully be picked up as a monotonic/in the noise/saturated/outlier\n #however, if this does not happen, the curve remains as complex\n self.complex = True\n return \n \n #the indices in the series either side of the lowest index\n #note the first list is indexed e.g. 
list[i] where i is the section using .index\n leftIndex = [ind for ind in seriesDeriv.index][[ind for ind in seriesDeriv.index].index(lowestPointIndex)-1]\n rightIndex = [ind for ind in seriesDeriv.index][[ind for ind in seriesDeriv.index].index(lowestPointIndex)+1]\n \n \n #matrices used to fit a parabola to the 3 points\n Y=[seriesDeriv[leftIndex],\n seriesDeriv[lowestPointIndex],\n seriesDeriv[rightIndex]]\n \n A=[[leftIndex**2, leftIndex, 1],\n [lowestPointIndex**2, lowestPointIndex, 1],\n [rightIndex**2, rightIndex, 1]]\n \n #solve for b, in the form Y=Ab\n (a,b,c) = np.linalg.solve(A,Y)\n \n #initialise tm to left most point of relevant curve\n tm=seriesDeriv[leftIndex]\n tmValue=0\n #make tm the lowest point on the fitted parabola rounded to nearest 0.01\n for x in np.arange(leftIndex,rightIndex,0.01):\n point = (a*(x**2) + b*x + c)\n if tmValue > point:\n tmValue = point\n tm = x\n self.Tm = tm\n \n #again check for complex shape before returning\n if signChange:\n self.complex = True\n\n\n averagePoint = (lowestPoint2 +highestPoint) / 2\n i = lowestIndex2\n while self.fluorescence[i]<averagePoint:\n i += 1;\n\n # estimates tm by another method and if the difference is too large the curve is considred complex\n if (self.temperatures[i] -self.Tm)**2 > 5**2:\n self.complex=True\n return", "def dur(row):\n D = 0\n for i in row[\"Time Adj\"]:\n D += i*row[\"Cpn\"]*math.exp(-i*row[\"YTM\"])\n D += i*100*math.exp(-i*row[\"YTM\"])\n return D/row[\"Midpoint\"]", "def test_PerfectModelEnsemble_smooth_carries_lead_attrs(\r\n perfectModelEnsemble_initialized_control_1d_ym_cftime,\r\n):\r\n pm = perfectModelEnsemble_initialized_control_1d_ym_cftime\r\n pm_smooth = pm.smooth({\"lead\": 4}, how=\"mean\")\r\n assert (\r\n pm_smooth.verify(metric=\"rmse\", comparison=\"m2e\", dim=\"init\").lead.attrs[\r\n \"units\"\r\n ]\r\n == \"years\"\r\n )" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Make an n-bars dataframe looking back over the past n bars. The row size of `df` must be greater than or equal to `n_bars + 1`, or a ValueError is raised.
def make_nbars_past(df: pd.DataFrame, n_bars: int, cols: List[str] = ['Close'], datetime_col: Union[str, None] = 'Date') -> pd.DataFrame:
    if df.shape[0] < n_bars + 1:
        raise ValueError(
            f'row size of the df (={df.shape[0]}) must be greater than or equal to n_bars + 1 (={n_bars + 1})')
    df = df.rename(columns={col: f'{col}{n_bars}' for col in cols})

    for i in reversed(range(n_bars)):
        inc = n_bars - i
        for col in cols:
            df[f'{col}{i}'] = df[f'{col}{n_bars}'][inc:].append(
                pd.Series([np.nan] * inc)).reset_index(drop=True)

    # correct bar date (or datetime)
    if datetime_col is not None:
        df[datetime_col] = df[datetime_col][n_bars:].append(
            pd.Series([np.nan] * n_bars)).reset_index(drop=True)

    df = df.dropna()

    return df
[ "def make_nbars_future(df: pd.DataFrame, n_bars: int, cols: List[str] = ['Close'], datetime_col: Union[str, None] = 'Date') -> pd.DataFrame:\n if df.shape[0] < n_bars + 1:\n raise ValueError(\n f'row size of the df (={df.shape[0]}) must be greater than or equal to n_bars + 1 (={n_bars + 1})')\n df = df.rename(columns={col: f'{col}0' for col in cols})\n\n for i in range(1, n_bars+1):\n for col in cols:\n df[f'{col}{i}'] = df[f'{col}0'][i:].append(\n pd.Series([np.nan]*i)).reset_index(drop=True)\n\n df = df.dropna()\n\n return df", "def get_latest_bars(self, symbol, N=1):\n raise NotImplementedError(\"Should implement get_latest_bars()\")", "def setBarGroups(ngroups, gap):\n dislin.bargrp(ngroups, gap)", "def bar_chart(self, df, n_groups, dict):\n fig, ax = plt.subplots()\n # choose bar width (standard 0.8 chosen)\n bar_width = 0.35\n # get an index to set the ticks for the x axis\n\n index = np.arange(n_groups)\n indexes = df.index.tolist()\n print(indexes)\n df[\"index\"] = indexes\n\n # make barchart for permutation test\n ax.bar(index, df[\"perm\"], bar_width, color='b', linewidth=4,\n label='Permutation test')\n # make barchart for t-test\n ax.bar(index + bar_width, df[\"t_test\"], bar_width, color='r',\n label='t-test')\n\n ax.set_xlabel(dict[\"xlabel\"])\n ax.set_ylabel(dict[\"ylabel\"])\n ax.set_title(dict[\"title\"])\n ax.set_xticks(index + bar_width / 2)\n ax.set_xticklabels(dict[\"xtickslabels\"])\n ax.legend()\n\n fig.tight_layout()\n plt.show()", "def take(\n self,\n df: DataFrame,\n n: int,\n presort: str,\n na_position: str = \"last\",\n partition_spec: PartitionSpec = EMPTY_PARTITION_SPEC,\n metadata: Any = None,\n ) -> DataFrame: # pragma: no cover\n pass", "def tail(self, n: int = 5) -> 'DataFrame':\n return self[-n:, :] # type: ignore", "def imbalance_bar(data,ET_window,P_window, warm_up_len = 100,mode=\"TIB\"):\n assert mode in [\"TIB\",\"VIB\"], \"please enter mode of imbalance bar: TIB/VIB\"\n if mode == \"TIB\":\n data = _preprocess(data)\n else:\n data = _preprocess(data, need_vol=True)\n\n N = data.shape[0]\n b_t = _direction(data[\"price\"])\n if mode == \"VIB\":\n b_t = np.array(b_t * data[\"vol\"])\n E_T = warm_up_len\n E_theta = E_T * 0.5 # without prior knowledge it's reasonable to assume P(b_t==1) = 0.5\n\n # length of first bar\n t0 = np.where(abs(np.cumsum(b_t))>=E_theta)[0]\n if len(t0) == 0:\n raise ValueError(\"No such bar can be created!\")\n\n bar = [t0[0]+1]\n bar_len = 0\n current_loc = sum(bar)\n while True:\n E_T = _EMA(bar, ET_window)[-1]\n P_estimate = _EMA(b_t[:current_loc], P_window)[-1]\n E_theta = E_T * abs(P_estimate)\n\n theta_t = abs(np.cumsum(b_t[current_loc:]))\n increment = np.where(theta_t >= E_theta)[0] # np.where() will return a tuple\n \n if len(increment)==0: # if can't find any appropriate bar\n bar.append(data.shape[0] - current_loc)\n break \n if bar[bar_len] + (increment[0]+1) >= N:\n bar.append(data.shape[0] - current_loc)\n break\n bar.append(increment[0]+1)# python start from 0 but we want to store the length of each bar\n current_loc += (increment[0]+1)\n bar_len += 1\n result = _bar2df(bar,data)\n return result", "def chunk_df(df, n):\n for i in range(0, df.shape[0], n):\n yield df[i:i + n]", "def limit(self, n=None):\n if n:\n self.entries = self.entries[-n:]", "def repeat(df, n):\n return pd.concat([df] * n, ignore_index=True)", "def filled_bar(df, rank=\"Phylum\", figsize=(16, 6), drop=0.01):\n summarized = df.groupby([\"id\", rank])[\"count\"].sum().reset_index()\n summarized[\"percent\"] = 
summarized.groupby(\"id\")[\"count\"].apply(\n lambda x: x / x.sum())\n summarized = summarized.pivot(index=\"id\", columns=rank, values=\"percent\")\n rank_means = summarized.mean()\n rank_order = (rank_means[rank_means > drop].\n sort_values(ascending=False).index)\n id_order = summarized[rank_order[0]].sort_values(ascending=False).index\n summarized = summarized.reindex(id_order).reindex(rank_order, axis=1)\n summarized.index = range(summarized.shape[0])\n\n ax = summarized.plot(kind=\"area\", stacked=True, legend=False,\n figsize=figsize)\n ax.set_ylim(0, 1)\n ax.legend(bbox_to_anchor=(1.04, 1), loc=\"upper left\")\n return ax", "def popn(self, n):\n if n:\n ret = self.frame.data_stack[-n:]\n self.frame.data_stack[-n:] = []\n return ret\n else:\n return []", "def __n_rows(df, n_columns):\n # List of dataframe columns\n columns = list(df.columns)\n\n # If the length of the dataframe columns is even\n if len(columns) % n_columns == 0:\n # Then calculate the absolute division\n axes_rows = len(columns) // n_columns\n else:\n # Else, calculate the absolute division and add 1\n axes_rows = (len(columns) // n_columns) + 1\n\n return axes_rows", "def setNSlices(self,n):\n assert(n> 0)\n self._c_param.lee_richards_n_slices = n", "def select_next_n(filtered_games, n=5):\n assert isinstance(filtered_games, pd.DataFrame)\n assert isinstance(n, int)\n assert n >= 1\n\n # Yield next n from the dataframe if current index not exceeding rows, and stop the generator at last iteration.\n curr_index = 0\n while curr_index < filtered_games.shape[0]:\n yield filtered_games.iloc[curr_index:curr_index+n].reset_index(drop = True) # Yield next n games\n curr_index += n # Increment current index by n", "def StackBarplot(df,\n bin_width=0.1,\n rank=False,axes=None,\n fontsize=15,\n linewidth=0.1,\n yticklabel=True,save=False):\n if rank:\n df = resortFirstSample(df)\n fig,ax = axesConf(df,axes=axes)\n # prefix data structure\n featureList = list(df.index)[::-1]\n\n # color\n colors = list(cm.tab20.colors)\n category_colors = [to_hex(color) for color in colors]\n\n xrange = np.arange(0,len(df.columns))\n #xrange = np.arange(0,bin_width* len(df.columns),step=bin_width) ## todo:乘法有问题,容易使得xrange与df.columns的长度不一致\n starts= [0 for i in range(len(df.columns))]\n\n for (i,feature) in enumerate(featureList):\n # stacked barplot: add bar one by one sample\n ## color\n #category_colors = color_conf(len(taxonList))\n #category_colors = plt.get_cmap('tab20')(np.linspace(0.15, 0.85, len(taxonList)))\n\n ## stacked bar\n\n height = df.loc[feature,:].values\n height = np.array(height)\n ax.bar(xrange, height, bottom=starts, width=bin_width,\n linewidth=linewidth,\n edgecolor='black',\n align='edge',\n label=feature, color=category_colors[i])\n\n starts = [i+j for i,j in zip(starts,height)]\n\n ax.legend(bbox_to_anchor=(1, 0),\n loc='lower left',\n fontsize=fontsize,\n facecolor='w')\n ## tick setting\n for xline,yline in zip(ax.get_xticklines(),ax.get_yticklines()):\n xline.set_visible(False)\n #yline.set_visible(False)\n\n for (xlabel,ylabel) in zip(ax.get_xticklabels(),ax.get_yticklabels()):\n ylabel.set_color('black')\n ylabel.set_fontsize(10)\n\n\n ax.xaxis.set_major_locator(ticker.NullLocator())\n ## set spines invisible\n ax.spines['bottom'].set_color(None)\n ax.spines['right'].set_color(None)\n ax.spines['top'].set_color(None)\n #if save:\n #baseconf.BaseAxes.savefig('StackBarplot')\n plt.tight_layout()\n return fig,ax", "def tail(a, n=5):\n return DataFrameIloc(a)[-n:]", "def limit(requestContext, seriesList, n):\n 
return seriesList[0:n]", "def keep_n(self, n=100):\n before = self.item_count()\n\n item_count = self.item_count()\n if item_count > n: self.filter(self.sample(n))\n\n after = self.item_count()\n with msg(f'Keeping (at most) {n} items: {after} of {before}', done=False, enabled=self.output):pass" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Make an n-bars dataframe looking ahead at the future n bars. The row size of `df` must be greater than or equal to `n_bars + 1`, or a ValueError is raised.
def make_nbars_future(df: pd.DataFrame, n_bars: int, cols: List[str] = ['Close'], datetime_col: Union[str, None] = 'Date') -> pd.DataFrame: if df.shape[0] < n_bars + 1: raise ValueError( f'row size of the df (={df.shape[0]}) must be greater than or equal to n_bars + 1 (={n_bars + 1})') df = df.rename(columns={col: f'{col}0' for col in cols}) for i in range(1, n_bars+1): for col in cols: df[f'{col}{i}'] = df[f'{col}0'][i:].append( pd.Series([np.nan]*i)).reset_index(drop=True) df = df.dropna() return df
[ "def make_nbars_past(df: pd.DataFrame, n_bars: int, cols: List[str] = ['Close'], datetime_col: Union[str, None] = 'Date') -> pd.DataFrame:\n if df.shape[0] < n_bars + 1:\n raise ValueError(\n f'row size of the df (={df.shape[0]}) must be greater than or equal to n_bars + 1 (={n_bars + 1})')\n df = df.rename(columns={col: f'{col}{n_bars}' for col in cols})\n\n for i in reversed(range(n_bars)):\n inc = n_bars - i\n for col in cols:\n df[f'{col}{i}'] = df[f'{col}{n_bars}'][inc:].append(\n pd.Series([np.nan]*inc)).reset_index(drop=True)\n\n # correct bar date (or datetime)\n if datetime_col is not None:\n df[datetime_col] = df[datetime_col][n_bars:].append(\n pd.Series([np.nan]*n_bars)).reset_index(drop=True)\n\n df = df.dropna()\n\n return df", "def bar_chart(self, df, n_groups, dict):\n fig, ax = plt.subplots()\n # choose bar width (standard 0.8 chosen)\n bar_width = 0.35\n # get an index to set the ticks for the x axis\n\n index = np.arange(n_groups)\n indexes = df.index.tolist()\n print(indexes)\n df[\"index\"] = indexes\n\n # make barchart for permutation test\n ax.bar(index, df[\"perm\"], bar_width, color='b', linewidth=4,\n label='Permutation test')\n # make barchart for t-test\n ax.bar(index + bar_width, df[\"t_test\"], bar_width, color='r',\n label='t-test')\n\n ax.set_xlabel(dict[\"xlabel\"])\n ax.set_ylabel(dict[\"ylabel\"])\n ax.set_title(dict[\"title\"])\n ax.set_xticks(index + bar_width / 2)\n ax.set_xticklabels(dict[\"xtickslabels\"])\n ax.legend()\n\n fig.tight_layout()\n plt.show()", "def get_latest_bars(self, symbol, N=1):\n raise NotImplementedError(\"Should implement get_latest_bars()\")", "def setBarGroups(ngroups, gap):\n dislin.bargrp(ngroups, gap)", "def chunk_df(df, n):\n for i in range(0, df.shape[0], n):\n yield df[i:i + n]", "def repeat(df, n):\n return pd.concat([df] * n, ignore_index=True)", "def imbalance_bar(data,ET_window,P_window, warm_up_len = 100,mode=\"TIB\"):\n assert mode in [\"TIB\",\"VIB\"], \"please enter mode of imbalance bar: TIB/VIB\"\n if mode == \"TIB\":\n data = _preprocess(data)\n else:\n data = _preprocess(data, need_vol=True)\n\n N = data.shape[0]\n b_t = _direction(data[\"price\"])\n if mode == \"VIB\":\n b_t = np.array(b_t * data[\"vol\"])\n E_T = warm_up_len\n E_theta = E_T * 0.5 # without prior knowledge it's reasonable to assume P(b_t==1) = 0.5\n\n # length of first bar\n t0 = np.where(abs(np.cumsum(b_t))>=E_theta)[0]\n if len(t0) == 0:\n raise ValueError(\"No such bar can be created!\")\n\n bar = [t0[0]+1]\n bar_len = 0\n current_loc = sum(bar)\n while True:\n E_T = _EMA(bar, ET_window)[-1]\n P_estimate = _EMA(b_t[:current_loc], P_window)[-1]\n E_theta = E_T * abs(P_estimate)\n\n theta_t = abs(np.cumsum(b_t[current_loc:]))\n increment = np.where(theta_t >= E_theta)[0] # np.where() will return a tuple\n \n if len(increment)==0: # if can't find any appropriate bar\n bar.append(data.shape[0] - current_loc)\n break \n if bar[bar_len] + (increment[0]+1) >= N:\n bar.append(data.shape[0] - current_loc)\n break\n bar.append(increment[0]+1)# python start from 0 but we want to store the length of each bar\n current_loc += (increment[0]+1)\n bar_len += 1\n result = _bar2df(bar,data)\n return result", "def StackBarplot(df,\n bin_width=0.1,\n rank=False,axes=None,\n fontsize=15,\n linewidth=0.1,\n yticklabel=True,save=False):\n if rank:\n df = resortFirstSample(df)\n fig,ax = axesConf(df,axes=axes)\n # prefix data structure\n featureList = list(df.index)[::-1]\n\n # color\n colors = list(cm.tab20.colors)\n category_colors = [to_hex(color) for color 
in colors]\n\n xrange = np.arange(0,len(df.columns))\n #xrange = np.arange(0,bin_width* len(df.columns),step=bin_width) ## todo:乘法有问题,容易使得xrange与df.columns的长度不一致\n starts= [0 for i in range(len(df.columns))]\n\n for (i,feature) in enumerate(featureList):\n # stacked barplot: add bar one by one sample\n ## color\n #category_colors = color_conf(len(taxonList))\n #category_colors = plt.get_cmap('tab20')(np.linspace(0.15, 0.85, len(taxonList)))\n\n ## stacked bar\n\n height = df.loc[feature,:].values\n height = np.array(height)\n ax.bar(xrange, height, bottom=starts, width=bin_width,\n linewidth=linewidth,\n edgecolor='black',\n align='edge',\n label=feature, color=category_colors[i])\n\n starts = [i+j for i,j in zip(starts,height)]\n\n ax.legend(bbox_to_anchor=(1, 0),\n loc='lower left',\n fontsize=fontsize,\n facecolor='w')\n ## tick setting\n for xline,yline in zip(ax.get_xticklines(),ax.get_yticklines()):\n xline.set_visible(False)\n #yline.set_visible(False)\n\n for (xlabel,ylabel) in zip(ax.get_xticklabels(),ax.get_yticklabels()):\n ylabel.set_color('black')\n ylabel.set_fontsize(10)\n\n\n ax.xaxis.set_major_locator(ticker.NullLocator())\n ## set spines invisible\n ax.spines['bottom'].set_color(None)\n ax.spines['right'].set_color(None)\n ax.spines['top'].set_color(None)\n #if save:\n #baseconf.BaseAxes.savefig('StackBarplot')\n plt.tight_layout()\n return fig,ax", "def filled_bar(df, rank=\"Phylum\", figsize=(16, 6), drop=0.01):\n summarized = df.groupby([\"id\", rank])[\"count\"].sum().reset_index()\n summarized[\"percent\"] = summarized.groupby(\"id\")[\"count\"].apply(\n lambda x: x / x.sum())\n summarized = summarized.pivot(index=\"id\", columns=rank, values=\"percent\")\n rank_means = summarized.mean()\n rank_order = (rank_means[rank_means > drop].\n sort_values(ascending=False).index)\n id_order = summarized[rank_order[0]].sort_values(ascending=False).index\n summarized = summarized.reindex(id_order).reindex(rank_order, axis=1)\n summarized.index = range(summarized.shape[0])\n\n ax = summarized.plot(kind=\"area\", stacked=True, legend=False,\n figsize=figsize)\n ax.set_ylim(0, 1)\n ax.legend(bbox_to_anchor=(1.04, 1), loc=\"upper left\")\n return ax", "def take(\n self,\n df: DataFrame,\n n: int,\n presort: str,\n na_position: str = \"last\",\n partition_spec: PartitionSpec = EMPTY_PARTITION_SPEC,\n metadata: Any = None,\n ) -> DataFrame: # pragma: no cover\n pass", "def generate_bar_rows(data_list, save_path):\n if not data_list:\n return False, 'data_list can not be empty'\n # row number\n row = len(data_list)\n fig, ax_list = plt.subplots(row, 1, figsize=(10, 5*row))\n for i, item in enumerate(data_list):\n if row == 1:\n ax = ax_list\n else:\n ax = ax_list[i]\n d_dict = item['data']\n # x axis value\n if 'x_value' in item and item['x_value']:\n x_list = item['x_value']\n else:\n x_list = np.arange(1, len(list(d_dict.values())[0])+1) # the label locations\n width = 0.2 # the width of the bars\n\n j = 0\n for label, data in d_dict.items():\n\n if j % 2 == 0:\n rects = ax.bar(x_list - width / 2, data, width, label=label)\n else:\n rects = ax.bar(x_list + width / 2, data, width, label=label)\n auto_label(ax, rects)\n j += 1\n\n if 'x_label' in item and item['x_label']:\n ax.set_xlabel(item['x_label'])\n if 'v' in item and item['y_label']:\n ax.set_ylabel(item['y_label'])\n if 'title' in item and item['title']:\n ax.set_title(item['title'])\n ax.legend()\n fig.tight_layout()\n # plt.show()\n fig.savefig(save_path, dpi=600)\n plt.close('all')\n gc.collect()\n return True, ''", "def 
__n_rows(df, n_columns):\n # List of dataframe columns\n columns = list(df.columns)\n\n # If the length of the dataframe columns is even\n if len(columns) % n_columns == 0:\n # Then calculate the absolute division\n axes_rows = len(columns) // n_columns\n else:\n # Else, calculate the absolute division and add 1\n axes_rows = (len(columns) // n_columns) + 1\n\n return axes_rows", "def n50_barplot(df, path, settings, title=None):\n n50_bar = Plot(path=path + \"NanoComp_N50.html\", title=\"Comparing read length N50\")\n datasets = df[\"dataset\"].unique()\n length_column = \"aligned_lengths\" if \"aligned_lengths\" in df else \"lengths\"\n ylabel = \"Aligned read length N50\" if \"aligned_lengths\" in df else \"Sequenced read length N50\"\n\n palette = (\n settings[\"colors\"] if settings[\"colors\"] else cycle(plotly.colors.DEFAULT_PLOTLY_COLORS)\n )\n\n n50s = [get_N50(np.sort(df.loc[df[\"dataset\"] == d, length_column])) for d in datasets]\n n50_bar.fig = go.Figure()\n\n for idx, n50, color in zip(datasets, n50s, palette):\n n50_bar.fig.add_trace(go.Bar(x=[idx], y=[n50], name=idx, marker_color=color))\n\n n50_bar.fig.update_layout(\n title=title or n50_bar.title,\n title_x=0.5,\n yaxis_title=ylabel,\n )\n\n n50_bar.html = n50_bar.fig.to_html(full_html=False, include_plotlyjs=\"cdn\")\n n50_bar.save(settings)\n return [n50_bar]", "def num_cols(nvals, base):\n vals = [str(i) for i in range(nvals)]\n df = pd.DataFrame({'vals': vals})\n encoder = encoders.BaseNEncoder(base=base)\n encoder.fit(df)\n return len(list(encoder.transform(df)))", "def oneNumBar(df, colName):\n bins = pd.qcut(x=df[colName[0]], q=15, duplicates='drop')\n ax = bins.value_counts()\n bins = bins.cat.as_ordered()\n bins = bins.cat.categories\n bounds = bins.left \n bounds = list(bounds)\n bounds.append(bins[len(bounds)-1].right)\n texts = []\n for x,y in zip(bounds[0::],bounds[1::]):\n texts.append(\"(\" + str(x) + \", \" + str(y) + \"]\") \n barData = [go.Bar(x=texts, \n y=ax,\n marker=dict(\n color = '#92c5de',\n opacity=0.8)\n )] \n layout = go.Layout(\n title=\"Bar Plot Showing Count of Values for \" + str(colName[0]),\n xaxis=dict(\n title= colName[0]\n ),\n yaxis=dict(\n title= \"NUMBER OF RECORDS\", \n )\n )\n fig = go.Figure(data=barData, layout=layout)\n return {\"label\":\"Frequency\", \"plot\":fig}", "def make_bars(prepared_data):\n log.info('this func has no update')\n values_ = prepared_data['bars_values']\n\n def autolabel(rects, height_factor):\n for i, rect in enumerate(rects):\n height = rect.get_height()\n label = '%d' % int(height)\n ax.text(rect.get_x() + rect.get_width() / 2., height_factor * height,\n '{}'.format(label),\n ha='center', va='bottom')\n\n mpl.rcParams.update({'font.size': 15})\n\n for house in values_:\n sections = [f'П-зд {i[-1]}' for i in houses_arr[f'house_{house}']]\n values = [values_[house].get(int(i[-1]), 0) for i in sections]\n\n plt.bar(sections, values)\n ax = plt.gca()\n ax.set_title(f'Будинок {house}')\n autolabel(ax.patches, height_factor=0.85)\n\n img_path = os.path.join('img', 'charts', f'bar{house}.png')\n plt.savefig(img_path, dpi=200)\n plt.clf()\n plt.close()", "def tail(self, n: int = 5) -> 'DataFrame':\n return self[-n:, :] # type: ignore", "def barplot(self, x = \"Predictor\", color = None, opacity = 1, template = \"ggplot2\", \n has_title = True, barmode=\"stack\", is_horizontal = False, title = None, is_percent = False,\n show_num = False):\n if color: #Produce either a stacked or grouped bar plot\n df_stack = self._df.groupby([x,color]).size().reset_index()\n 
df_stack['Percentage'] = self._df.groupby([x, color]).size().groupby(level = 0).apply(lambda \n x:100 * x/float(x.sum())).values\n df_stack.columns = [x, color, 'Count', 'Percentage']\n df_stack['Percentage'] = round(df_stack['Percentage'], 2)\n \n x_clean, df_clean = clean_varname(df_stack, var = x)\n color_clean, df_clean = clean_varname(df_clean, var = color)\n \n if has_title:\n if not title:\n title = f\"Bar Plot of {x_clean} and {color_clean}\"\n else:\n title = None\n \n \n # 8 different variations for how this graph can appear:\n if is_horizontal:\n if is_percent:\n if show_num: #Show percentages on stacked bar graph\n fig = px.bar(df_clean, y = x_clean, x = 'Percentage', \n color = color_clean, template = template, barmode=barmode, \n opacity = opacity, title = title, text = df_clean['Percentage'])\n else:\n fig = px.bar(df_clean, y = x_clean, x = 'Percentage', \n color = color_clean, template = template, barmode=barmode, \n opacity = opacity, title = title)\n else:\n if show_num: #Show counts on stacked bar graph:\n fig = px.bar(df_clean, y = x_clean, x = 'Count', \n color = color_clean, template = template, barmode=barmode, \n opacity = opacity, title = title, text = df_clean['Count'])\n else:\n fig = px.bar(df_clean, y = x_clean, x = 'Count', \n color = color_clean, template = template, barmode=barmode, \n opacity = opacity, title = title)\n else:\n if is_percent:\n if show_num:\n fig = px.bar(df_clean, x = x_clean, y = 'Percentage', \n color = color_clean, template = template, barmode=barmode, \n opacity = opacity, title = title, text = df_clean['Percentage'])\n else:\n fig = px.bar(df_clean, x = x_clean, y = 'Percentage', \n color = color_clean, template = template, barmode=barmode, \n opacity = opacity, title = title)\n else:\n if show_num:\n fig = px.bar(df_clean, x = x_clean, y = 'Count', \n color = color_clean, template = template, barmode=barmode, \n opacity = opacity, title = title, text = df_clean['Count'])\n else:\n fig = px.bar(df_clean, x = x_clean, y = 'Count', \n color = color_clean, template = template, barmode=barmode, \n opacity = opacity, title = title) \n \n return fig\n \n else: #Create a basic bar plot\n df_stack = self._df.groupby([x]).size().reset_index()\n df_stack['Percentage'] = self._df.groupby([x]).size().groupby(level = 0).apply(lambda", "def select_next_n(filtered_games, n=5):\n assert isinstance(filtered_games, pd.DataFrame)\n assert isinstance(n, int)\n assert n >= 1\n\n # Yield next n from the dataframe if current index not exceeding rows, and stop the generator at last iteration.\n curr_index = 0\n while curr_index < filtered_games.shape[0]:\n yield filtered_games.iloc[curr_index:curr_index+n].reset_index(drop = True) # Yield next n games\n curr_index += n # Increment current index by n" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
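A minimal usage sketch of the `make_nbars_future` function from the document above. The toy dataframe and its values are assumptions, and a pandas version earlier than 2.0 is assumed because the function relies on `Series.append`, which was removed in pandas 2.0.

```python
# Minimal usage sketch for make_nbars_future (defined in the document above).
# Assumes pandas < 2.0, since the function relies on Series.append.
import pandas as pd

# Hypothetical toy price series using the function's default column names.
df = pd.DataFrame({
    "Date": pd.date_range("2021-01-01", periods=6, freq="D"),
    "Close": [10.0, 11.0, 12.0, 13.0, 14.0, 15.0],
})

# With n_bars=2, Close is renamed to Close0 and Close1/Close2 hold the next
# two closes; the last two rows are dropped by dropna() because their future
# bars are unknown.
future = make_nbars_future(df, n_bars=2, cols=["Close"], datetime_col="Date")
print(future[["Date", "Close0", "Close1", "Close2"]])
```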
Usage: When you call the DescribeDBInstanceEncryptionKey operation, the instance must already have transparent data encryption (TDE) enabled in BYOK mode. You can call the [ModifyDBInstanceTDE](~~131267~~) operation to enable TDE.
def describe_dbinstance_encryption_key_with_options( self, request: dds_20151201_models.DescribeDBInstanceEncryptionKeyRequest, runtime: util_models.RuntimeOptions, ) -> dds_20151201_models.DescribeDBInstanceEncryptionKeyResponse: UtilClient.validate_model(request) query = {} if not UtilClient.is_unset(request.dbinstance_id): query['DBInstanceId'] = request.dbinstance_id if not UtilClient.is_unset(request.encryption_key): query['EncryptionKey'] = request.encryption_key if not UtilClient.is_unset(request.owner_account): query['OwnerAccount'] = request.owner_account if not UtilClient.is_unset(request.owner_id): query['OwnerId'] = request.owner_id if not UtilClient.is_unset(request.resource_owner_account): query['ResourceOwnerAccount'] = request.resource_owner_account if not UtilClient.is_unset(request.resource_owner_id): query['ResourceOwnerId'] = request.resource_owner_id if not UtilClient.is_unset(request.security_token): query['SecurityToken'] = request.security_token req = open_api_models.OpenApiRequest( query=OpenApiUtilClient.query(query) ) params = open_api_models.Params( action='DescribeDBInstanceEncryptionKey', version='2015-12-01', protocol='HTTPS', pathname='/', method='POST', auth_type='AK', style='RPC', req_body_type='formData', body_type='json' ) return TeaCore.from_map( dds_20151201_models.DescribeDBInstanceEncryptionKeyResponse(), self.call_api(params, req, runtime) )
[ "def describe_dbinstance_encryption_key(\n self,\n request: dds_20151201_models.DescribeDBInstanceEncryptionKeyRequest,\n ) -> dds_20151201_models.DescribeDBInstanceEncryptionKeyResponse:\n runtime = util_models.RuntimeOptions()\n return self.describe_dbinstance_encryption_key_with_options(request, runtime)", "def testDbKeyRotation(self):\r\n @contextmanager\r\n def _OverrideSecret(secret, secret_value):\r\n try:\r\n old_secret_value = secrets.GetSharedSecretsManager()._secrets[secret]\r\n secrets.GetSharedSecretsManager()._secrets[secret] = secret_value\r\n # Clear the cached crypter.\r\n if hasattr(_CryptValue, '_crypter'):\r\n del _CryptValue._crypter\r\n yield\r\n finally:\r\n secrets.GetSharedSecretsManager()._secrets[secret] = old_secret_value\r\n if hasattr(_CryptValue, '_crypter'):\r\n del _CryptValue._crypter\r\n\r\n # Encrypt a value using the original key.\r\n plaintext = 'quick brown fox'\r\n self._crypt_inst.Set(plaintext)\r\n\r\n # Add a new key to the keyset and make it primary and ensure that plaintext can still be recovered.\r\n writer = keyczar_dict.DictWriter(secrets.GetSharedSecretsManager()._secrets['db_crypt'])\r\n czar = keyczar.GenericKeyczar(keyczar_dict.DictReader(writer.dict))\r\n czar.AddVersion(keyinfo.PRIMARY)\r\n czar.Write(writer)\r\n\r\n with _OverrideSecret('db_crypt', json.dumps(writer.dict)):\r\n self.assertEqual(self._crypt_inst.Get().Decrypt(), plaintext)\r\n\r\n # Now remove old key and verify that plaintext cannot be recovered.\r\n czar.Demote(1)\r\n czar.Revoke(1)\r\n czar.Write(writer)\r\n with _OverrideSecret('db_crypt', json.dumps(writer.dict)):\r\n self.assertRaises(errors.KeyNotFoundError, self._crypt_inst.Get().Decrypt)", "def update_wallet(db, type, data):\n d = data\n kds = BCDataStream()\n vds = BCDataStream()\n\n # Write the type code to the key\n kds.write_string(type)\n vds.write(\"\") # Ensure there is something\n\n try:\n if type == \"tx\":\n raise NotImplementedError(\"Writing items of type 'tx'\")\n kds.write(d['tx_id'])\n #d.update(parse_WalletTx(vds))\n elif type == \"name\":\n kds.write(d['hash'])\n vds.write(d['name'])\n elif type == \"version\":\n vds.write_uint32(d['version'])\n elif type == \"setting\":\n raise NotImplementedError(\"Writing items of type 'setting'\")\n kds.write_string(d['setting'])\n #d['value'] = parse_setting(d['setting'], vds)\n elif type == \"key\":\n kds.write_string(d['public_key'])\n vds.write_string(d['private_key'])\n elif type == \"wkey\":\n kds.write_string(d['public_key'])\n vds.write_string(d['private_key'])\n vds.write_int64(d['created'])\n vds.write_int64(d['expires'])\n vds.write_string(d['comment'])\n elif type == \"ckey\":\n kds.write_string(d['public_key'])\n kds.write_string(d['crypted_key'])\n elif type == \"mkey\":\n kds.write_int32(d['nID'])\n vds.write_string(d['crypted_key'])\n vds.write_string(d['salt'])\n vds.write_int32(d['nDeriveIterations'])\n vds.write_int32(d['nDerivationMethod'])\n vds.write_string(d['vchOtherDerivationParameters'])\n elif type == \"defaultkey\":\n vds.write_string(d['key'])\n elif type == \"pool\":\n kds.write_int64(d['n'])\n vds.write_int32(d['nVersion'])\n vds.write_int64(d['nTime'])\n vds.write_string(d['public_key'])\n elif type == \"acc\":\n kds.write_string(d['account'])\n vds.write_int32(d['nVersion'])\n vds.write_string(d['public_key'])\n elif type == \"acentry\":\n kds.write_string(d['account'])\n kds.write_uint64(d['n'])\n vds.write_int32(d['nVersion'])\n vds.write_int64(d['nCreditDebit'])\n vds.write_int64(d['nTime'])\n 
vds.write_string(d['otherAccount'])\n vds.write_string(d['comment'])\n elif type == \"bestblock\":\n vds.write_int32(d['nVersion'])\n vds.write_compact_size(len(d['hashes']))\n for h in d['hashes']:\n vds.write(h)\n else:\n print \"Unknown key type: \"+type\n\n # Write the key/value pair to the database\n db.put(kds.input, vds.input)\n\n except Exception, e:\n print(\"ERROR writing to wallet.dat, type %s\"%type)\n print(\"data dictionary: %r\"%data)", "def describe_user_encryption_key_list_with_options(\n self,\n request: dds_20151201_models.DescribeUserEncryptionKeyListRequest,\n runtime: util_models.RuntimeOptions,\n ) -> dds_20151201_models.DescribeUserEncryptionKeyListResponse:\n UtilClient.validate_model(request)\n query = {}\n if not UtilClient.is_unset(request.dbinstance_id):\n query['DBInstanceId'] = request.dbinstance_id\n if not UtilClient.is_unset(request.owner_account):\n query['OwnerAccount'] = request.owner_account\n if not UtilClient.is_unset(request.owner_id):\n query['OwnerId'] = request.owner_id\n if not UtilClient.is_unset(request.resource_owner_account):\n query['ResourceOwnerAccount'] = request.resource_owner_account\n if not UtilClient.is_unset(request.resource_owner_id):\n query['ResourceOwnerId'] = request.resource_owner_id\n if not UtilClient.is_unset(request.security_token):\n query['SecurityToken'] = request.security_token\n if not UtilClient.is_unset(request.target_region_id):\n query['TargetRegionId'] = request.target_region_id\n req = open_api_models.OpenApiRequest(\n query=OpenApiUtilClient.query(query)\n )\n params = open_api_models.Params(\n action='DescribeUserEncryptionKeyList',\n version='2015-12-01',\n protocol='HTTPS',\n pathname='/',\n method='POST',\n auth_type='AK',\n style='RPC',\n req_body_type='formData',\n body_type='json'\n )\n return TeaCore.from_map(\n dds_20151201_models.DescribeUserEncryptionKeyListResponse(),\n self.call_api(params, req, runtime)\n )", "def walletinfo(test_unlock):\n stm = shared_morphene_instance()\n if mph.rpc is not None:\n mph.rpc.rpcconnect() \n t = PrettyTable([\"Key\", \"Value\"])\n t.align = \"l\"\n t.add_row([\"created\", mph.wallet.created()])\n t.add_row([\"locked\", mph.wallet.locked()])\n t.add_row([\"Number of stored keys\", len(mph.wallet.getPublicKeys())])\n t.add_row([\"sql-file\", mph.wallet.keyStorage.sqlDataBaseFile])\n password_storage = mph.config[\"password_storage\"]\n t.add_row([\"password_storage\", password_storage])\n password = os.environ.get(\"UNLOCK\")\n if password is not None:\n t.add_row([\"UNLOCK env set\", \"yes\"])\n else:\n t.add_row([\"UNLOCK env set\", \"no\"])\n if KEYRING_AVAILABLE:\n t.add_row([\"keyring installed\", \"yes\"])\n else:\n t.add_row([\"keyring installed\", \"no\"])\n if test_unlock:\n if unlock_wallet(stm):\n t.add_row([\"Wallet unlock\", \"successful\"])\n else:\n t.add_row([\"Wallet unlock\", \"not working\"])\n # t.add_row([\"getPublicKeys\", str(mph.wallet.getPublicKeys())])\n print(t)", "def _get_sql_db_tde_disabled_event(com, ext):\n friendly_cloud_type = util.friendly_string(com.get('cloud_type'))\n reference = com.get('reference')\n description = (\n '{} SQL DB {} has TDE disabled.'\n .format(friendly_cloud_type, reference)\n )\n recommendation = (\n 'Check {} SQL DB {} and enable TDE.'\n .format(friendly_cloud_type, reference)\n )\n event_record = {\n # Preserve the extended properties from the virtual\n # machine record because they provide useful context to\n # locate the virtual machine that led to the event.\n 'ext': util.merge_dicts(ext, {\n 
'record_type': 'sql_db_tde_event'\n }),\n 'com': {\n 'cloud_type': com.get('cloud_type'),\n 'record_type': 'sql_db_tde_event',\n 'reference': reference,\n 'description': description,\n 'recommendation': recommendation,\n }\n }\n\n _log.info('Generating sql_db_tde_event; %r', event_record)\n yield event_record", "def CreateFromExtendedKey(self,\n wallet_name: str,\n ex_key_str: str) -> HdWalletBase:\n try:\n bip_obj = Bip32Secp256k1.FromExtendedKey(ex_key_str)\n except Bip32KeyError as ex:\n raise ValueError(f\"Invalid extended key: {ex_key_str}\") from ex\n\n # Segwit wallet uses hardened derivation, not supported by public-only objects\n if bip_obj.IsPublicOnly() and self.m_mnemonic_type == HdWalletElectrumV2MnemonicTypes.SEGWIT:\n raise ValueError(\"Only private extended keys are supported for segwit mnemonic type\")\n\n electrum_obj = self.m_electrum_cls(bip_obj)\n return HdWalletElectrumV2(wallet_name=wallet_name,\n electrum_obj=electrum_obj)", "def create_tsigkey(self, context, tsigkey):\n\n if tsigkey['algorithm'] not in TSIG_SUPPORTED_ALGORITHMS:\n raise exceptions.NotImplemented('Unsupported algorithm')\n\n tsigkey_m = models.TsigKey()\n\n tsigkey_m.update({\n 'designate_id': tsigkey['id'],\n 'name': tsigkey['name'],\n 'algorithm': tsigkey['algorithm'],\n 'secret': base64.b64encode(tsigkey['secret'])\n })\n\n tsigkey_m.save(self.session)\n\n # NOTE(kiall): Prepare and execute query to install this TSIG Key on\n # every domain. We use a manual query here since anything\n # else would be impossibly slow.\n query_select = select([\n models.Domain.__table__.c.id,\n \"'TSIG-ALLOW-AXFR'\",\n \"'%s'\" % tsigkey['name']]\n )\n\n columns = [\n models.DomainMetadata.__table__.c.domain_id,\n models.DomainMetadata.__table__.c.kind,\n models.DomainMetadata.__table__.c.content,\n ]\n\n query = InsertFromSelect(models.DomainMetadata.__table__, query_select,\n columns)\n\n # NOTE(kiall): A TX is required for, at the least, SQLite.\n self.session.begin()\n self.session.execute(query)\n self.session.commit()", "def encrypt_item(table_name, aws_cmk_id):\n index_key = {\"partition_attribute\": {\"S\": \"is this\"}, \"sort_attribute\": {\"N\": \"55\"}}\n plaintext_item = {\n \"example\": {\"S\": \"data\"},\n \"some numbers\": {\"N\": \"99\"},\n \"and some binary\": {\"B\": b\"\\x00\\x01\\x02\"},\n \"leave me\": {\"S\": \"alone\"}, # We want to ignore this attribute\n }\n # Collect all of the attributes that will be encrypted (used later).\n encrypted_attributes = set(plaintext_item.keys())\n encrypted_attributes.remove(\"leave me\")\n # Collect all of the attributes that will not be encrypted (used later).\n unencrypted_attributes = set(index_key.keys())\n unencrypted_attributes.add(\"leave me\")\n # Add the index pairs to the item.\n plaintext_item.update(index_key)\n\n # Create a normal client.\n client = boto3.client(\"dynamodb\")\n # Create a crypto materials provider using the specified AWS KMS key.\n aws_kms_cmp = AwsKmsCryptographicMaterialsProvider(key_id=aws_cmk_id)\n # Create attribute actions that tells the encrypted client to encrypt all attributes except one.\n actions = AttributeActions(\n default_action=CryptoAction.ENCRYPT_AND_SIGN, attribute_actions={\"leave me\": CryptoAction.DO_NOTHING}\n )\n # Use these objects to create an encrypted client.\n encrypted_client = EncryptedClient(client=client, materials_provider=aws_kms_cmp, attribute_actions=actions)\n\n # Put the item to the table, using the encrypted client to transparently encrypt it.\n 
encrypted_client.put_item(TableName=table_name, Item=plaintext_item)\n\n # Get the encrypted item using the standard client.\n encrypted_item = client.get_item(TableName=table_name, Key=index_key)[\"Item\"]\n\n # Get the item using the encrypted client, transparently decrypting it.\n decrypted_item = encrypted_client.get_item(TableName=table_name, Key=index_key)[\"Item\"]\n\n # Verify that all of the attributes are different in the encrypted item\n for name in encrypted_attributes:\n assert encrypted_item[name] != plaintext_item[name]\n assert decrypted_item[name] == plaintext_item[name]\n\n # Verify that all of the attributes that should not be encrypted were not.\n for name in unencrypted_attributes:\n assert decrypted_item[name] == encrypted_item[name] == plaintext_item[name]\n\n # Clean up the item\n encrypted_client.delete_item(TableName=table_name, Key=index_key)", "def modify_db_instance(DBInstanceIdentifier=None, AllocatedStorage=None, DBInstanceClass=None, DBSubnetGroupName=None, DBSecurityGroups=None, VpcSecurityGroupIds=None, ApplyImmediately=None, MasterUserPassword=None, DBParameterGroupName=None, BackupRetentionPeriod=None, PreferredBackupWindow=None, PreferredMaintenanceWindow=None, MultiAZ=None, EngineVersion=None, AllowMajorVersionUpgrade=None, AutoMinorVersionUpgrade=None, LicenseModel=None, Iops=None, OptionGroupName=None, NewDBInstanceIdentifier=None, StorageType=None, TdeCredentialArn=None, TdeCredentialPassword=None, CACertificateIdentifier=None, Domain=None, CopyTagsToSnapshot=None, MonitoringInterval=None, DBPortNumber=None, PubliclyAccessible=None, MonitoringRoleArn=None, DomainIAMRoleName=None, PromotionTier=None, EnableIAMDatabaseAuthentication=None, EnablePerformanceInsights=None, PerformanceInsightsKMSKeyId=None, PerformanceInsightsRetentionPeriod=None, CloudwatchLogsExportConfiguration=None, ProcessorFeatures=None, UseDefaultProcessorFeatures=None, DeletionProtection=None):\n pass", "def ekey(self):\n if self.security == 'LOW':\n raise Exception('LOW security does not need access to encryption key.')\n if self.__ekey:\n ekey = self.__ekey\n else:\n ekey = self.getuserinput(\n 'Provide kindi secrets encryption key: ',\n 'Cannot instantiate Secrets for MEDIUM and HIGH security if no key is provided'\n )\n if self.security == 'MEDIUM':\n self.__ekey = ekey\n return ekey", "def testEnumerateOnBDE(self):\n test_path = self._getTestDataPath('bdetogo.raw')\n test_evidence = evidence.RawDisk(source_path=test_path)\n\n # Test without credentials\n with self.assertRaises(TurbiniaException):\n partitions.Enumerate(test_evidence)\n\n # Test with bad credentials\n test_evidence.credentials = [('password', 'bde!TEST')]\n with self.assertRaises(TurbiniaException):\n partitions.Enumerate(test_evidence)\n\n # Test with good credentials\n test_evidence.credentials = [('password', 'bde-TEST')]\n path_specs = partitions.Enumerate(test_evidence)\n self.assertEqual(len(path_specs), 2)\n\n # Test GetPartitionEncryptionType\n encryption_type = partitions.GetPartitionEncryptionType(path_specs[0])\n self.assertEqual(encryption_type, 'BDE')", "def tdes(self, pin_block, cur_pin_key, action): # action = True means encryption, False means decryption\t\n\t\tif action: return enCrypt(\"DES3_ECB\", pin_block, cur_pin_key[:__KEYSLEN_3DES])\n\t\telse: return deCrypt(\"DES3_ECB\", pin_block, cur_pin_key[:__KEYSLEN_3DES])", "async def set_database_encryption_key(\n self,\n new_encryption_key: str,\n *,\n request_id: str = None,\n request_timeout: int = None,\n skip_validation: bool = False\n ) -> 
Ok:\n _constructor = SetDatabaseEncryptionKey.construct if skip_validation else SetDatabaseEncryptionKey\n\n return await self.client.request(\n _constructor(\n new_encryption_key=new_encryption_key,\n ),\n request_id=request_id,\n request_timeout=request_timeout,\n )", "def encryption_key(self) -> typing.Optional[aws_cdk.aws_kms.IKey]:\n ...", "async def check_database_encryption_key(\n self,\n encryption_key: str,\n *,\n request_id: str = None,\n request_timeout: int = None,\n skip_validation: bool = False\n ) -> Ok:\n _constructor = CheckDatabaseEncryptionKey.construct if skip_validation else CheckDatabaseEncryptionKey\n\n return await self.client.request(\n _constructor(\n encryption_key=encryption_key,\n ),\n request_id=request_id,\n request_timeout=request_timeout,\n )", "def hab_encrypted(self) -> bool:\n return self._dek_key is not None", "def get_data_encryption_key(self, account: str) -> str:\n return self._contract.functions.getDataEncryptionKey(account).call()", "def test_encryption_cycle_aes_256_gcm_iv12_tag16_hkdf_sha256_non_framed(self):\n ciphertext, _ = aws_encryption_sdk.encrypt(\n source=VALUES[\"plaintext_128\"],\n key_provider=self.kms_master_key_provider,\n encryption_context=VALUES[\"encryption_context\"],\n frame_length=0,\n algorithm=Algorithm.AES_256_GCM_IV12_TAG16_HKDF_SHA256,\n )\n plaintext, _ = aws_encryption_sdk.decrypt(source=ciphertext, key_provider=self.kms_master_key_provider)\n assert plaintext == VALUES[\"plaintext_128\"]" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
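As a concrete illustration of the usage note in the row above, a hedged sketch of calling the wrapper through the Alibaba Cloud Python SDK. The package names, endpoint, credentials, and instance/key IDs are assumptions, not values taken from the dataset.

```python
# Hedged sketch: querying the encryption key of a TDE-enabled (BYOK) instance.
# Package names, endpoint, credentials, and IDs below are placeholders.
from alibabacloud_dds20151201.client import Client
from alibabacloud_dds20151201 import models as dds_20151201_models
from alibabacloud_tea_openapi import models as open_api_models
from alibabacloud_tea_util import models as util_models

config = open_api_models.Config(
    access_key_id="<access-key-id>",
    access_key_secret="<access-key-secret>",
    endpoint="mongodb.aliyuncs.com",
)
client = Client(config)

request = dds_20151201_models.DescribeDBInstanceEncryptionKeyRequest(
    dbinstance_id="dds-bp1example",   # placeholder instance ID
    encryption_key="<kms-key-id>",    # placeholder KMS key ID
)
response = client.describe_dbinstance_encryption_key_with_options(
    request, util_models.RuntimeOptions()
)
print(response.body)
```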
Usage: When you call the DescribeDBInstanceEncryptionKey operation, the instance must already have transparent data encryption (TDE) enabled in BYOK mode. You can call the [ModifyDBInstanceTDE](~~131267~~) operation to enable TDE.
def describe_dbinstance_encryption_key( self, request: dds_20151201_models.DescribeDBInstanceEncryptionKeyRequest, ) -> dds_20151201_models.DescribeDBInstanceEncryptionKeyResponse: runtime = util_models.RuntimeOptions() return self.describe_dbinstance_encryption_key_with_options(request, runtime)
[ "def describe_dbinstance_encryption_key_with_options(\n self,\n request: dds_20151201_models.DescribeDBInstanceEncryptionKeyRequest,\n runtime: util_models.RuntimeOptions,\n ) -> dds_20151201_models.DescribeDBInstanceEncryptionKeyResponse:\n UtilClient.validate_model(request)\n query = {}\n if not UtilClient.is_unset(request.dbinstance_id):\n query['DBInstanceId'] = request.dbinstance_id\n if not UtilClient.is_unset(request.encryption_key):\n query['EncryptionKey'] = request.encryption_key\n if not UtilClient.is_unset(request.owner_account):\n query['OwnerAccount'] = request.owner_account\n if not UtilClient.is_unset(request.owner_id):\n query['OwnerId'] = request.owner_id\n if not UtilClient.is_unset(request.resource_owner_account):\n query['ResourceOwnerAccount'] = request.resource_owner_account\n if not UtilClient.is_unset(request.resource_owner_id):\n query['ResourceOwnerId'] = request.resource_owner_id\n if not UtilClient.is_unset(request.security_token):\n query['SecurityToken'] = request.security_token\n req = open_api_models.OpenApiRequest(\n query=OpenApiUtilClient.query(query)\n )\n params = open_api_models.Params(\n action='DescribeDBInstanceEncryptionKey',\n version='2015-12-01',\n protocol='HTTPS',\n pathname='/',\n method='POST',\n auth_type='AK',\n style='RPC',\n req_body_type='formData',\n body_type='json'\n )\n return TeaCore.from_map(\n dds_20151201_models.DescribeDBInstanceEncryptionKeyResponse(),\n self.call_api(params, req, runtime)\n )", "def testDbKeyRotation(self):\r\n @contextmanager\r\n def _OverrideSecret(secret, secret_value):\r\n try:\r\n old_secret_value = secrets.GetSharedSecretsManager()._secrets[secret]\r\n secrets.GetSharedSecretsManager()._secrets[secret] = secret_value\r\n # Clear the cached crypter.\r\n if hasattr(_CryptValue, '_crypter'):\r\n del _CryptValue._crypter\r\n yield\r\n finally:\r\n secrets.GetSharedSecretsManager()._secrets[secret] = old_secret_value\r\n if hasattr(_CryptValue, '_crypter'):\r\n del _CryptValue._crypter\r\n\r\n # Encrypt a value using the original key.\r\n plaintext = 'quick brown fox'\r\n self._crypt_inst.Set(plaintext)\r\n\r\n # Add a new key to the keyset and make it primary and ensure that plaintext can still be recovered.\r\n writer = keyczar_dict.DictWriter(secrets.GetSharedSecretsManager()._secrets['db_crypt'])\r\n czar = keyczar.GenericKeyczar(keyczar_dict.DictReader(writer.dict))\r\n czar.AddVersion(keyinfo.PRIMARY)\r\n czar.Write(writer)\r\n\r\n with _OverrideSecret('db_crypt', json.dumps(writer.dict)):\r\n self.assertEqual(self._crypt_inst.Get().Decrypt(), plaintext)\r\n\r\n # Now remove old key and verify that plaintext cannot be recovered.\r\n czar.Demote(1)\r\n czar.Revoke(1)\r\n czar.Write(writer)\r\n with _OverrideSecret('db_crypt', json.dumps(writer.dict)):\r\n self.assertRaises(errors.KeyNotFoundError, self._crypt_inst.Get().Decrypt)", "def update_wallet(db, type, data):\n d = data\n kds = BCDataStream()\n vds = BCDataStream()\n\n # Write the type code to the key\n kds.write_string(type)\n vds.write(\"\") # Ensure there is something\n\n try:\n if type == \"tx\":\n raise NotImplementedError(\"Writing items of type 'tx'\")\n kds.write(d['tx_id'])\n #d.update(parse_WalletTx(vds))\n elif type == \"name\":\n kds.write(d['hash'])\n vds.write(d['name'])\n elif type == \"version\":\n vds.write_uint32(d['version'])\n elif type == \"setting\":\n raise NotImplementedError(\"Writing items of type 'setting'\")\n kds.write_string(d['setting'])\n #d['value'] = parse_setting(d['setting'], vds)\n elif type == \"key\":\n 
kds.write_string(d['public_key'])\n vds.write_string(d['private_key'])\n elif type == \"wkey\":\n kds.write_string(d['public_key'])\n vds.write_string(d['private_key'])\n vds.write_int64(d['created'])\n vds.write_int64(d['expires'])\n vds.write_string(d['comment'])\n elif type == \"ckey\":\n kds.write_string(d['public_key'])\n kds.write_string(d['crypted_key'])\n elif type == \"mkey\":\n kds.write_int32(d['nID'])\n vds.write_string(d['crypted_key'])\n vds.write_string(d['salt'])\n vds.write_int32(d['nDeriveIterations'])\n vds.write_int32(d['nDerivationMethod'])\n vds.write_string(d['vchOtherDerivationParameters'])\n elif type == \"defaultkey\":\n vds.write_string(d['key'])\n elif type == \"pool\":\n kds.write_int64(d['n'])\n vds.write_int32(d['nVersion'])\n vds.write_int64(d['nTime'])\n vds.write_string(d['public_key'])\n elif type == \"acc\":\n kds.write_string(d['account'])\n vds.write_int32(d['nVersion'])\n vds.write_string(d['public_key'])\n elif type == \"acentry\":\n kds.write_string(d['account'])\n kds.write_uint64(d['n'])\n vds.write_int32(d['nVersion'])\n vds.write_int64(d['nCreditDebit'])\n vds.write_int64(d['nTime'])\n vds.write_string(d['otherAccount'])\n vds.write_string(d['comment'])\n elif type == \"bestblock\":\n vds.write_int32(d['nVersion'])\n vds.write_compact_size(len(d['hashes']))\n for h in d['hashes']:\n vds.write(h)\n else:\n print \"Unknown key type: \"+type\n\n # Write the key/value pair to the database\n db.put(kds.input, vds.input)\n\n except Exception, e:\n print(\"ERROR writing to wallet.dat, type %s\"%type)\n print(\"data dictionary: %r\"%data)", "def describe_user_encryption_key_list_with_options(\n self,\n request: dds_20151201_models.DescribeUserEncryptionKeyListRequest,\n runtime: util_models.RuntimeOptions,\n ) -> dds_20151201_models.DescribeUserEncryptionKeyListResponse:\n UtilClient.validate_model(request)\n query = {}\n if not UtilClient.is_unset(request.dbinstance_id):\n query['DBInstanceId'] = request.dbinstance_id\n if not UtilClient.is_unset(request.owner_account):\n query['OwnerAccount'] = request.owner_account\n if not UtilClient.is_unset(request.owner_id):\n query['OwnerId'] = request.owner_id\n if not UtilClient.is_unset(request.resource_owner_account):\n query['ResourceOwnerAccount'] = request.resource_owner_account\n if not UtilClient.is_unset(request.resource_owner_id):\n query['ResourceOwnerId'] = request.resource_owner_id\n if not UtilClient.is_unset(request.security_token):\n query['SecurityToken'] = request.security_token\n if not UtilClient.is_unset(request.target_region_id):\n query['TargetRegionId'] = request.target_region_id\n req = open_api_models.OpenApiRequest(\n query=OpenApiUtilClient.query(query)\n )\n params = open_api_models.Params(\n action='DescribeUserEncryptionKeyList',\n version='2015-12-01',\n protocol='HTTPS',\n pathname='/',\n method='POST',\n auth_type='AK',\n style='RPC',\n req_body_type='formData',\n body_type='json'\n )\n return TeaCore.from_map(\n dds_20151201_models.DescribeUserEncryptionKeyListResponse(),\n self.call_api(params, req, runtime)\n )", "def walletinfo(test_unlock):\n stm = shared_morphene_instance()\n if mph.rpc is not None:\n mph.rpc.rpcconnect() \n t = PrettyTable([\"Key\", \"Value\"])\n t.align = \"l\"\n t.add_row([\"created\", mph.wallet.created()])\n t.add_row([\"locked\", mph.wallet.locked()])\n t.add_row([\"Number of stored keys\", len(mph.wallet.getPublicKeys())])\n t.add_row([\"sql-file\", mph.wallet.keyStorage.sqlDataBaseFile])\n password_storage = mph.config[\"password_storage\"]\n 
t.add_row([\"password_storage\", password_storage])\n password = os.environ.get(\"UNLOCK\")\n if password is not None:\n t.add_row([\"UNLOCK env set\", \"yes\"])\n else:\n t.add_row([\"UNLOCK env set\", \"no\"])\n if KEYRING_AVAILABLE:\n t.add_row([\"keyring installed\", \"yes\"])\n else:\n t.add_row([\"keyring installed\", \"no\"])\n if test_unlock:\n if unlock_wallet(stm):\n t.add_row([\"Wallet unlock\", \"successful\"])\n else:\n t.add_row([\"Wallet unlock\", \"not working\"])\n # t.add_row([\"getPublicKeys\", str(mph.wallet.getPublicKeys())])\n print(t)", "def _get_sql_db_tde_disabled_event(com, ext):\n friendly_cloud_type = util.friendly_string(com.get('cloud_type'))\n reference = com.get('reference')\n description = (\n '{} SQL DB {} has TDE disabled.'\n .format(friendly_cloud_type, reference)\n )\n recommendation = (\n 'Check {} SQL DB {} and enable TDE.'\n .format(friendly_cloud_type, reference)\n )\n event_record = {\n # Preserve the extended properties from the virtual\n # machine record because they provide useful context to\n # locate the virtual machine that led to the event.\n 'ext': util.merge_dicts(ext, {\n 'record_type': 'sql_db_tde_event'\n }),\n 'com': {\n 'cloud_type': com.get('cloud_type'),\n 'record_type': 'sql_db_tde_event',\n 'reference': reference,\n 'description': description,\n 'recommendation': recommendation,\n }\n }\n\n _log.info('Generating sql_db_tde_event; %r', event_record)\n yield event_record", "def CreateFromExtendedKey(self,\n wallet_name: str,\n ex_key_str: str) -> HdWalletBase:\n try:\n bip_obj = Bip32Secp256k1.FromExtendedKey(ex_key_str)\n except Bip32KeyError as ex:\n raise ValueError(f\"Invalid extended key: {ex_key_str}\") from ex\n\n # Segwit wallet uses hardened derivation, not supported by public-only objects\n if bip_obj.IsPublicOnly() and self.m_mnemonic_type == HdWalletElectrumV2MnemonicTypes.SEGWIT:\n raise ValueError(\"Only private extended keys are supported for segwit mnemonic type\")\n\n electrum_obj = self.m_electrum_cls(bip_obj)\n return HdWalletElectrumV2(wallet_name=wallet_name,\n electrum_obj=electrum_obj)", "def create_tsigkey(self, context, tsigkey):\n\n if tsigkey['algorithm'] not in TSIG_SUPPORTED_ALGORITHMS:\n raise exceptions.NotImplemented('Unsupported algorithm')\n\n tsigkey_m = models.TsigKey()\n\n tsigkey_m.update({\n 'designate_id': tsigkey['id'],\n 'name': tsigkey['name'],\n 'algorithm': tsigkey['algorithm'],\n 'secret': base64.b64encode(tsigkey['secret'])\n })\n\n tsigkey_m.save(self.session)\n\n # NOTE(kiall): Prepare and execute query to install this TSIG Key on\n # every domain. 
We use a manual query here since anything\n # else would be impossibly slow.\n query_select = select([\n models.Domain.__table__.c.id,\n \"'TSIG-ALLOW-AXFR'\",\n \"'%s'\" % tsigkey['name']]\n )\n\n columns = [\n models.DomainMetadata.__table__.c.domain_id,\n models.DomainMetadata.__table__.c.kind,\n models.DomainMetadata.__table__.c.content,\n ]\n\n query = InsertFromSelect(models.DomainMetadata.__table__, query_select,\n columns)\n\n # NOTE(kiall): A TX is required for, at the least, SQLite.\n self.session.begin()\n self.session.execute(query)\n self.session.commit()", "def encrypt_item(table_name, aws_cmk_id):\n index_key = {\"partition_attribute\": {\"S\": \"is this\"}, \"sort_attribute\": {\"N\": \"55\"}}\n plaintext_item = {\n \"example\": {\"S\": \"data\"},\n \"some numbers\": {\"N\": \"99\"},\n \"and some binary\": {\"B\": b\"\\x00\\x01\\x02\"},\n \"leave me\": {\"S\": \"alone\"}, # We want to ignore this attribute\n }\n # Collect all of the attributes that will be encrypted (used later).\n encrypted_attributes = set(plaintext_item.keys())\n encrypted_attributes.remove(\"leave me\")\n # Collect all of the attributes that will not be encrypted (used later).\n unencrypted_attributes = set(index_key.keys())\n unencrypted_attributes.add(\"leave me\")\n # Add the index pairs to the item.\n plaintext_item.update(index_key)\n\n # Create a normal client.\n client = boto3.client(\"dynamodb\")\n # Create a crypto materials provider using the specified AWS KMS key.\n aws_kms_cmp = AwsKmsCryptographicMaterialsProvider(key_id=aws_cmk_id)\n # Create attribute actions that tells the encrypted client to encrypt all attributes except one.\n actions = AttributeActions(\n default_action=CryptoAction.ENCRYPT_AND_SIGN, attribute_actions={\"leave me\": CryptoAction.DO_NOTHING}\n )\n # Use these objects to create an encrypted client.\n encrypted_client = EncryptedClient(client=client, materials_provider=aws_kms_cmp, attribute_actions=actions)\n\n # Put the item to the table, using the encrypted client to transparently encrypt it.\n encrypted_client.put_item(TableName=table_name, Item=plaintext_item)\n\n # Get the encrypted item using the standard client.\n encrypted_item = client.get_item(TableName=table_name, Key=index_key)[\"Item\"]\n\n # Get the item using the encrypted client, transparently decrypting it.\n decrypted_item = encrypted_client.get_item(TableName=table_name, Key=index_key)[\"Item\"]\n\n # Verify that all of the attributes are different in the encrypted item\n for name in encrypted_attributes:\n assert encrypted_item[name] != plaintext_item[name]\n assert decrypted_item[name] == plaintext_item[name]\n\n # Verify that all of the attributes that should not be encrypted were not.\n for name in unencrypted_attributes:\n assert decrypted_item[name] == encrypted_item[name] == plaintext_item[name]\n\n # Clean up the item\n encrypted_client.delete_item(TableName=table_name, Key=index_key)", "def modify_db_instance(DBInstanceIdentifier=None, AllocatedStorage=None, DBInstanceClass=None, DBSubnetGroupName=None, DBSecurityGroups=None, VpcSecurityGroupIds=None, ApplyImmediately=None, MasterUserPassword=None, DBParameterGroupName=None, BackupRetentionPeriod=None, PreferredBackupWindow=None, PreferredMaintenanceWindow=None, MultiAZ=None, EngineVersion=None, AllowMajorVersionUpgrade=None, AutoMinorVersionUpgrade=None, LicenseModel=None, Iops=None, OptionGroupName=None, NewDBInstanceIdentifier=None, StorageType=None, TdeCredentialArn=None, TdeCredentialPassword=None, CACertificateIdentifier=None, 
Domain=None, CopyTagsToSnapshot=None, MonitoringInterval=None, DBPortNumber=None, PubliclyAccessible=None, MonitoringRoleArn=None, DomainIAMRoleName=None, PromotionTier=None, EnableIAMDatabaseAuthentication=None, EnablePerformanceInsights=None, PerformanceInsightsKMSKeyId=None, PerformanceInsightsRetentionPeriod=None, CloudwatchLogsExportConfiguration=None, ProcessorFeatures=None, UseDefaultProcessorFeatures=None, DeletionProtection=None):\n pass", "def ekey(self):\n if self.security == 'LOW':\n raise Exception('LOW security does not need access to encryption key.')\n if self.__ekey:\n ekey = self.__ekey\n else:\n ekey = self.getuserinput(\n 'Provide kindi secrets encryption key: ',\n 'Cannot instantiate Secrets for MEDIUM and HIGH security if no key is provided'\n )\n if self.security == 'MEDIUM':\n self.__ekey = ekey\n return ekey", "def testEnumerateOnBDE(self):\n test_path = self._getTestDataPath('bdetogo.raw')\n test_evidence = evidence.RawDisk(source_path=test_path)\n\n # Test without credentials\n with self.assertRaises(TurbiniaException):\n partitions.Enumerate(test_evidence)\n\n # Test with bad credentials\n test_evidence.credentials = [('password', 'bde!TEST')]\n with self.assertRaises(TurbiniaException):\n partitions.Enumerate(test_evidence)\n\n # Test with good credentials\n test_evidence.credentials = [('password', 'bde-TEST')]\n path_specs = partitions.Enumerate(test_evidence)\n self.assertEqual(len(path_specs), 2)\n\n # Test GetPartitionEncryptionType\n encryption_type = partitions.GetPartitionEncryptionType(path_specs[0])\n self.assertEqual(encryption_type, 'BDE')", "def tdes(self, pin_block, cur_pin_key, action): # action = True means encryption, False means decryption\t\n\t\tif action: return enCrypt(\"DES3_ECB\", pin_block, cur_pin_key[:__KEYSLEN_3DES])\n\t\telse: return deCrypt(\"DES3_ECB\", pin_block, cur_pin_key[:__KEYSLEN_3DES])", "async def set_database_encryption_key(\n self,\n new_encryption_key: str,\n *,\n request_id: str = None,\n request_timeout: int = None,\n skip_validation: bool = False\n ) -> Ok:\n _constructor = SetDatabaseEncryptionKey.construct if skip_validation else SetDatabaseEncryptionKey\n\n return await self.client.request(\n _constructor(\n new_encryption_key=new_encryption_key,\n ),\n request_id=request_id,\n request_timeout=request_timeout,\n )", "def encryption_key(self) -> typing.Optional[aws_cdk.aws_kms.IKey]:\n ...", "async def check_database_encryption_key(\n self,\n encryption_key: str,\n *,\n request_id: str = None,\n request_timeout: int = None,\n skip_validation: bool = False\n ) -> Ok:\n _constructor = CheckDatabaseEncryptionKey.construct if skip_validation else CheckDatabaseEncryptionKey\n\n return await self.client.request(\n _constructor(\n encryption_key=encryption_key,\n ),\n request_id=request_id,\n request_timeout=request_timeout,\n )", "def hab_encrypted(self) -> bool:\n return self._dek_key is not None", "def get_data_encryption_key(self, account: str) -> str:\n return self._contract.functions.getDataEncryptionKey(account).call()", "def test_encryption_cycle_aes_256_gcm_iv12_tag16_hkdf_sha256_non_framed(self):\n ciphertext, _ = aws_encryption_sdk.encrypt(\n source=VALUES[\"plaintext_128\"],\n key_provider=self.kms_master_key_provider,\n encryption_context=VALUES[\"encryption_context\"],\n frame_length=0,\n algorithm=Algorithm.AES_256_GCM_IV12_TAG16_HKDF_SHA256,\n )\n plaintext, _ = aws_encryption_sdk.decrypt(source=ciphertext, key_provider=self.kms_master_key_provider)\n assert plaintext == VALUES[\"plaintext_128\"]" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
You can use a custom key obtained by calling the DescribeUserEncryptionKeyList operation to enable TDE. For more information, see [ModifyDBInstanceTDE](~~131267~~).
def describe_user_encryption_key_list_with_options( self, request: dds_20151201_models.DescribeUserEncryptionKeyListRequest, runtime: util_models.RuntimeOptions, ) -> dds_20151201_models.DescribeUserEncryptionKeyListResponse: UtilClient.validate_model(request) query = {} if not UtilClient.is_unset(request.dbinstance_id): query['DBInstanceId'] = request.dbinstance_id if not UtilClient.is_unset(request.owner_account): query['OwnerAccount'] = request.owner_account if not UtilClient.is_unset(request.owner_id): query['OwnerId'] = request.owner_id if not UtilClient.is_unset(request.resource_owner_account): query['ResourceOwnerAccount'] = request.resource_owner_account if not UtilClient.is_unset(request.resource_owner_id): query['ResourceOwnerId'] = request.resource_owner_id if not UtilClient.is_unset(request.security_token): query['SecurityToken'] = request.security_token if not UtilClient.is_unset(request.target_region_id): query['TargetRegionId'] = request.target_region_id req = open_api_models.OpenApiRequest( query=OpenApiUtilClient.query(query) ) params = open_api_models.Params( action='DescribeUserEncryptionKeyList', version='2015-12-01', protocol='HTTPS', pathname='/', method='POST', auth_type='AK', style='RPC', req_body_type='formData', body_type='json' ) return TeaCore.from_map( dds_20151201_models.DescribeUserEncryptionKeyListResponse(), self.call_api(params, req, runtime) )
[ "def describe_dbinstance_encryption_key_with_options(\n self,\n request: dds_20151201_models.DescribeDBInstanceEncryptionKeyRequest,\n runtime: util_models.RuntimeOptions,\n ) -> dds_20151201_models.DescribeDBInstanceEncryptionKeyResponse:\n UtilClient.validate_model(request)\n query = {}\n if not UtilClient.is_unset(request.dbinstance_id):\n query['DBInstanceId'] = request.dbinstance_id\n if not UtilClient.is_unset(request.encryption_key):\n query['EncryptionKey'] = request.encryption_key\n if not UtilClient.is_unset(request.owner_account):\n query['OwnerAccount'] = request.owner_account\n if not UtilClient.is_unset(request.owner_id):\n query['OwnerId'] = request.owner_id\n if not UtilClient.is_unset(request.resource_owner_account):\n query['ResourceOwnerAccount'] = request.resource_owner_account\n if not UtilClient.is_unset(request.resource_owner_id):\n query['ResourceOwnerId'] = request.resource_owner_id\n if not UtilClient.is_unset(request.security_token):\n query['SecurityToken'] = request.security_token\n req = open_api_models.OpenApiRequest(\n query=OpenApiUtilClient.query(query)\n )\n params = open_api_models.Params(\n action='DescribeDBInstanceEncryptionKey',\n version='2015-12-01',\n protocol='HTTPS',\n pathname='/',\n method='POST',\n auth_type='AK',\n style='RPC',\n req_body_type='formData',\n body_type='json'\n )\n return TeaCore.from_map(\n dds_20151201_models.DescribeDBInstanceEncryptionKeyResponse(),\n self.call_api(params, req, runtime)\n )", "def create_tsigkey(self, context, tsigkey):\n\n if tsigkey['algorithm'] not in TSIG_SUPPORTED_ALGORITHMS:\n raise exceptions.NotImplemented('Unsupported algorithm')\n\n tsigkey_m = models.TsigKey()\n\n tsigkey_m.update({\n 'designate_id': tsigkey['id'],\n 'name': tsigkey['name'],\n 'algorithm': tsigkey['algorithm'],\n 'secret': base64.b64encode(tsigkey['secret'])\n })\n\n tsigkey_m.save(self.session)\n\n # NOTE(kiall): Prepare and execute query to install this TSIG Key on\n # every domain. 
We use a manual query here since anything\n # else would be impossibly slow.\n query_select = select([\n models.Domain.__table__.c.id,\n \"'TSIG-ALLOW-AXFR'\",\n \"'%s'\" % tsigkey['name']]\n )\n\n columns = [\n models.DomainMetadata.__table__.c.domain_id,\n models.DomainMetadata.__table__.c.kind,\n models.DomainMetadata.__table__.c.content,\n ]\n\n query = InsertFromSelect(models.DomainMetadata.__table__, query_select,\n columns)\n\n # NOTE(kiall): A TX is required for, at the least, SQLite.\n self.session.begin()\n self.session.execute(query)\n self.session.commit()", "def describe_dbinstance_encryption_key(\n self,\n request: dds_20151201_models.DescribeDBInstanceEncryptionKeyRequest,\n ) -> dds_20151201_models.DescribeDBInstanceEncryptionKeyResponse:\n runtime = util_models.RuntimeOptions()\n return self.describe_dbinstance_encryption_key_with_options(request, runtime)", "def generate_ed448_privkey():\n return ed448.Ed448PrivateKey.generate()", "def CreateFromExtendedKey(self,\n wallet_name: str,\n ex_key_str: str) -> HdWalletBase:\n try:\n bip_obj = Bip32Secp256k1.FromExtendedKey(ex_key_str)\n except Bip32KeyError as ex:\n raise ValueError(f\"Invalid extended key: {ex_key_str}\") from ex\n\n # Segwit wallet uses hardened derivation, not supported by public-only objects\n if bip_obj.IsPublicOnly() and self.m_mnemonic_type == HdWalletElectrumV2MnemonicTypes.SEGWIT:\n raise ValueError(\"Only private extended keys are supported for segwit mnemonic type\")\n\n electrum_obj = self.m_electrum_cls(bip_obj)\n return HdWalletElectrumV2(wallet_name=wallet_name,\n electrum_obj=electrum_obj)", "def ekey(self):\n if self.security == 'LOW':\n raise Exception('LOW security does not need access to encryption key.')\n if self.__ekey:\n ekey = self.__ekey\n else:\n ekey = self.getuserinput(\n 'Provide kindi secrets encryption key: ',\n 'Cannot instantiate Secrets for MEDIUM and HIGH security if no key is provided'\n )\n if self.security == 'MEDIUM':\n self.__ekey = ekey\n return ekey", "def _derive_key_iv(nonce, user_key, settings):\n if settings.ticketCipher == \"aes128gcm\":\n prf_name = \"sha256\"\n prf_size = 32\n else:\n prf_name = \"sha384\"\n prf_size = 48\n\n # mix the nonce with the key set by user\n secret = bytearray(prf_size)\n secret = secureHMAC(secret, nonce, prf_name)\n secret = derive_secret(secret, bytearray(b'derived'), None, prf_name)\n secret = secureHMAC(secret, user_key, prf_name)\n\n ticket_secret = derive_secret(secret,\n bytearray(b'SessionTicket secret'),\n None, prf_name)\n\n key = HKDF_expand_label(ticket_secret, b\"key\", b\"\", len(user_key),\n prf_name)\n # all AEADs use 12 byte long IV\n iv = HKDF_expand_label(ticket_secret, b\"iv\", b\"\", 12, prf_name)\n return key, iv", "def _derive_key(\n self, passphrase: str, otp: YubikeyOTP, *args : bytes\n ) -> bytes:\n return self._context_kdf.derive(\n combine_keys(\n passphrase.encode('utf-8'),\n otp.token.private_uid,\n *args\n )\n )", "def update_wallet(db, type, data):\n d = data\n kds = BCDataStream()\n vds = BCDataStream()\n\n # Write the type code to the key\n kds.write_string(type)\n vds.write(\"\") # Ensure there is something\n\n try:\n if type == \"tx\":\n raise NotImplementedError(\"Writing items of type 'tx'\")\n kds.write(d['tx_id'])\n #d.update(parse_WalletTx(vds))\n elif type == \"name\":\n kds.write(d['hash'])\n vds.write(d['name'])\n elif type == \"version\":\n vds.write_uint32(d['version'])\n elif type == \"setting\":\n raise NotImplementedError(\"Writing items of type 'setting'\")\n 
kds.write_string(d['setting'])\n #d['value'] = parse_setting(d['setting'], vds)\n elif type == \"key\":\n kds.write_string(d['public_key'])\n vds.write_string(d['private_key'])\n elif type == \"wkey\":\n kds.write_string(d['public_key'])\n vds.write_string(d['private_key'])\n vds.write_int64(d['created'])\n vds.write_int64(d['expires'])\n vds.write_string(d['comment'])\n elif type == \"ckey\":\n kds.write_string(d['public_key'])\n kds.write_string(d['crypted_key'])\n elif type == \"mkey\":\n kds.write_int32(d['nID'])\n vds.write_string(d['crypted_key'])\n vds.write_string(d['salt'])\n vds.write_int32(d['nDeriveIterations'])\n vds.write_int32(d['nDerivationMethod'])\n vds.write_string(d['vchOtherDerivationParameters'])\n elif type == \"defaultkey\":\n vds.write_string(d['key'])\n elif type == \"pool\":\n kds.write_int64(d['n'])\n vds.write_int32(d['nVersion'])\n vds.write_int64(d['nTime'])\n vds.write_string(d['public_key'])\n elif type == \"acc\":\n kds.write_string(d['account'])\n vds.write_int32(d['nVersion'])\n vds.write_string(d['public_key'])\n elif type == \"acentry\":\n kds.write_string(d['account'])\n kds.write_uint64(d['n'])\n vds.write_int32(d['nVersion'])\n vds.write_int64(d['nCreditDebit'])\n vds.write_int64(d['nTime'])\n vds.write_string(d['otherAccount'])\n vds.write_string(d['comment'])\n elif type == \"bestblock\":\n vds.write_int32(d['nVersion'])\n vds.write_compact_size(len(d['hashes']))\n for h in d['hashes']:\n vds.write(h)\n else:\n print \"Unknown key type: \"+type\n\n # Write the key/value pair to the database\n db.put(kds.input, vds.input)\n\n except Exception, e:\n print(\"ERROR writing to wallet.dat, type %s\"%type)\n print(\"data dictionary: %r\"%data)", "def update_keys(key, user_name):\n iam_user = iam_manager.deactivate_keys(key, user_name)", "def testDbKeyRotation(self):\r\n @contextmanager\r\n def _OverrideSecret(secret, secret_value):\r\n try:\r\n old_secret_value = secrets.GetSharedSecretsManager()._secrets[secret]\r\n secrets.GetSharedSecretsManager()._secrets[secret] = secret_value\r\n # Clear the cached crypter.\r\n if hasattr(_CryptValue, '_crypter'):\r\n del _CryptValue._crypter\r\n yield\r\n finally:\r\n secrets.GetSharedSecretsManager()._secrets[secret] = old_secret_value\r\n if hasattr(_CryptValue, '_crypter'):\r\n del _CryptValue._crypter\r\n\r\n # Encrypt a value using the original key.\r\n plaintext = 'quick brown fox'\r\n self._crypt_inst.Set(plaintext)\r\n\r\n # Add a new key to the keyset and make it primary and ensure that plaintext can still be recovered.\r\n writer = keyczar_dict.DictWriter(secrets.GetSharedSecretsManager()._secrets['db_crypt'])\r\n czar = keyczar.GenericKeyczar(keyczar_dict.DictReader(writer.dict))\r\n czar.AddVersion(keyinfo.PRIMARY)\r\n czar.Write(writer)\r\n\r\n with _OverrideSecret('db_crypt', json.dumps(writer.dict)):\r\n self.assertEqual(self._crypt_inst.Get().Decrypt(), plaintext)\r\n\r\n # Now remove old key and verify that plaintext cannot be recovered.\r\n czar.Demote(1)\r\n czar.Revoke(1)\r\n czar.Write(writer)\r\n with _OverrideSecret('db_crypt', json.dumps(writer.dict)):\r\n self.assertRaises(errors.KeyNotFoundError, self._crypt_inst.Get().Decrypt)", "def tdes(self, pin_block, cur_pin_key, action): # action = True means encryption, False means decryption\t\n\t\tif action: return enCrypt(\"DES3_ECB\", pin_block, cur_pin_key[:__KEYSLEN_3DES])\n\t\telse: return deCrypt(\"DES3_ECB\", pin_block, cur_pin_key[:__KEYSLEN_3DES])", "def seed_to_privkey(self, seed: str) -> bytes:\n seed_bytes = 
mnemonic.Mnemonic.to_seed(seed, passphrase=\"\")\n hd_wallet = hdwallets.BIP32.from_seed(seed_bytes)\n # This can raise a `hdwallets.BIP32DerivationError` (which we alias so\n # that the same exception type is also in the `cosmospy` namespace).\n derived_privkey = hd_wallet.get_privkey_from_path(self.derive_path)\n\n self._privkey = derived_privkey\n\n return derived_privkey", "async def generate_wallet_key(self, seed: str = None) -> str:\n return await indy.wallet.generate_wallet_key(seed)", "def _get_sql_db_tde_disabled_event(com, ext):\n friendly_cloud_type = util.friendly_string(com.get('cloud_type'))\n reference = com.get('reference')\n description = (\n '{} SQL DB {} has TDE disabled.'\n .format(friendly_cloud_type, reference)\n )\n recommendation = (\n 'Check {} SQL DB {} and enable TDE.'\n .format(friendly_cloud_type, reference)\n )\n event_record = {\n # Preserve the extended properties from the virtual\n # machine record because they provide useful context to\n # locate the virtual machine that led to the event.\n 'ext': util.merge_dicts(ext, {\n 'record_type': 'sql_db_tde_event'\n }),\n 'com': {\n 'cloud_type': com.get('cloud_type'),\n 'record_type': 'sql_db_tde_event',\n 'reference': reference,\n 'description': description,\n 'recommendation': recommendation,\n }\n }\n\n _log.info('Generating sql_db_tde_event; %r', event_record)\n yield event_record", "def enable_kv2_engine(mount_path=\"kv\"):\n headers = insert_token_in_headers()\n payload = {\n \"type\": \"kv\",\n \"options\": {\n \"version\": \"2\"\n }\n }\n requests.post(VAULT_ADDRESS + VAULT_MNT + mount_path, headers=headers, data=json.dumps(payload), verify=False)", "def get_key(self, user):\n if not user in self.keys:\n return None\n private_key = secfs.crypto.keys[user]\n return secfs.crypto.decrypt(private_key, self.keys[user])", "def save_symmetric_key(self, key, user):\n self.temp_passphrase = key\n self.send_request(user, self.KM_TEMP_KEY_ACK)", "def create_key(self, context, algorithm=None, length=0,\n expiration=None, **kwargs):\n return key.Passphrase(passphrase=kwargs.get('passphrase', ''))" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
This operation is available only for replica set instances that run MongoDB 4.2 or earlier, and for sharded cluster instances. If you have applied for a public endpoint for the ApsaraDB for MongoDB instance, you must call the [ReleasePublicNetworkAddress](~~67604~~) operation to release the public endpoint before you call the MigrateAvailableZone operation. The following prerequisites must also be met: transparent data encryption (TDE) is disabled for the instance; the source zone and the destination zone belong to the same region; and, if the instance resides in a virtual private cloud (VPC), a vSwitch has been created in the destination zone. For more information about how to create a vSwitch, see [Work with vSwitches](~~65387~~).
def migrate_available_zone_with_options( self, request: dds_20151201_models.MigrateAvailableZoneRequest, runtime: util_models.RuntimeOptions, ) -> dds_20151201_models.MigrateAvailableZoneResponse: UtilClient.validate_model(request) query = {} if not UtilClient.is_unset(request.dbinstance_id): query['DBInstanceId'] = request.dbinstance_id if not UtilClient.is_unset(request.effective_time): query['EffectiveTime'] = request.effective_time if not UtilClient.is_unset(request.owner_account): query['OwnerAccount'] = request.owner_account if not UtilClient.is_unset(request.owner_id): query['OwnerId'] = request.owner_id if not UtilClient.is_unset(request.resource_owner_account): query['ResourceOwnerAccount'] = request.resource_owner_account if not UtilClient.is_unset(request.resource_owner_id): query['ResourceOwnerId'] = request.resource_owner_id if not UtilClient.is_unset(request.vswitch): query['Vswitch'] = request.vswitch if not UtilClient.is_unset(request.zone_id): query['ZoneId'] = request.zone_id req = open_api_models.OpenApiRequest( query=OpenApiUtilClient.query(query) ) params = open_api_models.Params( action='MigrateAvailableZone', version='2015-12-01', protocol='HTTPS', pathname='/', method='POST', auth_type='AK', style='RPC', req_body_type='formData', body_type='json' ) return TeaCore.from_map( dds_20151201_models.MigrateAvailableZoneResponse(), self.call_api(params, req, runtime) )
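A minimal calling sketch for the method above, assuming the standard alibabacloud_dds20151201 / alibabacloud_tea_openapi / alibabacloud_tea_util package layout; the credentials, endpoint value, instance ID, zone ID, and vSwitch ID are placeholders and assumptions, not values taken from this row:

from alibabacloud_dds20151201.client import Client as DdsClient
from alibabacloud_dds20151201 import models as dds_20151201_models
from alibabacloud_tea_openapi import models as open_api_models
from alibabacloud_tea_util import models as util_models

# Placeholder credentials, endpoint, and resource IDs -- substitute real values.
client = DdsClient(open_api_models.Config(
    access_key_id='<access-key-id>',
    access_key_secret='<access-key-secret>',
    endpoint='mongodb.aliyuncs.com',
))

# Only fields read by migrate_available_zone_with_options are set; Vswitch is
# needed when the instance lives in a VPC, as the description above notes.
request = dds_20151201_models.MigrateAvailableZoneRequest(
    dbinstance_id='dds-bp1xxxxxxxxxxxxx',
    zone_id='cn-hangzhou-h',
    vswitch='vsw-bp1xxxxxxxxxxxxx',
)

response = client.migrate_available_zone_with_options(
    request, util_models.RuntimeOptions())
print(response.body)

The plain migrate_available_zone wrapper shown in the next row makes the same call with default runtime options.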
[ "def migrate_available_zone(\n self,\n request: dds_20151201_models.MigrateAvailableZoneRequest,\n ) -> dds_20151201_models.MigrateAvailableZoneResponse:\n runtime = util_models.RuntimeOptions()\n return self.migrate_available_zone_with_options(request, runtime)", "def test_replace_namespaced_virtual_machine_instance_replica_set(self):\n pass", "def migrate_neutron_database_to_ovn(plugin):\n ctx = n_context.get_admin_context()\n with db_api.CONTEXT_WRITER.using(ctx) as session:\n # Change network type from vxlan geneve\n segments = network_obj.NetworkSegment.get_objects(\n ctx, network_type='vxlan')\n for segment in segments:\n segment.network_type = 'geneve'\n segment.update()\n # Update Geneve allocation for the segment\n session.query(geneveallocation.GeneveAllocation).filter(\n geneveallocation.GeneveAllocation.geneve_vni ==\n segment.segmentation_id).update({\"allocated\": True})\n # Zero Vxlan allocations\n session.query(vxlanallocation.VxlanAllocation).filter(\n vxlanallocation.VxlanAllocation.vxlan_vni ==\n segment.segmentation_id).update({\"allocated\": False})\n\n port_bindings = port_obj.PortBinding.get_objects(\n ctx, vif_type='ovs', vnic_type='normal', status='ACTIVE')\n for pb in port_bindings:\n if not pb.vif_details:\n continue\n vif_details = pb.vif_details.copy()\n for detail in VIF_DETAILS_TO_REMOVE:\n try:\n del vif_details[detail]\n except KeyError:\n pass\n if vif_details != pb.vif_details:\n pb.vif_details = vif_details\n try:\n pb.update()\n except exceptions.ObjectNotFound:\n # When Neutron server is running, it could happen that\n # for example gateway port has been rescheduled to a\n # different gateway chassis.\n pass\n\n for trunk in trunk_obj.Trunk.get_objects(ctx):\n for subport in trunk.sub_ports:\n pbs = port_obj.PortBinding.get_objects(\n ctx, port_id=subport.port_id)\n for pb in pbs:\n profile = {}\n if pb.profile:\n profile = pb.profile.copy()\n profile['parent_name'] = trunk.port_id\n profile['tag'] = subport.segmentation_id\n if profile != pb.profile:\n pb.profile = profile\n pb.update()", "def mongo_upgrade():\n if mongo.init():\n print \"Starting MongoDB migration.\"\n mongo.connect()\n\n # Check for schema version and create it.\n if \"cuckoo_schema\" in mongo.db.collection_names():\n print \"Mongo schema version not expected\"\n sys.exit()\n else:\n mongo.db.cuckoo_schema.save({\"version\": mongo_revision})\n else:\n print \"Mongo reporting module not enabled, skipping mongo migration.\"", "def transfer_from_local_to_remote():\n local_client = MongoClient()\n local_db = local_client['analytical360']\n collections = local_db.list_collection_names()\n\n uname = creds['username']\n pwd = creds['password']\n server = creds['server']\n\n conn_str = 'mongodb+srv://{}:{}@{}.mongodb.net/test?retryWrites=true&w=majority'.format(uname, pwd, server)\n remote_client = MongoClient(conn_str)\n remote_db = remote_client['products']\n\n for c in collections:\n print(c)\n coll = local_db[c]\n for doc in tqdm(list(coll.find())):\n existing = list(remote_db[c].find({**doc}))\n if len(existing) == 0:\n remote_db[c].insert_one(doc)", "def test_1_pdns_zone_action(self):\n name = \"test.example36.com\"\n label = \"us-south\"\n resp = self.zone.list_dnszones(instance_id=self.instance_id)\n assert resp is not None\n assert resp.status_code == 200\n\n # create dns zone\n resp = self.zone.create_dnszone(\n instance_id=self.instance_id, name=name, label=label)\n assert resp is not None\n assert resp.status_code == 200\n assert resp.get_result().get(\"instance_id\") == 
self.instance_id\n assert resp.get_result().get(\"name\") == name\n assert resp.get_result().get(\"label\") == label\n zone_id = resp.get_result().get(\"id\")\n\n # get dns zone\n resp = self.zone.get_dnszone(\n instance_id=self.instance_id, dnszone_id=zone_id)\n assert resp.status_code == 200\n assert resp.get_result().get(\"instance_id\") == self.instance_id\n assert resp.get_result().get(\"name\") == name\n assert resp.get_result().get(\"label\") == label\n\n # update dns zone\n label = \"us-south-1\"\n desc = \"test instance\"\n resp = self.zone.update_dnszone(\n instance_id=self.instance_id, dnszone_id=zone_id, description=desc, label=label)\n assert resp is not None\n assert resp.status_code == 200\n assert resp.get_result().get(\"instance_id\") == self.instance_id\n assert resp.get_result().get(\"name\") == name\n assert resp.get_result().get(\"label\") == label\n assert resp.get_result().get(\"description\") == desc\n\n # delete dns zone\n resp = self.zone.delete_dnszone(\n instance_id=self.instance_id, dnszone_id=zone_id)\n assert resp is not None\n assert resp.status_code == 204", "def _update_zone_info(self, obj, server):\n if server.availability_zone:\n placement = obj.data.get('placement', None)\n if not placement:\n obj.data['placement'] = {'zone': server.availability_zone}\n else:\n obj.data['placement'].setdefault('zone',\n server.availability_zone)\n # It is safe to use admin context here\n ctx = context.get_admin_context()\n node_obj.Node.update(ctx, obj.id, {'data': obj.data})", "def __init__(self, compute, project,\n network_name,\n subnetwork_name, preserve_external_ip, zone, region,\n instance_group_name):\n super(UnmanagedInstanceGroupMigration, self).__init__()\n self.instance_group = self.build_instance_group()\n self.instance_migration_handlers = []\n self.migration_status = MigrationStatus(0)", "def deploy(instance_id, new_db_id):\n if isinstance(query_db_cluster(instance_id), str):\n cluster_id = query_db_cluster(instance_id)\n try:\n response = RDS.restore_db_cluster_to_point_in_time(\n DBClusterIdentifier=new_db_id,\n SourceDBClusterIdentifier=cluster_id,\n UseLatestRestorableTime=True\n )\n click.secho(response['DBCluster']['DBClusterArn'], fg='green')\n except ClientError as error:\n click.echo(error)\n else:\n db_subnet = query_db_cluster(instance_id)\n try:\n response = RDS.restore_db_instance_to_point_in_time(\n SourceDBInstanceIdentifier=instance_id,\n TargetDBInstanceIdentifier=new_db_id,\n UseLatestRestorableTime=True,\n PubliclyAccessible=False,\n DBSubnetGroupName=db_subnet[1]\n )\n click.secho(response['DBInstance']['DBInstanceArn'], fg='green')\n except ClientError as error:\n click.echo(error)", "def test_replica_auto_balance_zone_best_effort_with_uneven_node_in_zones(client, core_api, volume_name, pod): # NOQA\n\n common.update_setting(client,\n SETTING_REPLICA_NODE_SOFT_ANTI_AFFINITY, \"true\")\n common.update_setting(client,\n SETTING_REPLICA_ZONE_SOFT_ANTI_AFFINITY, \"true\")\n common.update_setting(client,\n SETTING_DEFAULT_DATA_LOCALITY, \"best-effort\")\n common.update_setting(client,\n SETTING_REPLICA_AUTO_BALANCE, \"best-effort\")\n\n n1, n2, n3, n4, n5 = client.list_node()\n\n set_k8s_node_zone_label(core_api, n1.name, ZONE1)\n set_k8s_node_zone_label(core_api, n2.name, ZONE1)\n set_k8s_node_zone_label(core_api, n3.name, ZONE1)\n set_k8s_node_zone_label(core_api, n4.name, ZONE2)\n set_k8s_node_zone_label(core_api, n5.name, ZONE2)\n wait_longhorn_node_zone_updated(client)\n\n client.update(n2, allowScheduling=False)\n client.update(n3, 
allowScheduling=False)\n client.update(n4, allowScheduling=False)\n client.update(n5, allowScheduling=False)\n\n n_replicas = 4\n volume = create_and_check_volume(client, volume_name,\n num_of_replicas=n_replicas)\n volume.attach(hostId=n1.name)\n\n for _ in range(RETRY_COUNTS):\n n1_r_count = common.get_host_replica_count(\n client, volume_name, n1.name, chk_running=True)\n n2_r_count = common.get_host_replica_count(\n client, volume_name, n2.name, chk_running=False)\n n3_r_count = common.get_host_replica_count(\n client, volume_name, n3.name, chk_running=False)\n n4_r_count = common.get_host_replica_count(\n client, volume_name, n4.name, chk_running=False)\n n5_r_count = common.get_host_replica_count(\n client, volume_name, n5.name, chk_running=False)\n\n if n1_r_count == 4 and \\\n n2_r_count == n3_r_count == n4_r_count == n5_r_count == 0:\n break\n\n time.sleep(RETRY_INTERVAL)\n assert n1_r_count == 4\n assert n2_r_count == 0\n assert n3_r_count == 0\n assert n4_r_count == 0\n assert n5_r_count == 0\n\n client.update(n4, allowScheduling=True)\n\n for _ in range(RETRY_COUNTS):\n z1_r_count = get_zone_replica_count(\n client, volume_name, ZONE1, chk_running=True)\n z2_r_count = get_zone_replica_count(\n client, volume_name, ZONE2, chk_running=True)\n\n if z1_r_count == z2_r_count == 2:\n break\n\n time.sleep(RETRY_INTERVAL)\n\n assert z1_r_count == 2\n assert z2_r_count == 2\n\n client.update(n2, allowScheduling=True)\n client.update(n3, allowScheduling=True)\n\n for _ in range(RETRY_COUNTS):\n n1_r_count = common.get_host_replica_count(\n client, volume_name, n1.name, chk_running=True)\n n2_r_count = common.get_host_replica_count(\n client, volume_name, n2.name, chk_running=True)\n n3_r_count = common.get_host_replica_count(\n client, volume_name, n3.name, chk_running=True)\n n4_r_count = common.get_host_replica_count(\n client, volume_name, n4.name, chk_running=True)\n n5_r_count = common.get_host_replica_count(\n client, volume_name, n5.name, chk_running=False)\n\n if n1_r_count == n2_r_count == n3_r_count == n4_r_count == 1 and \\\n n5_r_count == 0:\n break\n\n time.sleep(RETRY_INTERVAL)\n assert n1_r_count == 1\n assert n2_r_count == 1\n assert n3_r_count == 1\n assert n4_r_count == 1\n assert n5_r_count == 0\n\n client.update(n5, allowScheduling=True)\n\n for _ in range(RETRY_COUNTS):\n z1_r_count = get_zone_replica_count(\n client, volume_name, ZONE1, chk_running=True)\n z2_r_count = get_zone_replica_count(\n client, volume_name, ZONE2, chk_running=True)\n\n if z1_r_count == z2_r_count == 2:\n break\n\n time.sleep(RETRY_INTERVAL)\n\n assert z1_r_count == 2\n assert z2_r_count == 2", "def change_zone_ip(config, section, new_ip):\n\n a_name = config.get(section, \"a_name\")\n apikey = config.get(section, \"apikey\")\n ttl = int(config.get(section, \"ttl\"))\n zone_id = get_zone_id(config, section)\n\n zone_record = {'name': a_name, 'value': new_ip, 'ttl': ttl, 'type': 'A'}\n\n new_zone_ver = api.domain.zone.version.new(apikey, zone_id)\n\n # clear old A record (defaults to previous verison's\n api.domain.zone.record.delete(apikey, zone_id, new_zone_ver,\n {'type': 'A', 'name': a_name})\n\n # Add in new A record\n api.domain.zone.record.add(apikey, zone_id, new_zone_ver, zone_record)\n\n # Set new zone version as the active zone\n api.domain.zone.version.set(apikey, zone_id, new_zone_ver)", "def _initiate_replica_set(host: str, port: int):\n client = MongoClient(host, port)\n try:\n client.admin.command('replSetInitiate')\n except PyMongoError:\n logger.info('Replica set already 
initiated')\n finally:\n client.close()", "def migrateTo(destination):", "def promote_read_replica_db_cluster(DBClusterIdentifier=None):\n pass", "def copy_mongodb_database(self, dbFromName, dbToName):\r\n\r\n try:\r\n dbFromName = str(dbFromName)\r\n dbToName = str(dbToName)\r\n # print \"| Copy MongoDB Database | %s %s |\" %(dbFromName), (dbToName)\r\n #self._dbconnection.copy_database('%s' % (dbFromName), '%s' % (dbToName))\r\n self._dbconnection.admin.command('copydb',fromdb=dbFromName, todb=dbToName)\r\n finally:\r\n self._dbconnection.end_request()", "def test_patch_namespaced_virtual_machine_instance_replica_set(self):\n pass", "def test_replace_namespaced_replica_set(self):\n pass", "def migrate_instance():\n logger.debug(\"Migration not yet supported.\")", "def test_mongodb_destination(sdc_builder, sdc_executor, mongodb):\n pipeline_builder = sdc_builder.get_pipeline_builder()\n pipeline_builder.add_error_stage('Discard')\n\n dev_raw_data_source = pipeline_builder.add_stage('Dev Raw Data Source')\n dev_raw_data_source.set_attributes(data_format='TEXT', raw_data='\\n'.join(DATA))\n\n expression_evaluator = pipeline_builder.add_stage('Expression Evaluator')\n # MongoDB destination uses the CRUD operation in the sdc.operation.type record header attribute when writing\n # to MongoDB. Value 4 specified below is for UPSERT.\n expression_evaluator.header_attribute_expressions = [{'attributeToSet': 'sdc.operation.type',\n 'headerAttributeExpression': '1'}]\n\n mongodb_dest = pipeline_builder.add_stage('MongoDB', type='destination')\n mongodb_dest.set_attributes(database=get_random_string(ascii_letters, 5),\n collection=get_random_string(ascii_letters, 10))\n # From 3.6.0, unique key field is a list, otherwise single string for older version.\n mongodb_dest.unique_key_field = ['/text'] if Version(sdc_builder.version) >= Version('3.6.0') else '/text'\n\n record_deduplicator = pipeline_builder.add_stage('Record Deduplicator')\n trash = pipeline_builder.add_stage('Trash')\n dev_raw_data_source >> record_deduplicator >> expression_evaluator >> mongodb_dest\n record_deduplicator >> trash\n pipeline = pipeline_builder.build().configure_for_environment(mongodb)\n\n try:\n # Data is generated in dev_raw_data_source and sent to MongoDB using pipeline.\n sdc_executor.add_pipeline(pipeline)\n sdc_executor.start_pipeline(pipeline).wait_for_pipeline_output_records_count(len(DATA))\n sdc_executor.stop_pipeline(pipeline)\n\n # Verify data is received correctly using PyMongo.\n # Similar to writing, while reading data, we specify MongoDB database and the collection inside it.\n logger.info('Verifying docs received with PyMongo...')\n assert [item['text'] for item in mongodb.engine[mongodb_dest.database][mongodb_dest.collection].find()] == DATA\n\n finally:\n logger.info('Dropping %s database...', mongodb_dest.database)\n mongodb.engine.drop_database(mongodb_dest.database)" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
This operation is available only for replica set instances that run MongoDB 4.2 or earlier, and for sharded cluster instances. If you have applied for a public endpoint for the ApsaraDB for MongoDB instance, you must call the [ReleasePublicNetworkAddress](~~67604~~) operation to release the public endpoint before you call the MigrateAvailableZone operation. The following prerequisites must also be met: transparent data encryption (TDE) is disabled for the instance; the source zone and the destination zone belong to the same region; and, if the instance resides in a virtual private cloud (VPC), a vSwitch has been created in the destination zone. For more information about how to create a vSwitch, see [Work with vSwitches](~~65387~~).
def migrate_available_zone( self, request: dds_20151201_models.MigrateAvailableZoneRequest, ) -> dds_20151201_models.MigrateAvailableZoneResponse: runtime = util_models.RuntimeOptions() return self.migrate_available_zone_with_options(request, runtime)
[ "def migrate_available_zone_with_options(\n self,\n request: dds_20151201_models.MigrateAvailableZoneRequest,\n runtime: util_models.RuntimeOptions,\n ) -> dds_20151201_models.MigrateAvailableZoneResponse:\n UtilClient.validate_model(request)\n query = {}\n if not UtilClient.is_unset(request.dbinstance_id):\n query['DBInstanceId'] = request.dbinstance_id\n if not UtilClient.is_unset(request.effective_time):\n query['EffectiveTime'] = request.effective_time\n if not UtilClient.is_unset(request.owner_account):\n query['OwnerAccount'] = request.owner_account\n if not UtilClient.is_unset(request.owner_id):\n query['OwnerId'] = request.owner_id\n if not UtilClient.is_unset(request.resource_owner_account):\n query['ResourceOwnerAccount'] = request.resource_owner_account\n if not UtilClient.is_unset(request.resource_owner_id):\n query['ResourceOwnerId'] = request.resource_owner_id\n if not UtilClient.is_unset(request.vswitch):\n query['Vswitch'] = request.vswitch\n if not UtilClient.is_unset(request.zone_id):\n query['ZoneId'] = request.zone_id\n req = open_api_models.OpenApiRequest(\n query=OpenApiUtilClient.query(query)\n )\n params = open_api_models.Params(\n action='MigrateAvailableZone',\n version='2015-12-01',\n protocol='HTTPS',\n pathname='/',\n method='POST',\n auth_type='AK',\n style='RPC',\n req_body_type='formData',\n body_type='json'\n )\n return TeaCore.from_map(\n dds_20151201_models.MigrateAvailableZoneResponse(),\n self.call_api(params, req, runtime)\n )", "def test_replace_namespaced_virtual_machine_instance_replica_set(self):\n pass", "def migrate_neutron_database_to_ovn(plugin):\n ctx = n_context.get_admin_context()\n with db_api.CONTEXT_WRITER.using(ctx) as session:\n # Change network type from vxlan geneve\n segments = network_obj.NetworkSegment.get_objects(\n ctx, network_type='vxlan')\n for segment in segments:\n segment.network_type = 'geneve'\n segment.update()\n # Update Geneve allocation for the segment\n session.query(geneveallocation.GeneveAllocation).filter(\n geneveallocation.GeneveAllocation.geneve_vni ==\n segment.segmentation_id).update({\"allocated\": True})\n # Zero Vxlan allocations\n session.query(vxlanallocation.VxlanAllocation).filter(\n vxlanallocation.VxlanAllocation.vxlan_vni ==\n segment.segmentation_id).update({\"allocated\": False})\n\n port_bindings = port_obj.PortBinding.get_objects(\n ctx, vif_type='ovs', vnic_type='normal', status='ACTIVE')\n for pb in port_bindings:\n if not pb.vif_details:\n continue\n vif_details = pb.vif_details.copy()\n for detail in VIF_DETAILS_TO_REMOVE:\n try:\n del vif_details[detail]\n except KeyError:\n pass\n if vif_details != pb.vif_details:\n pb.vif_details = vif_details\n try:\n pb.update()\n except exceptions.ObjectNotFound:\n # When Neutron server is running, it could happen that\n # for example gateway port has been rescheduled to a\n # different gateway chassis.\n pass\n\n for trunk in trunk_obj.Trunk.get_objects(ctx):\n for subport in trunk.sub_ports:\n pbs = port_obj.PortBinding.get_objects(\n ctx, port_id=subport.port_id)\n for pb in pbs:\n profile = {}\n if pb.profile:\n profile = pb.profile.copy()\n profile['parent_name'] = trunk.port_id\n profile['tag'] = subport.segmentation_id\n if profile != pb.profile:\n pb.profile = profile\n pb.update()", "def mongo_upgrade():\n if mongo.init():\n print \"Starting MongoDB migration.\"\n mongo.connect()\n\n # Check for schema version and create it.\n if \"cuckoo_schema\" in mongo.db.collection_names():\n print \"Mongo schema version not expected\"\n sys.exit()\n 
else:\n mongo.db.cuckoo_schema.save({\"version\": mongo_revision})\n else:\n print \"Mongo reporting module not enabled, skipping mongo migration.\"", "def transfer_from_local_to_remote():\n local_client = MongoClient()\n local_db = local_client['analytical360']\n collections = local_db.list_collection_names()\n\n uname = creds['username']\n pwd = creds['password']\n server = creds['server']\n\n conn_str = 'mongodb+srv://{}:{}@{}.mongodb.net/test?retryWrites=true&w=majority'.format(uname, pwd, server)\n remote_client = MongoClient(conn_str)\n remote_db = remote_client['products']\n\n for c in collections:\n print(c)\n coll = local_db[c]\n for doc in tqdm(list(coll.find())):\n existing = list(remote_db[c].find({**doc}))\n if len(existing) == 0:\n remote_db[c].insert_one(doc)", "def test_1_pdns_zone_action(self):\n name = \"test.example36.com\"\n label = \"us-south\"\n resp = self.zone.list_dnszones(instance_id=self.instance_id)\n assert resp is not None\n assert resp.status_code == 200\n\n # create dns zone\n resp = self.zone.create_dnszone(\n instance_id=self.instance_id, name=name, label=label)\n assert resp is not None\n assert resp.status_code == 200\n assert resp.get_result().get(\"instance_id\") == self.instance_id\n assert resp.get_result().get(\"name\") == name\n assert resp.get_result().get(\"label\") == label\n zone_id = resp.get_result().get(\"id\")\n\n # get dns zone\n resp = self.zone.get_dnszone(\n instance_id=self.instance_id, dnszone_id=zone_id)\n assert resp.status_code == 200\n assert resp.get_result().get(\"instance_id\") == self.instance_id\n assert resp.get_result().get(\"name\") == name\n assert resp.get_result().get(\"label\") == label\n\n # update dns zone\n label = \"us-south-1\"\n desc = \"test instance\"\n resp = self.zone.update_dnszone(\n instance_id=self.instance_id, dnszone_id=zone_id, description=desc, label=label)\n assert resp is not None\n assert resp.status_code == 200\n assert resp.get_result().get(\"instance_id\") == self.instance_id\n assert resp.get_result().get(\"name\") == name\n assert resp.get_result().get(\"label\") == label\n assert resp.get_result().get(\"description\") == desc\n\n # delete dns zone\n resp = self.zone.delete_dnszone(\n instance_id=self.instance_id, dnszone_id=zone_id)\n assert resp is not None\n assert resp.status_code == 204", "def _update_zone_info(self, obj, server):\n if server.availability_zone:\n placement = obj.data.get('placement', None)\n if not placement:\n obj.data['placement'] = {'zone': server.availability_zone}\n else:\n obj.data['placement'].setdefault('zone',\n server.availability_zone)\n # It is safe to use admin context here\n ctx = context.get_admin_context()\n node_obj.Node.update(ctx, obj.id, {'data': obj.data})", "def __init__(self, compute, project,\n network_name,\n subnetwork_name, preserve_external_ip, zone, region,\n instance_group_name):\n super(UnmanagedInstanceGroupMigration, self).__init__()\n self.instance_group = self.build_instance_group()\n self.instance_migration_handlers = []\n self.migration_status = MigrationStatus(0)", "def deploy(instance_id, new_db_id):\n if isinstance(query_db_cluster(instance_id), str):\n cluster_id = query_db_cluster(instance_id)\n try:\n response = RDS.restore_db_cluster_to_point_in_time(\n DBClusterIdentifier=new_db_id,\n SourceDBClusterIdentifier=cluster_id,\n UseLatestRestorableTime=True\n )\n click.secho(response['DBCluster']['DBClusterArn'], fg='green')\n except ClientError as error:\n click.echo(error)\n else:\n db_subnet = query_db_cluster(instance_id)\n 
try:\n response = RDS.restore_db_instance_to_point_in_time(\n SourceDBInstanceIdentifier=instance_id,\n TargetDBInstanceIdentifier=new_db_id,\n UseLatestRestorableTime=True,\n PubliclyAccessible=False,\n DBSubnetGroupName=db_subnet[1]\n )\n click.secho(response['DBInstance']['DBInstanceArn'], fg='green')\n except ClientError as error:\n click.echo(error)", "def test_replica_auto_balance_zone_best_effort_with_uneven_node_in_zones(client, core_api, volume_name, pod): # NOQA\n\n common.update_setting(client,\n SETTING_REPLICA_NODE_SOFT_ANTI_AFFINITY, \"true\")\n common.update_setting(client,\n SETTING_REPLICA_ZONE_SOFT_ANTI_AFFINITY, \"true\")\n common.update_setting(client,\n SETTING_DEFAULT_DATA_LOCALITY, \"best-effort\")\n common.update_setting(client,\n SETTING_REPLICA_AUTO_BALANCE, \"best-effort\")\n\n n1, n2, n3, n4, n5 = client.list_node()\n\n set_k8s_node_zone_label(core_api, n1.name, ZONE1)\n set_k8s_node_zone_label(core_api, n2.name, ZONE1)\n set_k8s_node_zone_label(core_api, n3.name, ZONE1)\n set_k8s_node_zone_label(core_api, n4.name, ZONE2)\n set_k8s_node_zone_label(core_api, n5.name, ZONE2)\n wait_longhorn_node_zone_updated(client)\n\n client.update(n2, allowScheduling=False)\n client.update(n3, allowScheduling=False)\n client.update(n4, allowScheduling=False)\n client.update(n5, allowScheduling=False)\n\n n_replicas = 4\n volume = create_and_check_volume(client, volume_name,\n num_of_replicas=n_replicas)\n volume.attach(hostId=n1.name)\n\n for _ in range(RETRY_COUNTS):\n n1_r_count = common.get_host_replica_count(\n client, volume_name, n1.name, chk_running=True)\n n2_r_count = common.get_host_replica_count(\n client, volume_name, n2.name, chk_running=False)\n n3_r_count = common.get_host_replica_count(\n client, volume_name, n3.name, chk_running=False)\n n4_r_count = common.get_host_replica_count(\n client, volume_name, n4.name, chk_running=False)\n n5_r_count = common.get_host_replica_count(\n client, volume_name, n5.name, chk_running=False)\n\n if n1_r_count == 4 and \\\n n2_r_count == n3_r_count == n4_r_count == n5_r_count == 0:\n break\n\n time.sleep(RETRY_INTERVAL)\n assert n1_r_count == 4\n assert n2_r_count == 0\n assert n3_r_count == 0\n assert n4_r_count == 0\n assert n5_r_count == 0\n\n client.update(n4, allowScheduling=True)\n\n for _ in range(RETRY_COUNTS):\n z1_r_count = get_zone_replica_count(\n client, volume_name, ZONE1, chk_running=True)\n z2_r_count = get_zone_replica_count(\n client, volume_name, ZONE2, chk_running=True)\n\n if z1_r_count == z2_r_count == 2:\n break\n\n time.sleep(RETRY_INTERVAL)\n\n assert z1_r_count == 2\n assert z2_r_count == 2\n\n client.update(n2, allowScheduling=True)\n client.update(n3, allowScheduling=True)\n\n for _ in range(RETRY_COUNTS):\n n1_r_count = common.get_host_replica_count(\n client, volume_name, n1.name, chk_running=True)\n n2_r_count = common.get_host_replica_count(\n client, volume_name, n2.name, chk_running=True)\n n3_r_count = common.get_host_replica_count(\n client, volume_name, n3.name, chk_running=True)\n n4_r_count = common.get_host_replica_count(\n client, volume_name, n4.name, chk_running=True)\n n5_r_count = common.get_host_replica_count(\n client, volume_name, n5.name, chk_running=False)\n\n if n1_r_count == n2_r_count == n3_r_count == n4_r_count == 1 and \\\n n5_r_count == 0:\n break\n\n time.sleep(RETRY_INTERVAL)\n assert n1_r_count == 1\n assert n2_r_count == 1\n assert n3_r_count == 1\n assert n4_r_count == 1\n assert n5_r_count == 0\n\n client.update(n5, allowScheduling=True)\n\n for _ in 
range(RETRY_COUNTS):\n z1_r_count = get_zone_replica_count(\n client, volume_name, ZONE1, chk_running=True)\n z2_r_count = get_zone_replica_count(\n client, volume_name, ZONE2, chk_running=True)\n\n if z1_r_count == z2_r_count == 2:\n break\n\n time.sleep(RETRY_INTERVAL)\n\n assert z1_r_count == 2\n assert z2_r_count == 2", "def change_zone_ip(config, section, new_ip):\n\n a_name = config.get(section, \"a_name\")\n apikey = config.get(section, \"apikey\")\n ttl = int(config.get(section, \"ttl\"))\n zone_id = get_zone_id(config, section)\n\n zone_record = {'name': a_name, 'value': new_ip, 'ttl': ttl, 'type': 'A'}\n\n new_zone_ver = api.domain.zone.version.new(apikey, zone_id)\n\n # clear old A record (defaults to previous verison's\n api.domain.zone.record.delete(apikey, zone_id, new_zone_ver,\n {'type': 'A', 'name': a_name})\n\n # Add in new A record\n api.domain.zone.record.add(apikey, zone_id, new_zone_ver, zone_record)\n\n # Set new zone version as the active zone\n api.domain.zone.version.set(apikey, zone_id, new_zone_ver)", "def _initiate_replica_set(host: str, port: int):\n client = MongoClient(host, port)\n try:\n client.admin.command('replSetInitiate')\n except PyMongoError:\n logger.info('Replica set already initiated')\n finally:\n client.close()", "def migrateTo(destination):", "def promote_read_replica_db_cluster(DBClusterIdentifier=None):\n pass", "def copy_mongodb_database(self, dbFromName, dbToName):\r\n\r\n try:\r\n dbFromName = str(dbFromName)\r\n dbToName = str(dbToName)\r\n # print \"| Copy MongoDB Database | %s %s |\" %(dbFromName), (dbToName)\r\n #self._dbconnection.copy_database('%s' % (dbFromName), '%s' % (dbToName))\r\n self._dbconnection.admin.command('copydb',fromdb=dbFromName, todb=dbToName)\r\n finally:\r\n self._dbconnection.end_request()", "def test_patch_namespaced_virtual_machine_instance_replica_set(self):\n pass", "def test_replace_namespaced_replica_set(self):\n pass", "def migrate_instance():\n logger.debug(\"Migration not yet supported.\")", "def test_mongodb_destination(sdc_builder, sdc_executor, mongodb):\n pipeline_builder = sdc_builder.get_pipeline_builder()\n pipeline_builder.add_error_stage('Discard')\n\n dev_raw_data_source = pipeline_builder.add_stage('Dev Raw Data Source')\n dev_raw_data_source.set_attributes(data_format='TEXT', raw_data='\\n'.join(DATA))\n\n expression_evaluator = pipeline_builder.add_stage('Expression Evaluator')\n # MongoDB destination uses the CRUD operation in the sdc.operation.type record header attribute when writing\n # to MongoDB. 
Value 4 specified below is for UPSERT.\n expression_evaluator.header_attribute_expressions = [{'attributeToSet': 'sdc.operation.type',\n 'headerAttributeExpression': '1'}]\n\n mongodb_dest = pipeline_builder.add_stage('MongoDB', type='destination')\n mongodb_dest.set_attributes(database=get_random_string(ascii_letters, 5),\n collection=get_random_string(ascii_letters, 10))\n # From 3.6.0, unique key field is a list, otherwise single string for older version.\n mongodb_dest.unique_key_field = ['/text'] if Version(sdc_builder.version) >= Version('3.6.0') else '/text'\n\n record_deduplicator = pipeline_builder.add_stage('Record Deduplicator')\n trash = pipeline_builder.add_stage('Trash')\n dev_raw_data_source >> record_deduplicator >> expression_evaluator >> mongodb_dest\n record_deduplicator >> trash\n pipeline = pipeline_builder.build().configure_for_environment(mongodb)\n\n try:\n # Data is generated in dev_raw_data_source and sent to MongoDB using pipeline.\n sdc_executor.add_pipeline(pipeline)\n sdc_executor.start_pipeline(pipeline).wait_for_pipeline_output_records_count(len(DATA))\n sdc_executor.stop_pipeline(pipeline)\n\n # Verify data is received correctly using PyMongo.\n # Similar to writing, while reading data, we specify MongoDB database and the collection inside it.\n logger.info('Verifying docs received with PyMongo...')\n assert [item['text'] for item in mongodb.engine[mongodb_dest.database][mongodb_dest.collection].find()] == DATA\n\n finally:\n logger.info('Dropping %s database...', mongodb_dest.database)\n mongodb.engine.drop_database(mongodb_dest.database)" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Precautions: The instance must be in the Running state when you call this operation. If any of the parameters you modify take effect only after an instance restart, the instance is automatically restarted after this operation is called. You can call the [DescribeParameterTemplates](~~67618~~) operation to query which parameters take effect only after the instance is restarted.
def modify_parameters_with_options( self, request: dds_20151201_models.ModifyParametersRequest, runtime: util_models.RuntimeOptions, ) -> dds_20151201_models.ModifyParametersResponse: UtilClient.validate_model(request) query = {} if not UtilClient.is_unset(request.character_type): query['CharacterType'] = request.character_type if not UtilClient.is_unset(request.dbinstance_id): query['DBInstanceId'] = request.dbinstance_id if not UtilClient.is_unset(request.node_id): query['NodeId'] = request.node_id if not UtilClient.is_unset(request.owner_account): query['OwnerAccount'] = request.owner_account if not UtilClient.is_unset(request.owner_id): query['OwnerId'] = request.owner_id if not UtilClient.is_unset(request.parameters): query['Parameters'] = request.parameters if not UtilClient.is_unset(request.region_id): query['RegionId'] = request.region_id if not UtilClient.is_unset(request.resource_owner_account): query['ResourceOwnerAccount'] = request.resource_owner_account if not UtilClient.is_unset(request.resource_owner_id): query['ResourceOwnerId'] = request.resource_owner_id if not UtilClient.is_unset(request.security_token): query['SecurityToken'] = request.security_token req = open_api_models.OpenApiRequest( query=OpenApiUtilClient.query(query) ) params = open_api_models.Params( action='ModifyParameters', version='2015-12-01', protocol='HTTPS', pathname='/', method='POST', auth_type='AK', style='RPC', req_body_type='formData', body_type='json' ) return TeaCore.from_map( dds_20151201_models.ModifyParametersResponse(), self.call_api(params, req, runtime) )
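A hedged sketch of a parameter change through the method above. The client setup and IDs are placeholders, as in the earlier MigrateAvailableZone sketch; the parameter name/value pair is purely illustrative, and Parameters is assumed to be a JSON-encoded map of parameter names to values:

import json

from alibabacloud_dds20151201.client import Client as DdsClient
from alibabacloud_dds20151201 import models as dds_20151201_models
from alibabacloud_tea_openapi import models as open_api_models
from alibabacloud_tea_util import models as util_models

client = DdsClient(open_api_models.Config(access_key_id='<access-key-id>',
                                          access_key_secret='<access-key-secret>',
                                          endpoint='mongodb.aliyuncs.com'))

# Hypothetical parameter change; use DescribeParameterTemplates (see the
# description above) to check whether a given parameter requires a restart.
request = dds_20151201_models.ModifyParametersRequest(
    dbinstance_id='dds-bp1xxxxxxxxxxxxx',
    region_id='cn-hangzhou',
    parameters=json.dumps({'operationProfiling.slowOpThresholdMs': '200'}),
)

response = client.modify_parameters_with_options(request, util_models.RuntimeOptions())
print(response.body)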
[ "def source_instance_params(self) -> 'outputs.SourceInstanceParamsResponse':\n return pulumi.get(self, \"source_instance_params\")", "def _apply_params(self):\n config = self.get_startup_config()\n # Pass true to _set_params so we know these are startup values\n self._set_params(config, True)", "def gen_parameters(self):\n\n print \"\\t* Adding parameters to compute template\"\n # get all the server client\n servers = self.novaclient.servers.list()\n\n # add all key_pair_names\n self.gen_key_name_parameters(servers)\n\n # add all images\n self.gen_image_parameters(servers)\n\n # add all flavors\n self.gen_flavor_parameters(servers)\n\n # add all networks\n self.gen_network_parameters()", "def ApplyRuntimeParameters(self):\n \n if self.models is None or len(self.models) == 0:\n\n if self.verbose:\n\n print \"No model runtime parameters defined\"\n\n return\n\n num_models = len(self.models)\n\n if self.verbose:\n\n print \"Applying model runtime parameters to %d models\" % num_models\n\n for m in self.models:\n\n try:\n \n modelname = m['modelname']\n\n if self.verbose:\n\n print \"\\tSetting runtime parameters for '%s'\" % modelname\n\n\n self.SetModelName(modelname)\n \n if m.has_key('runtime_parameters') and not m['runtime_parameters'] is None:\n \n for parameter in m['runtime_parameters']:\n\n component_name = parameter[0]\n field = parameter[1]\n val = parameter[2]\n\n self.SetParameter(path=component_name, parameter=field, value=val)\n\n except Exception, e:\n\n print e\n\n continue\n\n # Now apply genericly set parameters\n\n if len(self._runtime_parameters) > 0:\n\n if self.verbose:\n\n print \"Applying generically set model runtime parameters\"\n\n \n for p in self._runtime_parameters:\n\n try:\n\n path = p['path'] \n parameter = p['parameter']\n value = p['value']\n service = None if not p.has_key('service') else p['service']\n\n self.SetParameter(path, parameter, value, service)\n \n except Exception, e:\n\n print e\n\n continue", "def generative_parameters(self):\n raise NotImplementedError", "def parameters():\n return render_template(\n 'parameters.html',\n title= \"Pi-Lapse\",\n year=datetime.now().year,\n )", "def set_parameters(self, params):\n self.kp = params.pgain", "def _instantiate_parameter_states(self, context=None):\n\n from PsyNeuLink.Components.States.ParameterState import _instantiate_parameter_states\n _instantiate_parameter_states(owner=self, context=context)", "def postprocess_hyperparams(args, config):\n # apply task-specific overrides\n t = config['data'].current_value # current task name\n for k, v in tasks[t].items():\n assert k in config\n config[k].current_value = v\n\n # configure number of updates (warmup and total)\n config['--warmup-updates'].current_value = int(0.06 * config['--max-update'].current_value)\n config['--total-num-update'].current_value = config['--max-update'].current_value", "def get_resource_params():\n return Parameter.list()", "def _base_troposphere_template(self):\n template = troposphere.Template()\n template.add_parameter(\n troposphere.Parameter(\n \"Stage\",\n Default=\"dev\",\n Description=\"Name of the Stage\",\n Type=\"String\",\n )\n )\n\n template.add_parameter(\n troposphere.Parameter(\n \"Region\",\n Description=\"AWS Region\",\n Type=\"String\",\n )\n )\n return template", "def configure_stp_instance(self, instance, **kwargs):\n pass", "def create_postgresinstance_params():\n return Database.Postgres.Create(\n name=\"SS_NDB_VM_INSTANCE_@@{calm_random}@@\",\n 
database_parameter_profile=Ref.NutanixDB.Profile.Database_Parameter(\n name=\"DEFAULT_POSTGRES_PARAMS\"\n ),\n initial_database_name=\"@@{initial_database_name}@@\",\n initial_database_password=\"@@{database_password}@@\",\n listener_port=\"5432\",\n size=\"70\",\n )", "def potential_parameters(cls):\n raise NotImplementedError()", "def update_params(self):\r\n parameters = dict()\r\n # Take the first value for all parameters\r\n for key, value in self.total_params.items():\r\n parameters[key] = value[0]\r\n # Update model\r\n self.model = self.inst(random_state=RANDOM_SEED, **parameters)", "def setup_parameters(self):\n structure = self.ctx.structure_initial_primitive\n ecutwfc = []\n ecutrho = []\n\n for kind in structure.get_kind_names():\n try:\n dual = self.ctx.protocol['pseudo_data'][kind]['dual']\n cutoff = self.ctx.protocol['pseudo_data'][kind]['cutoff']\n cutrho = dual * cutoff\n ecutwfc.append(cutoff)\n ecutrho.append(cutrho)\n except KeyError as exception:\n self.abort_nowait('failed to retrieve the cutoff or dual factor for {}'.format(kind))\n\n natoms = len(structure.sites)\n conv_thr = self.ctx.protocol['convergence_threshold'] * natoms\n\n self.ctx.inputs['parameters'] = {\n 'CONTROL': {\n 'restart_mode': 'from_scratch',\n 'tstress': self.ctx.protocol['tstress'],\n },\n 'SYSTEM': {\n 'ecutwfc': max(ecutwfc),\n 'ecutrho': max(ecutrho),\n 'smearing': self.ctx.protocol['smearing'],\n 'degauss': self.ctx.protocol['degauss'],\n 'occupations': self.ctx.protocol['occupations'],\n },\n 'ELECTRONS': {\n 'conv_thr': conv_thr,\n }\n }", "def psfTemplateModel(n, params):\n psf_template = params[\"psf_template\"]\n self.m_psf = psf_template\n print(\"PSF template shape\", np.shape(psf_template))\n dim = int(n)\n m = np.shape(psf_template)[0]\n #if m != dim:\n # raise ValueError(\"PSF template dimension not equal patch size\")\n \n if np.sum(psf_template) != 1:\n print(\"Normalizing PSF template to sum = 1\")\n psf_template = psf_template/np.sum(psf_template) \n return psf_template", "def get_parameter_settings(self):\n return copy.deepcopy(self._project_param_idx)", "def pre_instance_ip_create(self, resource_dict):\n pass" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
This operation can be used to release the internal endpoint of a shard or Configserver node in a sharded cluster instance. For more information, see [Release the endpoint of a shard or Configserver node](~~134067~~). To release the public endpoint of a shard or Configserver node in a sharded cluster instance, you can call the [ReleasePublicNetworkAddress](~~67604~~) operation.
def release_node_private_network_address( self, request: dds_20151201_models.ReleaseNodePrivateNetworkAddressRequest, ) -> dds_20151201_models.ReleaseNodePrivateNetworkAddressResponse: runtime = util_models.RuntimeOptions() return self.release_node_private_network_address_with_options(request, runtime)
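A usage sketch for the wrapper above, with the caveat that the *_with_options body is not reproduced in this row, so the request fields below (dbinstance_id, node_id, network_type) are assumptions based on the operation's description rather than confirmed signatures; IDs and client setup are placeholders as in the earlier sketches:

from alibabacloud_dds20151201.client import Client as DdsClient
from alibabacloud_dds20151201 import models as dds_20151201_models
from alibabacloud_tea_openapi import models as open_api_models

client = DdsClient(open_api_models.Config(access_key_id='<access-key-id>',
                                          access_key_secret='<access-key-secret>',
                                          endpoint='mongodb.aliyuncs.com'))

# Assumed fields: the sharded cluster instance ID, the shard/Configserver node
# ID, and the network type of the internal endpoint being released.
request = dds_20151201_models.ReleaseNodePrivateNetworkAddressRequest(
    dbinstance_id='dds-bp1xxxxxxxxxxxxx',
    node_id='d-bp1xxxxxxxxxxxxx',
    network_type='VPC',
)

response = client.release_node_private_network_address(request)
print(response.body)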
[ "def release_public_ips(conn, public_ips_list):\n\n for addr in conn.get_all_addresses(addresses = public_ips_list):\n if addr.instance_id == '':\n addr.release()", "def release_floating_ip(self, req, iface):\n # net_id it is not needed if there is just one port of the VM\n try:\n net_public = self._get_public_network(req)\n except Exception:\n raise exception.NetworkNotFound()\n response = self._remove_floating_ip(req, net_public, iface['ip'])\n\n return response", "def release_eip_address(\n public_ip=None, allocation_id=None, region=None, key=None, keyid=None, profile=None\n):\n if not salt.utils.data.exactly_one((public_ip, allocation_id)):\n raise SaltInvocationError(\n \"Exactly one of 'public_ip' OR 'allocation_id' must be provided\"\n )\n\n conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)\n\n try:\n return conn.release_address(public_ip, allocation_id)\n except boto.exception.BotoServerError as e:\n log.error(e)\n return False", "def delete_db_cluster_endpoint(DBClusterEndpointIdentifier=None):\n pass", "def clear_endpoint(fabric, pod, node, vnid, addr, addr_type=\"ip\", vrf_name=\"\"):\n from . fabric import Fabric\n from . ept.common import parse_vrf_name\n from . ept.common import get_mac_string\n from . ept.common import get_mac_value\n from . ept.ept_vnid import eptVnid\n if isinstance(fabric, Fabric): f = fabric\n else: f = Fabric.load(fabric=fabric)\n\n logger.debug(\"clear endpoint [%s, node:%s, vnid:%s, addr:%s]\", f.fabric, node, vnid, addr)\n if not f.exists():\n logger.warn(\"unknown fabric: %s\", f.fabric)\n return False\n session = get_apic_session(f)\n if session is None:\n logger.warn(\"failed to get apic session for fabric: %s\", f.fabric)\n return False\n ssh = get_ssh_connection(f, pod, node, session=session)\n if ssh is None:\n logger.warn(\"failed to ssh to pod:%s node:%s\", pod, node)\n return False\n\n if addr_type == \"ip\":\n ctype = \"ipv6\" if \":\" in addr else \"ip\"\n if len(vrf_name) == 0:\n # try to determine vrf name from eptVnid table\n v = eptVnid.find(fabric=f.fabric, vnid=vnid)\n if len(v) > 0:\n vrf_name = parse_vrf_name(v[0].name)\n if vrf_name is None:\n logger.warn(\"failed to parse vrf name from ept vnid_name: %s\", v.name)\n return False\n else:\n logger.warn(\"failed to determine vnid_name for fabric: %s, vnid: %s\",f.fabric,vnid)\n return False\n cmd = \"vsh -c 'clear system internal epm endpoint key vrf %s %s %s'\" % (vrf_name,ctype,addr)\n if ssh.cmd(cmd) == \"prompt\":\n logger.debug(\"successfully cleared endpoint: %s\", cmd)\n return True\n else:\n logger.warn(\"failed to execute clear cmd: %s\", cmd)\n return False\n else:\n # first cast mac into correct format\n addr = get_mac_string(get_mac_value(addr),fmt=\"std\")\n \n # to determine mac FD need to first verify mac exists and then use parent dn to query\n # l2BD, vlanCktEp, vxlanCktEp object (good thing here is that we don't care which object \n # type, each will have id attribute that is the PI vlan)\n # here we have two choices, first is APIC epmMacEp query which hits all nodes or, since ssh\n # session is already up, we can execute directly on the leaf. 
For that later case, it will\n # be easier to use moquery with grep then parsing json with extra terminal characters...\n cmd = \"moquery -c epmMacEp -f 'epm.MacEp.addr==\\\"%s\\\"' | egrep '^dn' | egrep 'vxlan-%s'\"%(\n addr, vnid)\n if ssh.cmd(cmd) == \"prompt\":\n r1 = re.search(\"dn[ ]*:[ ]*(?P<dn>sys/.+)/db-ep\", ssh.output)\n if r1 is not None:\n cmd = \"moquery -d '%s' | egrep '^id'\" % r1.group(\"dn\")\n if ssh.cmd(cmd) == \"prompt\":\n r2 = re.search(\"id[ ]*:[ ]*(?P<pi>[0-9]+)\", ssh.output)\n if r2 is not None:\n cmd = \"vsh -c 'clear system internal epm endpoint key vlan %s mac %s'\" % (\n r2.group(\"pi\"), addr)\n if ssh.cmd(cmd) == \"prompt\":\n logger.debug(\"successfully cleared endpoint: %s\", cmd)\n return True\n else:\n logger.warn(\"failed to execute clear cmd: %s\", cmd)\n return False\n else:\n logger.warn(\"failed to extract pi-vlan id from %s: %s\", r1.group(\"dn\"), \n ssh.output)\n return False\n else:\n logger.warn(\"failed to execute command: %s\", cmd)\n else:\n logger.debug(\"failed to parse bd/cktEp from dn or endpoint not found: %s\",ssh.output)\n # assume parsing was fine and that endpoint is no longer present (so cleared!)\n return True\n else:\n logger.warn(\"failed to execute moquery command to determine mac fd on leaf\")\n return False", "def release_port(self):\n return TFNode.release_port(self)", "def release(self) -> None:\n if not is_local_host(self.location):\n self.api.perform(\"ReleasePort\", portList=self.obj_ref())", "def release_static_ip(staticIpName=None):\n pass", "def disassociate_elastic_ip(ElasticIp=None):\n pass", "def unconfigure_nat64_v4_pool(\n device, \n pool_name, \n start_ipv4_address, \n end_ipv4_address\n):\n cmd = [\"no nat64 v4 pool {} {} {}\".format(pool_name,start_ipv4_address,end_ipv4_address)]\n\n try:\n device.configure(cmd)\n except SubCommandFailure as e:\n log.error(e)\n raise SubCommandFailure(\"Could not unconfigure nat64 v4 pool \")", "def delete_endpoint(EndpointName=None):\n pass", "def unconfigure_nat_pool_address(device, pool_name, start_ip_address, end_ip_address,\n network_mask=None, prefix_length=None, pool_type=None):\n cmd = [f'ip nat pool {pool_name}']\n if network_mask:\n cmd[0] += f' netmask {network_mask}'\n cmd.append(f'no address {start_ip_address} {end_ip_address}')\n elif prefix_length:\n cmd[0] += f' prefix-length {prefix_length}'\n cmd.append(f'no address {start_ip_address} {end_ip_address}')\n if pool_type:\n cmd[0] += f' type {pool_type}'\n try:\n device.configure(cmd)\n except SubCommandFailure as e:\n log.error(e)\n raise SubCommandFailure(\"Could not Unconfigure NAT pool address\")", "def test_delete_host_subnet(self):\n pass", "def remove_ip(enode, portlbl, addr, shell=None):\n assert portlbl\n assert ip_interface(addr)\n port = enode.ports[portlbl]\n\n cmd = 'ip addr del {addr} dev {port}'.format(addr=addr, port=port)\n response = enode(cmd, shell=shell)\n assert not response", "def release_port(internal_port):\n upnp, router = connect()\n mapping = upnp.getspecificportmapping(internal_port, 'UDP')\n\n if mapping is None:\n log.error('could not find a port mapping', router=router)\n return False\n\n if upnp.deleteportmapping(internal_port, 'UDP'):\n log.info('successfully released port mapping', router=router)\n return True\n\n log.warning(\n 'could not release port mapping, check your router for stale mappings',\n router=router,\n )\n return False", "def release_floatingip(compute, project, zone, floatingip):\n address = get_floatingip(compute, project, zone, floatingip)\n for user in 
address.get('users', []):\n # Parse instance info\n # Eg. /compute/v1/projects/<name>/zones/<zone>/instances/<name>\n\n items = urllib.parse.urlparse(user).path.strip('/').split('/')\n if len(items) < 4 or items[-2] != 'instances':\n LOG.warning(\n _LI('Unknown referrer %s to GCE static IP %s') % (user,\n floatingip))\n continue\n\n instance, zone = items[-1], items[-3]\n instance_info = get_instance(compute, project, zone, instance)\n for interface in instance_info['networkInterfaces']:\n for accessconfig in interface.get('accessConfigs', []):\n if accessconfig.get('natIP') == floatingip:\n LOG.info(\n _LI('Releasing %s from instance %s') % (floatingip,\n instance))\n operation = compute.instances().deleteAccessConfig(\n project=project, zone=zone, instance=instance,\n accessConfig=accessconfig['name'],\n networkInterface=interface['name']).execute()\n wait_for_operation(compute, project, operation)", "def deregister_elastic_ip(ElasticIp=None):\n pass", "def deallocate_ip(self, view, network, ip_address):\n host = objects.FixedAddress.search(self.conn,\n cidr=network,\n ip=ip_address,\n network_view=view)\n if host == []:\n raise Exception('deallocate_ip: IP $s not found' % ip_address)\n\n resp = host.delete()\n return resp", "def release_port_fixed_ip(self, network_id, device_id, subnet_id):\n return self.call(self.context,\n self.make_msg('release_port_fixed_ip',\n network_id=network_id,\n subnet_id=subnet_id,\n device_id=device_id,\n host=self.host),\n topic=self.topic)" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
The instance must be in the Running state when you call this operation. > The available database versions depend on the storage engine used by the instance. For more information, see [Upgrades of MongoDB major versions](~~398673~~). You can also call the [DescribeAvailableEngineVersion](~~141355~~) operation to query the available database versions. > You cannot downgrade the MongoDB version of an instance after you upgrade it. > The instance is automatically restarted two to three times during the upgrade process. Make sure that you upgrade the instance during off-peak hours.
def upgrade_dbinstance_engine_version( self, request: dds_20151201_models.UpgradeDBInstanceEngineVersionRequest, ) -> dds_20151201_models.UpgradeDBInstanceEngineVersionResponse: runtime = util_models.RuntimeOptions() return self.upgrade_dbinstance_engine_version_with_options(request, runtime)
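A usage sketch for the wrapper above; the field names (dbinstance_id, engine_version) are assumptions, since only the wrapper is shown in this row, and the target version string is a placeholder that should come from DescribeAvailableEngineVersion:

from alibabacloud_dds20151201.client import Client as DdsClient
from alibabacloud_dds20151201 import models as dds_20151201_models
from alibabacloud_tea_openapi import models as open_api_models

client = DdsClient(open_api_models.Config(access_key_id='<access-key-id>',
                                          access_key_secret='<access-key-secret>',
                                          endpoint='mongodb.aliyuncs.com'))

# The target version must be one reported as available for this instance;
# the upgrade is one-way and restarts the instance several times.
request = dds_20151201_models.UpgradeDBInstanceEngineVersionRequest(
    dbinstance_id='dds-bp1xxxxxxxxxxxxx',
    engine_version='5.0',
)

response = client.upgrade_dbinstance_engine_version(request)
print(response.body)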
[ "def mongo_upgrade():\n if mongo.init():\n print \"Starting MongoDB migration.\"\n mongo.connect()\n\n # Check for schema version and create it.\n if \"cuckoo_schema\" in mongo.db.collection_names():\n print \"Mongo schema version not expected\"\n sys.exit()\n else:\n mongo.db.cuckoo_schema.save({\"version\": mongo_revision})\n else:\n print \"Mongo reporting module not enabled, skipping mongo migration.\"", "def upgrade_db(self):\n self.check_release_requirements()\n self.check_run('upgrade_db')\n\n self.show_step(1, initialize=True)\n self.env.revert_snapshot(\"upgrade_first_cic\")\n if MAKE_SNAPSHOT:\n # some paranoid time sync sequence\n self.env.sync_time([\"admin\"])\n self.env.sync_time()\n self.install_octane()\n\n self.show_step(2)\n seed_cluster_id = self.fuel_web.get_last_created_cluster()\n\n self.upgrade_db_code(seed_cluster_id)\n\n self.env.make_snapshot(\"upgrade_db\")", "def _modify_db(self):\n for pg_engine_version in self.upgrade_path:\n with RDSWaiter(rds_client, self.db_instance_id, pg_engine_version):\n rds_client.modify_db_instance(\n DBInstanceIdentifier=self.db_instance_id,\n EngineVersion=pg_engine_version,\n AllowMajorVersionUpgrade=True,\n ApplyImmediately=True,\n )", "def db_version(self):\n return self._db_version", "def get_db_version():\n return migrate_api.db_version(url=db_url, repository=db_repo)", "def mmo_mongo_version(self, mmo_connection):\n return mmo_connection[\"admin\"].command(\"serverStatus\")[\"version\"]", "def version(self):\r\n print migration.db_version()", "def mongo_available():\n client = MongoClient(serverSelectionTimeoutMS=50)\n try:\n # Can we talk to the Mongo server?\n client.server_info()\n return True\n except:\n # No, we cannot. :(\n return False", "def upgrade_to_1():\n config.db.singletons.insert_one({'_id': 'version', 'database': 1})", "def check_database(self, database_version):\n pass", "def environment_needs_upgrade(self, db):\n cursor = db.cursor()\n return self._get_version(cursor) != db_version", "def AddDatabaseVersion(\n parser, restrict_choices=True, hidden=False, support_default_version=True\n):\n # Section for engine-specific content.\n # This section is auto-generated by //cloud/storage_fe/sql/sync_engines.\n # Do not make manual edits.\n choices = [\n 'MYSQL_5_6',\n 'MYSQL_5_7',\n 'MYSQL_8_0',\n 'POSTGRES_9_6',\n 'POSTGRES_10',\n 'POSTGRES_11',\n 'POSTGRES_12',\n 'POSTGRES_13',\n 'POSTGRES_14',\n 'POSTGRES_15',\n 'SQLSERVER_2017_EXPRESS',\n 'SQLSERVER_2017_WEB',\n 'SQLSERVER_2017_STANDARD',\n 'SQLSERVER_2017_ENTERPRISE',\n 'SQLSERVER_2019_EXPRESS',\n 'SQLSERVER_2019_WEB',\n 'SQLSERVER_2019_STANDARD',\n 'SQLSERVER_2019_ENTERPRISE',\n 'SQLSERVER_2022_EXPRESS',\n 'SQLSERVER_2022_WEB',\n 'SQLSERVER_2022_STANDARD',\n 'SQLSERVER_2022_ENTERPRISE',\n ]\n # End of engine-specific content.\n\n help_text_unspecified_part = (\n DEFAULT_INSTANCE_DATABASE_VERSION + ' is used.'\n if support_default_version\n else 'no changes occur.'\n )\n help_text = (\n 'The database engine type and versions. 
If left unspecified, '\n + help_text_unspecified_part\n + ' See the list of database versions at '\n + 'https://cloud.google.com/sql/docs/mysql/admin-api/rest/v1beta4/SqlDatabaseVersion.'\n )\n\n if restrict_choices:\n help_text += (\n ' Apart from listed major versions, DATABASE_VERSION also accepts'\n ' supported minor versions.'\n )\n\n parser.add_argument(\n '--database-version',\n required=False,\n default=DEFAULT_INSTANCE_DATABASE_VERSION\n if support_default_version\n else None,\n choices=_MajorVersionMatchList(choices) if restrict_choices else None,\n help=help_text,\n hidden=hidden,\n )", "def safe_upgrade():\n goviewbe.upgrade_db(current_app)", "def env_need_upgrade(dbManager):", "def get_db_instances():\n # Get Engine Version\n eng_version = request.args.get('eng_version')\n if 'aws' not in session:\n return make_json_response(\n status=410,\n success=0,\n errormsg=gettext('Session has not created yet.')\n )\n\n if not eng_version or eng_version == '' or eng_version == 'undefined':\n eng_version = '11.16'\n\n rds_obj = pickle.loads(session['aws']['aws_rds_obj'])\n res = rds_obj.get_available_db_instance_class(\n engine_version=eng_version)\n versions_set = set()\n versions = []\n for value in res:\n versions_set.add(value['DBInstanceClass'])\n\n for value in versions_set:\n versions.append({\n 'label': value,\n 'value': value\n })\n\n return make_json_response(data=versions)", "def need_upgrade_db(self):\n major_ver = 0\n minor_ver = 0\n error_msg, major_ver, minor_ver = self.get_db_version()\n if error_msg:\n logging.error(\"need_upgrade_db: fail to get version info of auth-db, cannot do upgrade\")\n return False\n\n if major_ver != DB_MAJOR_VER or minor_ver != DB_MINOR_VER:\n auth_db_ver = get_version_str(major_ver, minor_ver)\n curr_db_ver = get_version_str(DB_MAJOR_VER, DB_MINOR_VER)\n logging.error(\"version %s in auth-db does not match latest DB version %s\",\n auth_db_ver, curr_db_ver)\n logging.error(\"DB upgrade is not supported. Please remove the DB file at %s. All existing configuration \"\n \"will be removed and need to be recreated after removing the DB file.\", AUTH_DB_PATH)\n return True\n\n return False", "def open_db_connection():\n client = MongoClient() #'104.131.185.191', 27017\n db = client[\"225VOH\"]\n return client, db", "async def _upgrade_db(self) -> None:\n cur_version = await self._get_db_version()\n for n in range(cur_version + 1, sql_data.CUR_VERSION + 1):\n log.msg('Upgrading database to version %d' % n)\n if n in sql_data.SQL_UPGRADES:\n for command in sql_data.SQL_UPGRADES[n]:\n await self.operation(command)\n if cur_version != sql_data.CUR_VERSION:\n await self._set_db_version(sql_data.CUR_VERSION)", "def get_db_versions():\n if 'aws' not in session:\n return make_json_response(\n status=410,\n success=0,\n errormsg=gettext('Session has not created yet.')\n )\n\n rds_obj = pickle.loads(session['aws']['aws_rds_obj'])\n db_versions = rds_obj.get_available_db_version()\n res = list(filter(lambda val: not val['EngineVersion'].startswith('9.6'),\n db_versions['DBEngineVersions']))\n versions = []\n for value in res:\n versions.append({\n 'label': value['DBEngineVersionDescription'],\n 'value': value['EngineVersion']\n })\n\n return make_json_response(data=versions)" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
When you call the UpgradeDBInstanceKernelVersion operation, the instance must be in the Running state. > The UpgradeDBInstanceKernelVersion operation is applicable to replica set and sharded cluster instances, but not to standalone instances. > The instance will be restarted once during the upgrade. Call this operation during off-peak hours.
def upgrade_dbinstance_kernel_version( self, request: dds_20151201_models.UpgradeDBInstanceKernelVersionRequest, ) -> dds_20151201_models.UpgradeDBInstanceKernelVersionResponse: runtime = util_models.RuntimeOptions() return self.upgrade_dbinstance_kernel_version_with_options(request, runtime)
[ "async def upgrade_dbinstance_kernel_version_async(\n self,\n request: dds_20151201_models.UpgradeDBInstanceKernelVersionRequest,\n ) -> dds_20151201_models.UpgradeDBInstanceKernelVersionResponse:\n runtime = util_models.RuntimeOptions()\n return await self.upgrade_dbinstance_kernel_version_with_options_async(request, runtime)", "def reboot_db_instance(DBInstanceIdentifier=None, ForceFailover=None):\n pass", "def upgrade_dbinstance_engine_version(\n self,\n request: dds_20151201_models.UpgradeDBInstanceEngineVersionRequest,\n ) -> dds_20151201_models.UpgradeDBInstanceEngineVersionResponse:\n runtime = util_models.RuntimeOptions()\n return self.upgrade_dbinstance_engine_version_with_options(request, runtime)", "def upgrade_kernel(**kwargs):\n execute(\"upgrade_kernel_node\", env.host_string, **kwargs)", "def upgrade_kernel():\n execute(\"upgrade_kernel_node\", env.host_string)", "def update_rds_db_instance(RdsDbInstanceArn=None, DbUser=None, DbPassword=None):\n pass", "def RebootInstance(self, instance):\n raise HypervisorError(\"The chroot manager doesn't implement the\"\n \" reboot functionality\")", "def _modify_db(self):\n for pg_engine_version in self.upgrade_path:\n with RDSWaiter(rds_client, self.db_instance_id, pg_engine_version):\n rds_client.modify_db_instance(\n DBInstanceIdentifier=self.db_instance_id,\n EngineVersion=pg_engine_version,\n AllowMajorVersionUpgrade=True,\n ApplyImmediately=True,\n )", "def modify_db_instance(DBInstanceIdentifier=None, AllocatedStorage=None, DBInstanceClass=None, DBSubnetGroupName=None, DBSecurityGroups=None, VpcSecurityGroupIds=None, ApplyImmediately=None, MasterUserPassword=None, DBParameterGroupName=None, BackupRetentionPeriod=None, PreferredBackupWindow=None, PreferredMaintenanceWindow=None, MultiAZ=None, EngineVersion=None, AllowMajorVersionUpgrade=None, AutoMinorVersionUpgrade=None, LicenseModel=None, Iops=None, OptionGroupName=None, NewDBInstanceIdentifier=None, StorageType=None, TdeCredentialArn=None, TdeCredentialPassword=None, CACertificateIdentifier=None, Domain=None, CopyTagsToSnapshot=None, MonitoringInterval=None, DBPortNumber=None, PubliclyAccessible=None, MonitoringRoleArn=None, DomainIAMRoleName=None, PromotionTier=None, EnableIAMDatabaseAuthentication=None, EnablePerformanceInsights=None, PerformanceInsightsKMSKeyId=None, PerformanceInsightsRetentionPeriod=None, CloudwatchLogsExportConfiguration=None, ProcessorFeatures=None, UseDefaultProcessorFeatures=None, DeletionProtection=None):\n pass", "def upgrade_db(self):\n self.check_release_requirements()\n self.check_run('upgrade_db')\n\n self.show_step(1, initialize=True)\n self.env.revert_snapshot(\"upgrade_first_cic\")\n if MAKE_SNAPSHOT:\n # some paranoid time sync sequence\n self.env.sync_time([\"admin\"])\n self.env.sync_time()\n self.install_octane()\n\n self.show_step(2)\n seed_cluster_id = self.fuel_web.get_last_created_cluster()\n\n self.upgrade_db_code(seed_cluster_id)\n\n self.env.make_snapshot(\"upgrade_db\")", "def restart_kernel(self, **kw):\n raise RuntimeError(\"Cannot restart the kernel. 
\")", "def update_notebook_instance(NotebookInstanceName=None, InstanceType=None, RoleArn=None, LifecycleConfigName=None, DisassociateLifecycleConfig=None, VolumeSizeInGB=None, DefaultCodeRepository=None, AdditionalCodeRepositories=None, AcceleratorTypes=None, DisassociateAcceleratorTypes=None, DisassociateDefaultCodeRepository=None, DisassociateAdditionalCodeRepositories=None):\n pass", "def reboot_instance(instanceName=None):\n pass", "def kernel_version(self, kernel_version):\n\n self._kernel_version = kernel_version", "def post_is_instance_upgradeable(\n self, response: service.IsInstanceUpgradeableResponse\n ) -> service.IsInstanceUpgradeableResponse:\n return response", "def kernel_version(self):\n return self._kernel_version", "def reboot_instance(InstanceId=None):\n pass", "def upgrade():\n db_api = eon.db.get_api()\n db_api.setup_db_env()\n activated_resource_ids = _get_resource_ids(\n None, db_api, constants.EON_RESOURCE_STATE_ACTIVATED)\n _create_hypervisor_id(None, db_api, activated_resource_ids)\n _introduce_state_for_res_mgr(None, db_api)\n op.drop_index(NAME_COLUMN, table_name=RESOURCE_TABLE)\n [op.drop_table(table) for table in DEPRECATED_TABLES]", "def reboot(self, instance):\n try:\n out, err = utils.execute('sudo', 'vzctl', 'restart',\n instance['id'])\n if err:\n LOG.error(err)\n except ProcessExecutionError:\n raise exception.Error('Failed to restart container: %d' %\n instance['id'])" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
When you call the UpgradeDBInstanceKernelVersion operation, the instance must be in the Running state. > The UpgradeDBInstanceKernelVersion operation is applicable to replica set and sharded cluster instances, but not to standalone instances. > The instance will be restarted once during the upgrade. Call this operation during off-peak hours.
async def upgrade_dbinstance_kernel_version_async( self, request: dds_20151201_models.UpgradeDBInstanceKernelVersionRequest, ) -> dds_20151201_models.UpgradeDBInstanceKernelVersionResponse: runtime = util_models.RuntimeOptions() return await self.upgrade_dbinstance_kernel_version_with_options_async(request, runtime)
[ "def upgrade_dbinstance_kernel_version(\n self,\n request: dds_20151201_models.UpgradeDBInstanceKernelVersionRequest,\n ) -> dds_20151201_models.UpgradeDBInstanceKernelVersionResponse:\n runtime = util_models.RuntimeOptions()\n return self.upgrade_dbinstance_kernel_version_with_options(request, runtime)", "def reboot_db_instance(DBInstanceIdentifier=None, ForceFailover=None):\n pass", "def upgrade_dbinstance_engine_version(\n self,\n request: dds_20151201_models.UpgradeDBInstanceEngineVersionRequest,\n ) -> dds_20151201_models.UpgradeDBInstanceEngineVersionResponse:\n runtime = util_models.RuntimeOptions()\n return self.upgrade_dbinstance_engine_version_with_options(request, runtime)", "def upgrade_kernel(**kwargs):\n execute(\"upgrade_kernel_node\", env.host_string, **kwargs)", "def upgrade_kernel():\n execute(\"upgrade_kernel_node\", env.host_string)", "def update_rds_db_instance(RdsDbInstanceArn=None, DbUser=None, DbPassword=None):\n pass", "def RebootInstance(self, instance):\n raise HypervisorError(\"The chroot manager doesn't implement the\"\n \" reboot functionality\")", "def _modify_db(self):\n for pg_engine_version in self.upgrade_path:\n with RDSWaiter(rds_client, self.db_instance_id, pg_engine_version):\n rds_client.modify_db_instance(\n DBInstanceIdentifier=self.db_instance_id,\n EngineVersion=pg_engine_version,\n AllowMajorVersionUpgrade=True,\n ApplyImmediately=True,\n )", "def modify_db_instance(DBInstanceIdentifier=None, AllocatedStorage=None, DBInstanceClass=None, DBSubnetGroupName=None, DBSecurityGroups=None, VpcSecurityGroupIds=None, ApplyImmediately=None, MasterUserPassword=None, DBParameterGroupName=None, BackupRetentionPeriod=None, PreferredBackupWindow=None, PreferredMaintenanceWindow=None, MultiAZ=None, EngineVersion=None, AllowMajorVersionUpgrade=None, AutoMinorVersionUpgrade=None, LicenseModel=None, Iops=None, OptionGroupName=None, NewDBInstanceIdentifier=None, StorageType=None, TdeCredentialArn=None, TdeCredentialPassword=None, CACertificateIdentifier=None, Domain=None, CopyTagsToSnapshot=None, MonitoringInterval=None, DBPortNumber=None, PubliclyAccessible=None, MonitoringRoleArn=None, DomainIAMRoleName=None, PromotionTier=None, EnableIAMDatabaseAuthentication=None, EnablePerformanceInsights=None, PerformanceInsightsKMSKeyId=None, PerformanceInsightsRetentionPeriod=None, CloudwatchLogsExportConfiguration=None, ProcessorFeatures=None, UseDefaultProcessorFeatures=None, DeletionProtection=None):\n pass", "def upgrade_db(self):\n self.check_release_requirements()\n self.check_run('upgrade_db')\n\n self.show_step(1, initialize=True)\n self.env.revert_snapshot(\"upgrade_first_cic\")\n if MAKE_SNAPSHOT:\n # some paranoid time sync sequence\n self.env.sync_time([\"admin\"])\n self.env.sync_time()\n self.install_octane()\n\n self.show_step(2)\n seed_cluster_id = self.fuel_web.get_last_created_cluster()\n\n self.upgrade_db_code(seed_cluster_id)\n\n self.env.make_snapshot(\"upgrade_db\")", "def restart_kernel(self, **kw):\n raise RuntimeError(\"Cannot restart the kernel. 
\")", "def update_notebook_instance(NotebookInstanceName=None, InstanceType=None, RoleArn=None, LifecycleConfigName=None, DisassociateLifecycleConfig=None, VolumeSizeInGB=None, DefaultCodeRepository=None, AdditionalCodeRepositories=None, AcceleratorTypes=None, DisassociateAcceleratorTypes=None, DisassociateDefaultCodeRepository=None, DisassociateAdditionalCodeRepositories=None):\n pass", "def reboot_instance(instanceName=None):\n pass", "def kernel_version(self, kernel_version):\n\n self._kernel_version = kernel_version", "def post_is_instance_upgradeable(\n self, response: service.IsInstanceUpgradeableResponse\n ) -> service.IsInstanceUpgradeableResponse:\n return response", "def kernel_version(self):\n return self._kernel_version", "def reboot_instance(InstanceId=None):\n pass", "def upgrade():\n db_api = eon.db.get_api()\n db_api.setup_db_env()\n activated_resource_ids = _get_resource_ids(\n None, db_api, constants.EON_RESOURCE_STATE_ACTIVATED)\n _create_hypervisor_id(None, db_api, activated_resource_ids)\n _introduce_state_for_res_mgr(None, db_api)\n op.drop_index(NAME_COLUMN, table_name=RESOURCE_TABLE)\n [op.drop_table(table) for table in DEPRECATED_TABLES]", "def reboot(self, instance):\n try:\n out, err = utils.execute('sudo', 'vzctl', 'restart',\n instance['id'])\n if err:\n LOG.error(err)\n except ProcessExecutionError:\n raise exception.Error('Failed to restart container: %d' %\n instance['id'])" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Use features and result to train Support Vector Machine
def train(features, result): clf = grid_search(result) clf.fit(features, result) return clf
[ "def training(string):\n print(\"Training...\")\n vec = create_vector(string)\n print(\"Selecting features...\")\n feature_list = select_features(vec)\n print(\"Done!\")\n return feature_list", "def svm():", "def __trainLocal__(self, featureVals, targetVals):\n pass", "def test_svm():\n backend = BasicAer.get_backend('statevector_simulator')\n random_seed = r.randint(1, 10598)\n\n quantum_instance = QuantumInstance(backend, seed=random_seed, seed_transpiler=random_seed)\n\n # iris\n pres = \"Test pour le data set Iris (facile, classique)\"\n test_from_func(pres, 15, 10, 3, True, Iris, quantum_instance)\n\n # breast cancer\n pres = \"Test pour le data set Breast Cancer (facile, classique)\"\n test_from_func(pres, 15, 10, 3, True, Breast_cancer, quantum_instance)\n\n # digits (it's long so be careful)\n #pres = \"Test pour le data set Digits (difficile, classique)\"\n #test_from_func(pres, 10, 10, 10, True, Digits, quantum_instance)\n\n # wine\n pres = \"Test pour le data set Wine (moyen, classique)\"\n test_from_func(pres, 15, 10, 5, True, Wine, quantum_instance)\n\n # gaussian\n pres = \"Test pour des données gaussiennes (moyen, classique)\"\n for _ in range(1):\n print(\"\\n\")\n print(\"New iteration\")\n test_from_func(pres, 25, 10, 2, True, Gaussian, quantum_instance)\n print(\"\\n\")\n\n # small adn strings\n pres = \"Test pour des séquences ADN courtes (difficile, classique)\"\n test_from_func(pres, 10, 15, 14, True, Sequence, quantum_instance)", "def support_vector_machine(x_train, x_test, y_train, y_test, C, gamma):\n clf = svm.SVC(kernel='rbf', C=C, gamma=gamma)\n clf.fit(x_train, y_train)\n\n y_predict = clf.predict(x_test)\n acc = accuracy_score(y_test, y_predict)\n\n print(metrics.confusion_matrix(y_test, y_predict))\n return acc", "def _train(self, feature, label):\n self.mdl.fit(feature, label)", "def gen_features(self, X):", "def learn1_svc():\n \n svc.fit(vector_training,sentiment_training) ##fit the training data of vector tweets and sentiments using LinearSVC\n correct = 0\n for i in range(vector_testing.shape[0]): ##using the testing data, see how accurate LinearSVC is\n prediction = svc.predict(vector_testing[i])\n sentiment = sentiment_testing[i]\n if prediction[0] == sentiment:\n correct +=1\n accuracy = correct/vector_testing.shape[0]\n print('Linear Support Vector Classifier Testing Accuracy: {:.2f}'.format(accuracy)) ##print the accuracy of the algorithm", "def train_svm_model(self, X_train, X_test, y_train, y_test):\r\n clf = Pipeline([('vect', CountVectorizer()), ('tfidf', TfidfTransformer()),\r\n ('clf', LinearSVC())])\r\n clf = clf.fit(X_train, y_train)\r\n pred = clf.predict(X_test)\r\n print('Confusion matrix\\n',confusion_matrix(y_test,pred))\r\n print('Classification_report\\n',classification_report(y_test,pred))\r\n return clf", "def test_svm_classifier_manual_test_set(self):\n\n classname = 'Soluble'\n dataframe = sdf_to_csv(\n self.sdf_file_path, self.fingerprints, class_name_list=classname)\n manual_test_dataframe = sdf_to_csv(\n self.manual_test_file_path, self.fingerprints,\n class_name_list=classname\n )\n classic_classifier = ALGORITHM[TRAINER_CLASS][\n SUPPORT_VECTOR_MACHINE_CLASSIFIER\n ](\n self.sdf_file_path, classname, dataframe, subsample_size=1.0,\n test_set_size=self.test_set_size, seed=0, fptype=self.fingerprints,\n scale='standard', output_path=self.temporary_folder,\n n_split=self.n_split, manual_test_set=manual_test_dataframe\n )\n classic_classifier.train_model(\n CODES[SUPPORT_VECTOR_MACHINE_CLASSIFIER])\n\n metrics = 
classic_classifier.metrics[\n SUPPORT_VECTOR_MACHINE_CLASSIFIER]['mean']\n true_metrics = {\n ('train', 'AUC'): 0.99,\n ('train', 'ACC'): 0.99,\n ('train', 'f1-score'): 0.99,\n ('train', 'Cohen_Kappa'): 0.95,\n ('train', 'Matthews_corr'): 0.96,\n ('train', 'Precision'): 0.99,\n ('train', 'Recall'): 0.99,\n ('test', 'AUC'): 0.95,\n ('test', 'ACC'): 0.93,\n ('test', 'f1-score'): 0.96,\n ('test', 'Cohen_Kappa'): 0.64,\n ('test', 'Matthews_corr'): 0.66,\n ('test', 'Precision'): 0.93,\n ('test', 'Recall'): 0.98,\n ('validation', 'AUC'): 0.94,\n ('validation', 'ACC'): 0.93,\n ('validation', 'f1-score'): 0.96,\n ('validation', 'Cohen_Kappa'): 0.59,\n ('validation', 'Matthews_corr'): 0.63,\n ('validation', 'Precision'): 0.93,\n ('validation', 'Recall'): 0.99\n }\n\n self.assertDictAlmostEqual(metrics, true_metrics, delta=0.1)", "def train_classifier (classifier, train_vectors, train_labels, test_vectors, test_labels):\n t0 = time.time()\n classifier.fit(train_vectors, train_labels)\n t1 = time.time()\n prediction=classifier.predict(test_vectors)\n t2 = time.time()\n time_train=t1-t0\n time_predict=t2-t1\n accuracy=m.accuracy_score(test_labels,prediction)\n #fear is hardcoded in the next two lines...heads up\n precision=m.precision_score(test_labels,prediction,pos_label='fear',average='micro')\n recall=m.recall_score(test_labels,prediction,pos_label='fear',average='micro')\n print(\"Training Results for \"+str(type(classifier).__name__))\n print(\"Training time: %fs; Prediction time: %fs\" % (time_train, time_predict))\n print(\"Accuracy: {0}, Precision: {1}, Recall:{2}\".format(str(accuracy),str(precision),str(recall)))\n return classifier\n # Create feature vectors", "def train(self, trainingData, trainingLabels, validationData, validationLabels):\n from sklearn import svm\n \n \"*** YOUR CODE HERE ***\"\n self.sklearn_svm = svm.SVC(C=5, kernel='rbf', gamma=0.005, decision_function_shape='ovo')\n self.sklearn_svm.fit(trainingData, trainingLabels)", "def train():\n raise NotImplementedError(\"Train method not implemented\")", "def main():\n parser = argparse.ArgumentParser(\n description='Linear SVM implementation using PyTorch (option for using Logistic regression is also included).')\n parser.add_argument('--num_epochs', type=int, default=20,\n help='Number of epochs for training')\n parser.add_argument('--batch_size', type=int, default=100,\n help='Batch size for training')\n parser.add_argument('--lr', type=float, default=0.001,\n help='Initial learning rate for training')\n parser.add_argument('--c', type=float, default=0.01,\n help='Regularization parameter')\n parser.add_argument('--beta', type=float, default=1.0,\n help='Mixing parameter for Elastic net regularization')\n parser.add_argument('--rg_type', type=str, default='', choices=['L1', 'L2', 'L1L2'],\n help='Regularization type to use: L1 (LASSO), L2 (Ridge), Elastic net (beta*L2 + L1) or None')\n parser.add_argument('--classification_type', type=str, default='svm', choices=['svm', 'logisticR'],\n help='Classification type to use: SVM or Logistic regression')\n parser.add_argument('--num_workers', type=int, default=4,\n help='Number of workers to use in data loading')\n parser.add_argument('--input_size', type=int, default=784,\n help='Number of input size for training and validation dataset')\n parser.add_argument('--fp16', action='store_true',\n help='Use float16 instead of float32, which can save about 50% memory usage without accuracy \\\n drop')\n parser.add_argument('--device', default='cuda', choices=['cpu', 'cuda'])\n args = 
parser.parse_args()\n args.device = torch.device(args.device if torch.cuda.is_available() else 'cpu')\n\n print(args)\n\n # Use cudnn backend\n if args.device.type == 'cuda':\n cudnn.benchmark = True # This flag allows you to enable the inbuilt cudnn auto-tuner to find the best algorithm\n # to use for your hardware.\n\n # MNIST dataset (images and labels)\n train_dataset = torchvision.datasets.MNIST(root='./data',\n train=True,\n transform=transforms.ToTensor(),\n download=True)\n\n val_dataset = torchvision.datasets.MNIST(root='./data',\n train=False,\n transform=transforms.ToTensor())\n\n # Data loader (input pipeline) i.e. Using torch.utils.data.DataLoader, we can obtain two iterators\n # data_loaders['train'] and data_loaders['val'] to read data (images) and labels.\n train_loader = torch.utils.data.DataLoader(dataset=train_dataset,\n batch_size=args.batch_size,\n shuffle=True)\n\n val_loader = torch.utils.data.DataLoader(dataset=val_dataset,\n batch_size=args.batch_size,\n shuffle=False)\n data_loaders = dict()\n data_loaders['train'] = train_loader\n data_loaders['val'] = val_loader\n\n dataset_sizes = dict()\n dataset_sizes['train'] = len(train_dataset)\n dataset_sizes['val'] = len(val_dataset)\n\n num_classes = len(data_loaders['train'].dataset.classes) # 10 for MNIST\n input_size = train_loader.dataset.data[0].reshape(1, -1).size()[1] # input_size = 28*28 = 784 for MNIST i.e.\n # vectorized the input for fully connected network.\n\n args.input_size = input_size\n\n # Initialized the model to be trained: SVM or Logistic regression\n model = SVM(input_size, num_classes)\n model.to(args.device)\n\n # Loss and optimizer\n if args.classification_type == 'svm':\n criterion = nn.MultiMarginLoss() # Multi-class classification hinge loss (margin-based loss); SVM\n elif args.classification_type == 'logisticR':\n criterion = nn.CrossEntropyLoss() # Cross-entropy loss which computes softmax internally; logistic regression\n\n optimizer = optim.SGD(model.parameters(), lr=args.lr)\n\n # Use fp16. It can be used by simply adding --fp16 i.e. 
$ python3 SVM_train.py --fp16\n if args.fp16:\n model, optimizer = amp.initialize(model, optimizer, opt_level=\"O1\")\n\n # Decay LR by a factor of 0.1 every 10 epochs\n exp_lr_scheduler = lr_scheduler.StepLR(optimizer, step_size=10, gamma=0.1)\n\n # Train the model\n model = train_model(model, data_loaders, dataset_sizes, criterion, optimizer, exp_lr_scheduler, args)\n\n # Save the model\n torch.save(model.state_dict(), './model/model.pth')\n\n # Save the used args\n with open('./model/used_args.yaml', 'w') as fp:\n yaml.dump(vars(args), fp, default_flow_style=False)", "def svm_clf_training(max_features, data):\r\n X_train, y_train, X_test, y_test = data\r\n clf = Pipeline([('feature_selection', SelectKBest(score_func=chi2, k=max_features)),\r\n ('clf', svm.SVC(C=1., kernel='linear'))])\r\n\r\n vectorizer = CountVectorizer(ngram_range=(1, 2), lowercase=True) # unigrams and bigrams\r\n X_matrix_tr = vectorizer.fit_transform(X_train)\r\n # parameters = [{'clf__kernel': ['linear'], 'clf__C': [0.1, 1, 10, 100]},\r\n # {'clf__kernel': ['rbf'], 'clf__C': [0.1, 1, 10, 100], 'clf__gamma': [0.001, 0.01, 0.1]},\r\n # {'clf__kernel': ['poly'], 'clf__C': [0.1, 1, 10, 100], 'clf__degree': [2, 3, 4, 5]}]\r\n # clf = GridSearchCV(svc, parameters, scoring='accuracy')\r\n clf.fit(X_matrix_tr, y_train)\r\n # print(\"Best parameters set found on development set:\")\r\n # print()\r\n # print(clf.best_estimator_)\r\n # print()\r\n # print(\"Grid scores on development set:\")\r\n # print()\r\n # for params, mean_score, scores in clf.grid_scores_:\r\n # print(\"%0.3f (+/-%0.03f) for %r\"\r\n # % (mean_score, scores.std() / 2, params))\r\n # print()\r\n voc = vectorizer.get_feature_names()\r\n # vectorizer1 = CountVectorizer(ngram_range=(1, 2), lowercase=True, vocabulary=voc)\r\n # X_matrix_val = vectorizer1.fit_transform(X_test)\r\n # y_pred = clf.predict(X_matrix_val)\r\n\r\n # for i in range(len(X_test)):\r\n # if y_test[i] != y_pred[i]:\r\n # print(X_test[i], y_test[i], y_pred[i])\r\n # print(classification_report(y_test, y_pred))\r\n return clf, voc", "def train(self, *args: Any, **kwargs: Any) -> Any:", "def make_features(user_master:SparkDataFrame):\n df = user_master.select([f'feature{i}' for i in range(1,7) ] + [\"user_id\"] )\n cols = df.columns\n\n categoricalColumns = [f'feature{i}' for i in range(1,7)]\n\n stages = []\n for categoricalCol in categoricalColumns:\n stringIndexer = StringIndexer(inputCol = categoricalCol, outputCol = categoricalCol + 'Index')\n encoder = OneHotEncoder(inputCols=[stringIndexer.getOutputCol()], outputCols=[categoricalCol + \"classVec\"])\n stages += [stringIndexer, encoder]\n\n #label_stringIdx = StringIndexer(inputCol = 'item_id', outputCol = 'label')\n #stages += [label_stringIdx]\n\n\n assemblerInputs = [c + \"classVec\" for c in categoricalColumns] \n assembler = VectorAssembler(inputCols=assemblerInputs, outputCol=\"features\")\n stages += [assembler]\n\n \n pipeline = Pipeline(stages = stages)\n pipelineModel = pipeline.fit(df)\n df = pipelineModel.transform(df)\n selectedCols = ['features'] + cols\n df = df.select(selectedCols)\n #df.printSchema()\n\n return df", "def run(task,Model,vec,eval):\n model_dict = {\"LR\":\"Logistic Regression\",\"NB\":\"Naive Bayes\",\"SVC\": \"SVM Classifier\"}\n vec_dict = {\"CV\":\"CountVectorizer\",\"TFIDF\":\"TfidfVectorizer\"}\n task_dict = {\"A\": \"Aggresive Text Classification\",\"B\":\"Gendered Text Classification\"}\n\n data = pd.read_csv(config.TRAIN_FILE)\n data.columns = ['ID','Text','aggressive','gendered']\n\n 
task_a_labels,task_b_labels = labelencoder()\n\n data = data.replace({'aggressive':task_a_labels,'gendered':task_b_labels})\n \n if task == \"a\":\n target = 'aggressive'\n elif task == \"b\":\n target = 'gendered'\n else:\n raise ValueError(\"Task not found\")\n\n # random state for reproducibility \n x_train,x_test,y_train,y_test = train_test_split(data['Text'],data[target],stratify=data[target],random_state=21) \n\n vect = model_util.vecs[vec]\n vect.fit(x_train)\n\n x_train_cv = vect.transform(x_train)\n x_test_cv = vect.transform(x_test)\n\n model = model_util.models[Model]\n\n model.fit(x_train_cv,y_train)\n\n preds = model.predict(x_test_cv)\n \n report = metrics.classification_report(y_test,preds)\n print(\"\\n\")\n print(model_dict[Model.upper()] + \" with \" + vec_dict[vec.upper()])\n print(\"\\n\")\n print(\"Training Report\")\n print(report)\n \n if eval:\n BaselineEvaluateTest(task,model,vect,eval)", "def train(self, trainingData, trainingLabels, validationData, validationLabels ):\n import sklearn\n from sklearn import svm\n\n \"*** YOUR CODE HERE ***\"\n self.sklearn_classifier = svm.SVC(C=2, gamma=0.025, decision_function_shape='ovo', tol=0.015)\n self.sklearn_classifier.fit(trainingData, trainingLabels)" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
When a user asks for a potential_category, rank the possible categories by relevance and return the top match
def find_match(potential_category: str, categories: List[str]): return process.extractOne(potential_category, categories)[0]
[ "def category(category, term):", "def FoodRank(search_term):\n return _ranks[search_term.lower()]", "def get_field(self, summary, category=\"nationality\"):\n known_set = self.sets[category]\n guess = [word\n for word in self.tokenizr.tokenize(summary)\n if word in known_set]\n guess = list(set(guess)) # make sure entries are unique\n result = None\n\n while guess:\n current_guess = guess.pop(0)\n resp = raw_input(\"guessing, accept %s? [y]/n: \" % (current_guess))\n if not resp:\n result = current_guess\n break\n\n if result is None:\n result = self.suggest[category].prompt(\n \"Enter {}: \".format(category))\n\n return result", "def test_category(self):\n # XXX identifiers would be groovy\n self.check_search(\n dict(category=u'36:self'), # trap\n [u'Ingrain'],\n 'simple category search, vs self',\n exact=True,\n )\n self.check_search(\n dict(category=u'14:target'), # protect\n [u'Conversion 2', u'False Swipe'],\n 'simple category search, vs target',\n exact=True,\n )\n\n # Multiple categories\n # sleep OR attack up\n self.check_search(\n dict(category=[u'29:self', u'15:target'], category_operator=u'any'),\n [u'Rest', u'Swagger'],\n 'multiple category search (OR)',\n exact=True,\n )\n\n # sleep AND heal self\n self.check_search(\n dict(category=[u'29:self', u'13:self'], category_operator=u'all'),\n [u'Rest'],\n 'multiple category search (AND)',\n exact=True,\n )", "def get_category_score(comb):\n score = 0\n comb_type = card_mapping[comb][0]['type']\n if comb_type == COMB_TYPE.ROCKET:\n score = 20\n else:\n max_card = card_mapping[comb][0]['main']\n if type(max_card) != int:\n max_card = mapping(max_card)\n if comb_type == COMB_TYPE.PASS:\n score = 0\n elif comb_type in [COMB_TYPE.SOLO, COMB_TYPE.PAIR, COMB_TYPE.TRIO, COMB_TYPE.TRIO_ONE, COMB_TYPE.TRIO_TWO]:\n score = max_card - 10\n elif comb_type in [COMB_TYPE.SEQUENCE, COMB_TYPE.SEQUENCE_TWO]:\n score = max_card - 10 + 1\n elif comb_type == COMB_TYPE.SEQUENCE_THREE:\n score = (max_card - 3 + 1) / 2\n elif comb_type == COMB_TYPE.BOMB:\n score = max_card - 3 + 7\n else:\n print('invalid card type')\n return score", "def run_search(question, category, breed_list):\n category_list = show_options(breed_list, category)\n user_category = user_input_value(question, category_list)\n breed_list = create_breed_list(breed_list, category, user_category)\n return breed_list", "def _look_in_concordance(self, term, concordance):\n\n suggested = dict()\n words = [word.strip(',.:;*').lower() \\\n for word in str(self.tree.item(term)['values'][0]).split(' ')]\n# messagebox.showwarning(\"_look_in_concordance\",\"words={}\".format(words))\n for word in words:\n if word in concordance:\n for item in concordance[word]:\n if item in suggested:\n suggested[item] += 1\n else:\n suggested[item] = 1\n# if word == 'ad':\n# messagebox.showwarning(\"word 'ad' suggested?\",\"suggested={}\".format(suggested))\n# pass\n rank = sorted(suggested, key=suggested.get, reverse=True)\n for item in rank:\n if item not in self.tree.get_children(term):\n self.tree.insert(term,'end', \\\n values=[self.tree.item(item)['values'][0], \\\n self.tree.item(item)['values'][1]],\\\n text='possible', tags=('suggestions',))\n if len(rank) > 0 and self.tree.parent(term) != 'suggestions':\n for child in self.tree.get_children(term):\n self.tree.item(item, tags='suggestions')\n self.tree.item(term, tags='suggestions')\n self.tree.move(term, 'suggestions', 'end')", "def search_categorie(input) :\n j = _jpdb()\n _input = _process_search_input(input)\n if not _input : return None\n f = 
j.base_format\n q = Query().select(f.categories, f.categories.id, f.categories.name)\n q.where().equal(f.categories.name, _input)\n categorie_data = j.executeQuery(q)\n\n if categorie_data: \n cat_id, cat_name = categorie_data[0]\n examples = _create_examples(j.list_word_by_categorie, cat_name)\n return SelectorResult('categorie', cat_id, cat_name, *examples)", "def find_evaluator(self, category, force_creation=False):\n cat = category.lower()\n for e in self.evaluators:\n if str(e.CATEGORY.lower()) == cat: return e\n for e in self.evaluators: # To find modified categories, ex. '5-Hubs':\n # But note, it picks the first match it finds, so this will be a\n # problem if you've told it to compute two versions of a category.\n if cat in str(e.CATEGORY.lower()):\n print(\"DECISION: Found no category '\" + category + \"'; \" \\\n \"using '\" + e.CATEGORY + \"' instead.\")\n return e\n if force_creation: \n e = Analyst.make_default_evaluator(str(category))\n self.add_evaluators(e, allow_duplicates=False)\n return e\n return None", "def match_category(self):\n\n\t\t# if an admin manually assigned category\n\t\tif self.category:\n\t\t\treturn self.category\n\n\t\tcategories = Category.objects.all()\n\t\tif categories.count() > 0:\n\t\t\t# TODO: need to come up with intelligent algorithm to category iwants\n\t\t\tmatch_cat = random.sample(categories, 1)\n\t\t\tself.category = match_cat[0]\n\t\t\tself.save()\n\t\treturn None", "def best(self, inputted_word, suggestions, word_model=None):\n\n suggestions = list(suggestions)\n\n def comparehamm(one, two):\n score1 = self.hamming_distance(inputted_word, one)\n score2 = self.hamming_distance(inputted_word, two)\n # print(type(score1),\" \", score1, score2)\n # return functools.cmp_to_key(score1, score2) # lower is better\n if score1 < score2:\n return -1\n elif score1 > score2:\n return 1\n else:\n return 0\n\n def comparefreq(one, two):\n score1 = self.frequency(one, word_model)\n score2 = self.frequency(two, word_model)\n # print(\"freq of \", one, \" = \", score1, \"freq of \", two, \" = \", score2)\n # return functools.cmp_to_key(score2, score1) # higher is better\n if score1 < score2:\n return 1\n elif score1 > score2:\n return -1\n else:\n return 0\n\n freq_sorted = sorted(suggestions, key=functools.cmp_to_key(comparefreq))[10:] # take the top 10\n hamming_sorted = sorted(suggestions, key=functools.cmp_to_key(comparehamm))[10:] # take the top 10\n # print('FREQ', freq_sorted)\n # print('HAM', hamming_sorted)\n return ''", "def parse_from_search_page_with_top_category(self, response):\n data_type = response.meta['data_type']\n\n if data_type == 'category':\n original_url = response.meta['original_url']\n pid = response.meta['category_id']\n\n loader = ItemLoader(item=SearchPage(), response=response)\n loader.add_value('platform_code', response.meta['platform_code'])\n\n for s in response.css('div.catid_%s > div.nav-category' % pid):\n\n cat_id = ''\n for s1 in s.css('div.nc-value div.types a'):\n loader.add_value(\n 'category_name', s1.xpath('text()').extract_first())\n loader.add_value(\n 'category_id', s1.xpath('@data-val').extract_first())\n loader.add_value(\n 'parent_category_id',\n s1.xpath('@data-tagid').extract_first())\n loader.add_value(\n 'category_url',\n original_url + '&vcid=%s' % s1.xpath(\n '@data-val').extract_first())\n loader.add_value('category_status', '1')\n loader.add_value('category_level', '3')\n loader.add_value('category_is_leaf', '1')\n cat_id = s1.xpath('@data-tagid').extract_first()\n\n loader.add_value('category_id', 
cat_id)\n loader.add_value(\n 'category_name',\n s.css('div.nc-key::text').extract_first())\n loader.add_value('parent_category_id', pid)\n loader.add_value('category_url', '')\n loader.add_value('category_status', '1')\n loader.add_value('category_level', '2')\n loader.add_value('category_is_leaf', '0')\n\n loader.add_value(\n 'created_time',\n datetime.datetime.strftime(\n datetime.datetime.today(), '%Y-%m-%d %H:%M:%S'))\n yield loader.load_item()", "def get_category_related(category_name):\n \n\n apk_lst = set()\n logging.info('start')\n # Process category_name into the correct format\n if ' ' in category_name:\n category_name = '_'.join(category_name.split(' '))\n category_name = category_name.upper()\n \n # Visit the main page of the category\n base_url = 'https://play.google.com/store/apps/category/'\n url = base_url + category_name + '?hl=en'\n r = requests.get(url)\n if check_error(r, url):\n return\n data = r.text\n soup = BeautifulSoup(data, 'html.parser')\n\n # Collect urls of all the sub-categories \n link_lst = set()\n for item in soup.find_all('a', class_='title-link id-track-click'):\n link_url = item.get('href').encode('utf-8').strip()\n url = 'https://play.google.com' + link_url\n link_lst.add(url)\n \n # Visit the webpage of each sub-category and collect all package names\n for link_url in link_lst:\n count = 0\n for _ in range(50):\n data = {'start':str(count), 'num':60}\n r = requests.post(link_url, data=data)\n if check_error(r, url):\n break\n \n data = r.text\n soup = BeautifulSoup(data, 'html.parser')\n\n tmp = list()\n for item in soup.find_all('span', class_='preview-overlay-container'):\n tmp.append(item.get('data-docid'))\n old_size = len(apk_lst)\n apk_lst = apk_lst.union(tmp)\n if old_size == len(apk_lst):\n logging.info('sub-category %s done; current size: %d' % (link_url, len(apk_lst)))\n \n break \n count += 60\n logging.info('category %s done; total size: %d' % (category_name, len(apk_lst)))\n return apk_lst", "def search(query):\n r = requests.get(BASE_URL + str(query))\n page_body = r.text\n # Hand the page source to Beautiful Soup\n\n soup = BeautifulSoup(page_body, 'html.parser')\n \n product_item = soup.select('div.product-info-item')\n if(len(product_item)==0):\n product_item = soup.select('div.cat')\n #get the cateegory\n product_item = product_item[0]\n category = str(product_item.find_all('a'))\n category = category[category.find(\">\")+1:-5]\n \n url = str(soup.findAll('meta',property=\"og:url\"))\n url_splitted = url.split('/')\n print(url)\n #parent_category = url_splitted[4]\n if(len(url)>20):\n parent_category = url_splitted[4]\n else:\n parent_category = None\n\n \n return category, parent_category", "def eval_category(self, u_eval, v_compares):\n # get cosinus sim with k-NN\n # cos_sim_results = [(tag, cos_sim) ... 
] \n cos_sim_results = self.kNN(u_eval, v_compares)\n\n # found the most common tag\n c = Counter([tag for tag, _ in cos_sim_results])\n try:\n tag, number = c.most_common(1)[0]\n except IndexError: # No result (cos_sim_results is empty)\n logging.error(\"No results for %s %s\" % (u_eval, cos_sim_results))\n return\n\n # get the cosinus similarity average for the founded tag\n average = 0.0\n for _tag, _number in cos_sim_results:\n if _tag == tag:\n average += _number\n average /= number\n\n logging.debug(\"%s common tag %s (nb %s) (av %s)\" % \\\n (u_eval, tag, number, average))\n\n return tag, average", "def category_match(tags):\n\n # Import list of category averages\n categories = {}\n with open(os.path.join(TESTS, 'average_tags.txt'), 'r') as fi:\n categories = json.load(fi)\n fi.close()\n\n # Calculate match for each category\n matches = {}\n for category in categories:\n match = cat_similar(tags, categories[category])\n\n if match:\n matches[category] = match\n\n return matches", "def predict_category(predictions, category='crying_baby', strategy='Once', threshold=0.15):\n return {\n 'Full': TRUE_CAT if len([1 for prediction in predictions\n for cat in [list(prediction.keys())[0]] if cat == category and prediction[category] > threshold\n ]) == len(predictions) else FALSE_CAT,\n 'Half': TRUE_CAT if len([1 for prediction in predictions\n for cat in [list(prediction.keys())[0]] if cat == category and prediction[category] > threshold\n ]) > len(predictions) / 2.0 else FALSE_CAT,\n 'Once': TRUE_CAT if len([1 for prediction in predictions\n for cat in [list(prediction.keys())[0]] if cat == category and prediction[category] > threshold\n ]) >= 1 else FALSE_CAT,\n 'Panic': TRUE_CAT if len([1 for prediction in predictions\n for cat in list(prediction.keys()) if cat == category]) > 0 else FALSE_CAT\n }[strategy]", "def get_player_rankings(self, category):\n pass", "def keyword_classifier(utterance):\n categories = {\n 'hello': ['hi ', 'greetings', 'hello', 'what\\'s up', 'hey ', 'how are you?', 'good morning', 'good night',\n 'good evening', 'good day', 'howdy', 'hi-ya', 'hey ya'],\n 'bye': ['bye', 'cheerio', 'adios', 'sayonara', 'peace out', 'see ya', 'see you', 'c ya', 'c you', 'ciao'],\n 'ack': ['okay', 'whatever', 'ok ', 'o.k. 
', 'kay ', 'fine '],\n 'confirm': ['is it', 'is that', 'make sure', 'confirm', 'double check', 'check again', 'does it'],\n 'deny': ['dont want', 'don\\'t want', 'wrong', 'dont like', 'don\\'t like'],\n 'inform': ['dont care', 'don\\'t care', 'whatever', 'bakery', 'bar', 'cafe', 'coffeeshop', 'pub', 'restaurants',\n 'roadhouse', 'african',\n 'american', 'arabian', 'asian', 'international', 'european', 'central american', 'middle eastern',\n 'world', 'vegan', 'vegetarian', 'free', 'kosher', 'traditional', 'fusion', 'modern', 'afghan',\n 'algerian', 'angolan', 'argentine',\n 'austrian', 'australian', 'bangladeshi', 'belarusian', 'belgian', 'bolivian', 'bosnian',\n 'herzegovinian', 'brazilian', 'british', 'bulgarian', 'cambodian',\n 'cameroonian', 'canadian', 'cantonese', 'catalan', 'caribbean', 'chadian', 'chinese', 'colombian',\n 'costa rican', 'czech', 'congolese', 'cuban', 'danish', 'ecuadorian', 'salvadoran', 'emirati',\n 'english', 'eritrean',\n 'estonian',\n 'ethiopian', 'finnish', 'french', 'german', 'ghanaian', 'greek', 'guatemalan', 'dutch', 'honduran',\n 'hungarian', 'icelandic',\n 'indian', 'indonesian', 'iranian', 'iraqi', 'irish', 'israeli', 'italian', 'ivorian', 'jamaican',\n 'japanese',\n 'jordanian', 'kazakh', 'kenyan', 'korean', 'lao', 'latvian', 'lebanese', 'libyan', 'lithuanian',\n 'malagasy', 'malaysian',\n 'mali', 'mauritanian', 'mediterranean', 'mexican', 'moroccan', 'namibian', 'new zealand',\n 'nicaraguan',\n 'nigerien', 'nigerian', 'norwegian', 'omani', 'pakistani', 'panamanian', 'paraguayan', 'peruvian',\n 'persian', 'philippine', 'polynesian', 'polish', 'portuguese', 'romanian', 'russian', 'scottish',\n 'senegalese', 'serbian',\n 'singaporean', 'slovak', 'somalian', 'spanish', 'sudanese', 'swedish', 'swiss', 'syrian', 'thai',\n 'tunisian', 'turkish',\n 'ukranian', 'uruguayan', 'vietnamese', 'welsh', 'zambian', 'zimbabwean', 'west', 'north', 'south',\n 'east', 'part of town', 'moderate', 'expensive', 'cheap', 'any ', 'priced', 'barbecue', 'burger',\n 'chicken',\n 'doughnut', 'fast food',\n 'fish and chips', 'hamburger', 'hot dog', 'ice cream', 'noodles', 'pasta', 'pancake', 'pizza',\n 'ramen', 'restaurant', 'seafood', 'steak',\n 'sandwich', 'sushi'],\n 'negate': ['no ', 'false', 'nope'],\n 'repeat': ['repeat', 'say again', 'what was that'],\n 'reqalts': ['how about', 'what about', 'anything else'],\n 'reqmore': ['more', 'additional information'],\n 'request': ['what', 'whats' 'what\\'s', 'why', 'where', 'when', 'how much', 'may', 'address', 'post code',\n 'location', 'phone number'],\n 'restart': ['reset', 'start over', 'restart'],\n 'thankyou': ['thank you', 'cheers', 'thanks'],\n 'affirm': ['ye ', 'yes', 'right ']\n }\n\n classification = []\n sentence_to_classify = utterance.lower()\n for category, keywords in categories.items():\n keywords_found = [keyword for keyword in keywords if keyword in sentence_to_classify]\n if len(keywords_found) > 0: classification.append(category)\n\n return classification if len(classification) > 0 else ['null']" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Gets the is_bot of this UserBase. A boolean specifying whether the user is a bot or full account.
def is_bot(self): return self._is_bot
[ "def get_user_is_bot(self, user_id):\n if user_id == \"USLACKBOT\": return True # for some reason, Slack doesn't consider Slackbot a real bot\n user_info = self.get_user_info_by_id_cached(user_id)\n return user_info.get(\"is_bot\", False)", "def bot_type(self):\n return self._bot_type", "def is_bot(self) -> bool:", "def is_bot(self, request):\n lowercase_user_agent = request.META.get(\"HTTP_USER_AGENT\", \"\").lower()\n return (\n \"bot\" in lowercase_user_agent or \"spider\" in lowercase_user_agent\n )", "def is_bot(self, is_bot):\n\n self._is_bot = is_bot", "def get_is_current_user(self):\n return self.is_current_user", "def is_default_bot_for_cog_svc_account(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"is_default_bot_for_cog_svc_account\")", "def is_active(self):\n return self.user.is_active", "def is_active(self): # pragma: no cover\n return self.user.is_active", "def is_active(self):\n return self.status == ACTIVE_USER", "def is_admin(self):\n if self.user is None:\n return False\n if unicode(self.user._id) in self.barcamp.admins:\n return True\n if self.user.is_admin:\n return True\n return False", "def isBoolean(self):\r\n return self._wrap(type(self.obj) is bool)", "def getTweetVerified(self):\n return self.isVerified", "def _get_user_active_status(self, user):\n return user.is_active or is_account_activation_requirement_disabled()", "def is_auth(self):\n auth = False\n if self._session_id:\n auth = True\n elif self._username and self._password:\n auth = True\n return auth", "def is_bot(environ, is_bot_ua=is_bot_ua):\n if is_bot_ua(environ.get('HTTP_USER_AGENT', '')):\n return True\n if 'HTTP_ACCEPT_LANGUAGE' not in environ:\n return True\n return False", "def is_admin(self):\n if not self.current_user:\n return False\n else:\n return self.current_user in [\"1\"]", "def is_user_business(user):\n return user.groups.filter(name=settings.BUSINESS_USER).exists()", "async def isadmin(self, ctx: Context, user: Union[discord.Member, discord.User]=None):\n user = user or ctx.author\n\n if user.id in config.bot_owners:\n return await ctx.success(f\"Yes! {user} is an bot's admin.\")\n\n return await ctx.failure(f\"No! {user} is not an bot's admin.\")" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Sets the is_bot of this UserBase. A boolean specifying whether the user is a bot or full account.
def is_bot(self, is_bot): self._is_bot = is_bot
[ "def is_bot(self):\n return self._is_bot", "def get_user_is_bot(self, user_id):\n if user_id == \"USLACKBOT\": return True # for some reason, Slack doesn't consider Slackbot a real bot\n user_info = self.get_user_info_by_id_cached(user_id)\n return user_info.get(\"is_bot\", False)", "def bot_type(self, bot_type):\n\n self._bot_type = bot_type", "async def _togglebot(self, ctx):\n user= ctx.message.author\n if self.settings[\"BOT_SETTINGS\"][\"ENABLED\"]:\n self.settings[\"BOT_SETTINGS\"][\"ENABLED\"] = False\n allowBot = \"Disabled\"\n elif not self.settings[\"BOT_SETTINGS\"][\"ENABLED\"]:\n self.settings[\"BOT_SETTINGS\"][\"ENABLED\"] = True\n allowBot = \"Enabled\"\n await self.bot.say(\"`Work in progress. Be aware that enabling and using this may cause strange behaviour`\")#deleteme\n await self.bot.say(\"{} ` The in-game bot is now: {}.`\".format(user.mention, allowBot))\n logger.info(\"{}({}) has {} the in-game bot.\".format(user, user.id, allowBot.upper()))\n fileIO(SETTINGS, \"save\", self.settings)", "def set_is_current_user(self, is_current_user):\n self.is_current_user = is_current_user", "def is_bot(self) -> bool:", "def bot_type(self):\n return self._bot_type", "def is_business(self, is_business):\n\n self._is_business = is_business", "def is_logical(self, is_logical):\n self._is_logical = is_logical", "def set_is_org_active(self, is_org_active):\n self.is_org_active = is_org_active", "async def set_bot():\n\n self = await LOCAL.APP.get_me()\n LOCAL.bot_id = self.id\n LOCAL.bot_name = self.first_name\n LOCAL.bot_username = self.username", "def is_admin(self, is_admin):\n\n self._is_admin = is_admin", "def is_bot(self, request):\n lowercase_user_agent = request.META.get(\"HTTP_USER_AGENT\", \"\").lower()\n return (\n \"bot\" in lowercase_user_agent or \"spider\" in lowercase_user_agent\n )", "def setBooleanType(self, booleanType: cern.japc.value.BooleanType) -> None:\n ...", "def is_default_bot_for_cog_svc_account(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"is_default_bot_for_cog_svc_account\")", "async def create_bot_bans(self):\n for user in await self.ex.sql.s_general.fetch_bot_bans():\n user_id = user[0]\n user_obj = await self.ex.get_user(user_id)\n user_obj.bot_banned = True", "def is_admin(self, is_admin: bool):\n if is_admin is None:\n raise ValueError(\"Invalid value for `is_admin`, must not be `None`\") # noqa: E501\n\n self._is_admin = is_admin", "def boolean(self, boolean):\n\n self._boolean = boolean", "def setValue(self, trueOrFalse):\r\n self.value = trueOrFalse" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Gets the avatar_url of this UserBase.
def avatar_url(self): return self._avatar_url
[ "def get_user_avatar_url(self):\n if self.user_id:\n return self.session.project_client.service.getUserAvatarURL(\n self.user_id)\n else:\n raise PyleroLibException(\"The user object is empty\")", "def get_avatar_url(self):\n return self.get_profile_pic(pic_type='square')", "def avatar_url(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"avatar_url\")", "def get_avatar_url(self):\n\n if not self.avatar_uuid:\n return None\n return '%s/%s' % (\n settings.MEDIA_SERVER_BASE_URL,\n self.avatar_uuid\n )", "def avatar_uri(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"avatar_uri\")", "def avatar(self):\n # This is lazy loaded and cached, so it won't affect performances if\n # you don't need avatars\n if not hasattr(self, \"_avatar\"):\n avatars = self._api.call(\"getUserProfilePhotos\", {\n \"user_id\": self.id,\n \"limit\": 1,\n }, expect=UserProfilePhotos)\n\n # If the user has no avatars just use None\n self._avatar = None\n if len(avatars.photos):\n self._avatar = avatars.photos[0] # Take the most recent one\n\n return self._avatar", "def _get_avatar_url(user: Member) -> str:\n # if user.avatar.startswith(\"a\"):\n # url = user.avatar_url_as(format=\"gif\")\n # else:\n url = user.avatar_url_as(format=\"png\")\n\n return url.split(\"?\")[0] # we really don't care about the size, chop it off", "def profile_image_url(self):\n\n return \"https://www.gravatar.com/avatar/{}?s=40\".format(hashlib.md5(self.user.email.encode('utf-8')).hexdigest())", "def GetAvatar(self):\n\n return self.__GetJsonOrNone(\"/users/\"+self.userName+\"/avatar\", False)", "def get_user_avatar(user: Union[discord.User, discord.Member]) -> str:\n return user.avatar_url if user.avatar_url is not None else user.default_avatar_url", "def guild_avatar_url(self) -> typing.Optional[files.URL]:\n return self.make_guild_avatar_url()", "def default_avatar_url(self) -> files.URL: # noqa: D401 - Imperative mood\n return routes.CDN_DEFAULT_USER_AVATAR.compile_to_file(\n urls.CDN_URL,\n discriminator=int(self.discriminator) % 5,\n file_format=\"png\",\n )", "def full_avatar(self):\n return self.avatar or self._default_avatar", "def image_url(self):\n return self._user_dict['image_url']", "def get_user_avatar_url(user_id):\n user_data = get_user_data()\n try:\n return user_data[int(user_id)]['avatar_url']\n except KeyError:\n abort(404)", "def avatar_id(self):\n return self._avatar_id", "def user_get_default_avatar_url(self, size):\n return skin_utils.get_media_url(askbot_settings.DEFAULT_AVATAR_URL)", "def resolve_avatar(self, info):\n if self.avatar:\n self.avatar = info.context.build_absolute_uri(self.avatar.url)\n return self.avatar", "def get_image_url(self):\n return self._url" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Sets the avatar_url of this UserBase.
def avatar_url(self, avatar_url): self._avatar_url = avatar_url
[ "def users_set_avatar(self, avatar_url, **kwargs):\n if avatar_url.startswith('http://') or avatar_url.startswith('https://'):\n return self.__call_api_post('users.setAvatar', avatarUrl=avatar_url, kwargs=kwargs)\n else:\n avatar_file = {\"image\": open(avatar_url, \"rb\")}\n return self.__call_api_post('users.setAvatar', files=avatar_file, kwargs=kwargs)", "def avatar_url(self):\n return self._avatar_url", "def avatar(self, avatar):\n\n self._avatar = avatar", "def profile_image_url(self, profile_image_url):\n\n self._profile_image_url = profile_image_url", "def get_user_avatar_url(self):\n if self.user_id:\n return self.session.project_client.service.getUserAvatarURL(\n self.user_id)\n else:\n raise PyleroLibException(\"The user object is empty\")", "def avatar_id(self, avatar_id):\n\n self._avatar_id = avatar_id", "def get_avatar_url(self):\n\n if not self.avatar_uuid:\n return None\n return '%s/%s' % (\n settings.MEDIA_SERVER_BASE_URL,\n self.avatar_uuid\n )", "def update_avatar(self, url):\n if self.avatar:\n logging.info(f'{self} already has an avatar: {self.avatar}')\n # TODO: check if image has been updated\n else:\n logging.info(f'{self} has no profile image.')\n img_temp = NamedTemporaryFile(delete=True)\n # TODO: Use requests instead of urllib?\n img_temp.write(urlopen(url).read()) # noqa: S310\n img_temp.flush()\n self.avatar.save(f'{self.pk}', File(img_temp))", "async def utils_set_avatar(self, ctx, url: str=None):\r\n if url is None:\r\n if not ctx.message.attachments:\r\n return await ctx.say(\"No avatar found! \"\r\n \"Provide an Url or Attachment!\")\r\n else:\r\n url = ctx.message.attachments[0].get(\"url\")\r\n\r\n ext = url.split(\".\")[-1]\r\n mime = mimetypes.types_map.get(ext)\r\n if mime is not None and not mime.startswith(\"image\"):\r\n # None can still be an image\r\n return await ctx.send(\"Url or Attachment is not an Image!\")\r\n\r\n async with aiohttp.ClientSession() as s, s.get(url) as r:\r\n if 200 <= r.status < 300:\r\n content = await r.read()\r\n else:\r\n return await ctx.send(\"Invalid Response code: {}\"\r\n .format(r.status_code))\r\n\r\n try:\r\n await self.amethyst.user.edit(avatar=content)\r\n except BaseException: # I don't know the exact Exception type\r\n return await ctx.send(\"Avatar was too big or not an image!\")\r\n\r\n await ctx.send(\"Successfully updated avatar!\")", "def default_avatar_url(self) -> files.URL: # noqa: D401 - Imperative mood\n return routes.CDN_DEFAULT_USER_AVATAR.compile_to_file(\n urls.CDN_URL,\n discriminator=int(self.discriminator) % 5,\n file_format=\"png\",\n )", "def avatar_url(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"avatar_url\")", "def set_avatar(self, asset_id):\n # Implemented from template for osid.resource.ResourceForm.set_avatar_template\n if self.get_avatar_metadata().is_read_only():\n raise errors.NoAccess()\n if not self._is_valid_id(asset_id):\n raise errors.InvalidArgument()\n self._my_map['avatarId'] = str(asset_id)", "def set_default_avatar(cls, v, *, values): # pylint: disable=no-self-argument\n seed = values[\"username\"]\n return v or f\"https://picsum.photos/seed/{seed}/200/\"", "def get_avatar_url(self):\n return self.get_profile_pic(pic_type='square')", "def set_avatar(self, handle_id, avatar_path):\n self.check_connected()\n self.check_handle(tp.constants.HANDLE_TYPE_CONTACT, handle_id)\n\n # FIXME: check that the avatar meets the requirements\n\n handle_obj = self._handles[tp.constants.HANDLE_TYPE_CONTACT, handle_id]\n\n # will raise tp.errors.InvalidArgument if it's not an 
image filename\n pin.common.image_filename_to_mime_type(avatar_path)\n\n handle_obj.set_avatar(avatar_path)\n self.save()", "def set_user_avatar():\n # 获取用户id\n user_id = g.iser_id\n # 获取图片文件\n avatar = request.files.get(\"avatar\")\n # 读取图片文件,转换成七牛云能接受的bytes类型\n avatar_data = avatar.read()\n # 调用七牛云,实现图片上传\n try:\n image_name = storage(avatar_data)\n except Exception as e:\n current_app.logger.error(e)\n return jsonify(errno=RET.THIRDERR, errmsg=\"上传图片失败\")\n # 把图片文件名保存到数据库中\n # db.session.add(user)数据库会话对象\n # 如果使用update则不需要添加数据库会话对象\n try:\n User.query.filter_by(id=user_id).update({\"avatar_url\":image_name})\n # 提交会话\n db.session.commit()\n except Exception as e:\n current_app.logger.error(e)\n # 写入数据失败,回滚会话\n db.session.rollback()\n return jsonify(errno=RET.DBERR, errmsg=\"保存用户头像失败\")\n # 拼接图片绝对路径\n image_url = constants.QINIU_DOMIN_PREFIX + image_name\n # 返回结果\n return jsonify(errno=RET.OK, errmsg=\"OK\", data={\"avatar_url\":image_url})", "def SetAvatar(self, fileName):\n\n self.__PostFile(\"/avatar\", fileName, \"avatar[file]\")", "def avatar_uri(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"avatar_uri\")", "def registerAvatar(self, avatar):\r\n assert self._avatar is None\r\n self._avatar = avatar" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Gets the avatar_version of this UserBase. Version for the user's avatar. Used for cachebusting requests for the user's avatar. Clients generally shouldn't need to use this; most avatar URLs sent by Zulip will already end with `?v={avatar_version}`.
def avatar_version(self): return self._avatar_version
[ "def avatar_version(self, avatar_version):\n\n self._avatar_version = avatar_version", "def GetAvatar(self):\n\n return self.__GetJsonOrNone(\"/users/\"+self.userName+\"/avatar\", False)", "def avatar(self):\n # This is lazy loaded and cached, so it won't affect performances if\n # you don't need avatars\n if not hasattr(self, \"_avatar\"):\n avatars = self._api.call(\"getUserProfilePhotos\", {\n \"user_id\": self.id,\n \"limit\": 1,\n }, expect=UserProfilePhotos)\n\n # If the user has no avatars just use None\n self._avatar = None\n if len(avatars.photos):\n self._avatar = avatars.photos[0] # Take the most recent one\n\n return self._avatar", "def avatar_url(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"avatar_url\")", "def avatar_url(self):\n return self._avatar_url", "def get_user_avatar_url(self):\n if self.user_id:\n return self.session.project_client.service.getUserAvatarURL(\n self.user_id)\n else:\n raise PyleroLibException(\"The user object is empty\")", "def avatar_id(self):\n return self._avatar_id", "def _get_avatar_url(user: Member) -> str:\n # if user.avatar.startswith(\"a\"):\n # url = user.avatar_url_as(format=\"gif\")\n # else:\n url = user.avatar_url_as(format=\"png\")\n\n return url.split(\"?\")[0] # we really don't care about the size, chop it off", "def getAvatar(self , username):\n methodName = 'getAvatar'\n viewName = '%s.view' % methodName\n\n q = {'username': username}\n\n req = self._getRequest(viewName , q)\n try:\n res = self._doBinReq(req)\n except urllib2.HTTPError:\n # Avatar is not set/does not exist, return None\n return None\n if isinstance(res , dict):\n self._checkStatus(res)\n return res", "def app_avatar(self):\n with open(self.AVATAR_PATH, 'rb') as handle:\n return handle.read()", "def get_user_avatar(user: Union[discord.User, discord.Member]) -> str:\n return user.avatar_url if user.avatar_url is not None else user.default_avatar_url", "def GetAvatarForUser(self, userName):\n\n return self.__GetJsonOrNone(\"/users/\"+userName+\"/avatar\", False)", "def avatar_uri(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"avatar_uri\")", "def full_avatar(self):\n return self.avatar or self._default_avatar", "def get_avatar_url(self):\n\n if not self.avatar_uuid:\n return None\n return '%s/%s' % (\n settings.MEDIA_SERVER_BASE_URL,\n self.avatar_uuid\n )", "async def avatar(self) -> FileBox:\n log.info('avatar() <%s>', self)\n\n avatar = await self.puppet.room_avatar(self.room_id)\n return avatar", "def get_avatar_url(self):\n return self.get_profile_pic(pic_type='square')", "def avatar(self):\n admin_user = User.objects.get(pk=1)\n email = self.original_author\n\n if self.author != admin_user:\n email = self.author.email\n import hashlib\n value = hashlib.md5(email)\n\n return 'http://www.gravatar.com/avatar/%s' % value.hexdigest() + '?s=200'", "def get_version(self):\n args = {\"access_token\": self.access_token}\n try:\n response = self.session.request(\n \"GET\",\n FACEBOOK_GRAPH_URL + self.version + \"/me\",\n params=args,\n timeout=self.timeout,\n proxies=self.proxies,\n )\n except requests.HTTPError as e:\n response = json.loads(e.read())\n raise GraphAPIError(response)\n\n try:\n headers = response.headers\n version = headers[\"facebook-api-version\"].replace(\"v\", \"\")\n return str(version)\n except Exception:\n raise GraphAPIError(\"API version number not available\")" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Sets the avatar_version of this UserBase. Version for the user's avatar. Used for cachebusting requests for the user's avatar. Clients generally shouldn't need to use this; most avatar URLs sent by Zulip will already end with `?v={avatar_version}`.
def avatar_version(self, avatar_version): self._avatar_version = avatar_version
[ "def avatar_version(self):\n return self._avatar_version", "def users_set_avatar(self, avatar_url, **kwargs):\n if avatar_url.startswith('http://') or avatar_url.startswith('https://'):\n return self.__call_api_post('users.setAvatar', avatarUrl=avatar_url, kwargs=kwargs)\n else:\n avatar_file = {\"image\": open(avatar_url, \"rb\")}\n return self.__call_api_post('users.setAvatar', files=avatar_file, kwargs=kwargs)", "def avatar_url(self, avatar_url):\n\n self._avatar_url = avatar_url", "def avatar(self, avatar):\n\n self._avatar = avatar", "def update_avatar(self, url):\n if self.avatar:\n logging.info(f'{self} already has an avatar: {self.avatar}')\n # TODO: check if image has been updated\n else:\n logging.info(f'{self} has no profile image.')\n img_temp = NamedTemporaryFile(delete=True)\n # TODO: Use requests instead of urllib?\n img_temp.write(urlopen(url).read()) # noqa: S310\n img_temp.flush()\n self.avatar.save(f'{self.pk}', File(img_temp))", "def set_default_avatar(cls, v, *, values): # pylint: disable=no-self-argument\n seed = values[\"username\"]\n return v or f\"https://picsum.photos/seed/{seed}/200/\"", "def avatar_id(self, avatar_id):\n\n self._avatar_id = avatar_id", "def put_avatar(self, request):\n user = request.user\n\n file_field = request.POST.get('files[]')\n if file_field == None:\n return not_found(request)\n\n user.unlink_avatar()\n user.data['avatar-key'] = \"\".join([choice(\"0123456789\") for x in range(29)])\n user.data['avatar-filename'] = file_field.filename\n user.data['avatar-type'] = file_field.type\n user.data['avatar-type-options'] = file_field.type_options\n user.data['avatar-ext'] = os.path.splitext(file_field.filename)[1]\n\n upload_fd = open(user.avatar_upload_path, 'w')\n upload_fd.write(file_field.file.read())\n upload_fd.close()\n \n user.generate_avatar_sizes()\n \n cache.set(\"user-info-%s\" % request.user.name, request.user.info, 180)\n \n response_data = [\n {\"name\": user.data.get('avatar-key', \"\") + \".jpg\",\n \"url\": user.avatar_url(\"profile\"),\n \"thumbnail_url\": user.avatar_url(\"tiny\")}]\n \n return Response(content_type=\"application/json\",\n body=json.dumps(response_data))", "def update_avatar(avatar): \r\n updated_info = {\r\n \"current_hp\": avatar.current_hp,\r\n \"current_xp\": avatar.current_xp,\r\n \"level\": avatar.level\r\n }\r\n update_firebase_entry(u'avatar', avatar._id, updated_info)", "def set_user_avatar():\n # 获取用户id\n user_id = g.iser_id\n # 获取图片文件\n avatar = request.files.get(\"avatar\")\n # 读取图片文件,转换成七牛云能接受的bytes类型\n avatar_data = avatar.read()\n # 调用七牛云,实现图片上传\n try:\n image_name = storage(avatar_data)\n except Exception as e:\n current_app.logger.error(e)\n return jsonify(errno=RET.THIRDERR, errmsg=\"上传图片失败\")\n # 把图片文件名保存到数据库中\n # db.session.add(user)数据库会话对象\n # 如果使用update则不需要添加数据库会话对象\n try:\n User.query.filter_by(id=user_id).update({\"avatar_url\":image_name})\n # 提交会话\n db.session.commit()\n except Exception as e:\n current_app.logger.error(e)\n # 写入数据失败,回滚会话\n db.session.rollback()\n return jsonify(errno=RET.DBERR, errmsg=\"保存用户头像失败\")\n # 拼接图片绝对路径\n image_url = constants.QINIU_DOMIN_PREFIX + image_name\n # 返回结果\n return jsonify(errno=RET.OK, errmsg=\"OK\", data={\"avatar_url\":image_url})", "async def utils_set_avatar(self, ctx, url: str=None):\r\n if url is None:\r\n if not ctx.message.attachments:\r\n return await ctx.say(\"No avatar found! 
\"\r\n \"Provide an Url or Attachment!\")\r\n else:\r\n url = ctx.message.attachments[0].get(\"url\")\r\n\r\n ext = url.split(\".\")[-1]\r\n mime = mimetypes.types_map.get(ext)\r\n if mime is not None and not mime.startswith(\"image\"):\r\n # None can still be an image\r\n return await ctx.send(\"Url or Attachment is not an Image!\")\r\n\r\n async with aiohttp.ClientSession() as s, s.get(url) as r:\r\n if 200 <= r.status < 300:\r\n content = await r.read()\r\n else:\r\n return await ctx.send(\"Invalid Response code: {}\"\r\n .format(r.status_code))\r\n\r\n try:\r\n await self.amethyst.user.edit(avatar=content)\r\n except BaseException: # I don't know the exact Exception type\r\n return await ctx.send(\"Avatar was too big or not an image!\")\r\n\r\n await ctx.send(\"Successfully updated avatar!\")", "def SetAvatar(self, fileName):\n\n self.__PostFile(\"/avatar\", fileName, \"avatar[file]\")", "async def appearance_avatar(self, ctx, *, url: str):\n avatar = await util.get_file(self.bot, url)\n await self.bot.user.edit(avatar=avatar)\n await ctx.send(\"Changed avatar.\")", "def set_avatar(self, asset_id):\n # Implemented from template for osid.resource.ResourceForm.set_avatar_template\n if self.get_avatar_metadata().is_read_only():\n raise errors.NoAccess()\n if not self._is_valid_id(asset_id):\n raise errors.InvalidArgument()\n self._my_map['avatarId'] = str(asset_id)", "def avatar_url(self):\n return self._avatar_url", "def customer_change_avatar():\n\tcustomer_id = request.args.get(\"customer_id\")\n\tcustomer_avatar = request.args.get(\"customer_avatar\")\n\ttry:\n\t\tg.cursor.execute(\"UPDATE customer SET customer_avatar = '%s' WHERE customer_id = '%s'\"\n\t\t % (customer_avatar, customer_id))\n\t\tg.conn.commit()\n\t\treturn jsonify({\"Succeed!\": \"Change avatar Succeed!\"})\n\texcept Exception as e:\n\t\tg.conn.rollback()\n\t\tprint traceback.format_exc(e)\n\t\treturn jsonify({\"ERROR\": \"Change avatar failed, please try again later..\"})", "def default_avatar_url(self) -> files.URL: # noqa: D401 - Imperative mood\n return routes.CDN_DEFAULT_USER_AVATAR.compile_to_file(\n urls.CDN_URL,\n discriminator=int(self.discriminator) % 5,\n file_format=\"png\",\n )", "def update_avatar(self, file_instance):\n\n if not type(file_instance) == types.InstanceType:\n self.logger.error(\"Avatar update failed! 
'survivor_avatar' must be %s instead of %s.\" % (types.InstanceType, type(file_instance)))\n return None\n\n fs = gridfs.GridFS(mdb)\n\n if \"avatar\" in self.survivor.keys():\n fs.delete(self.survivor[\"avatar\"])\n self.logger.debug(\"%s removed an avatar image (%s) from GridFS.\" % (self.User.user[\"login\"], self.survivor[\"avatar\"]))\n\n processed_image = StringIO()\n im = Image.open(file_instance.file)\n resize_tuple = tuple([int(n) for n in settings.get(\"application\",\"avatar_size\").split(\",\")])\n im.thumbnail(resize_tuple, Image.ANTIALIAS)\n im.save(processed_image, format=\"PNG\")\n\n avatar_id = fs.put(processed_image.getvalue(), content_type=file_instance.type, created_by=self.User.user[\"_id\"], created_on=datetime.now())\n self.survivor[\"avatar\"] = ObjectId(avatar_id)\n\n mdb.survivors.save(self.survivor)\n self.logger.debug(\"%s updated the avatar for survivor %s.\" % (self.User.user[\"login\"], self.get_name_and_id()))", "def user_update_avatar_type(self):\n\n if 'avatar' in django_settings.INSTALLED_APPS:\n if self.avatar_set.count() > 0:\n self.avatar_type = 'a'\n else:\n self.avatar_type = _check_gravatar(self.gravatar)\n else:\n self.avatar_type = _check_gravatar(self.gravatar)\n self.save()" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Sets the is_admin of this UserBase. A boolean specifying whether the user is an organization administrator.
def is_admin(self, is_admin): self._is_admin = is_admin
[ "def is_admin(self, is_admin: bool):\n if is_admin is None:\n raise ValueError(\"Invalid value for `is_admin`, must not be `None`\") # noqa: E501\n\n self._is_admin = is_admin", "def is_admin(self) -> bool:\n return pulumi.get(self, \"is_admin\")", "def is_billing_admin(self, is_billing_admin):\n\n self._is_billing_admin = is_billing_admin", "def is_admin(self):\n return self.is_administrator", "def _switch_admin(self, admin=False):\n\n self._db.users.update_one(\n {\"email_address_lowercase\": self._tester_email},\n {'$set':\n {\n \"administrator\": admin,\n \"std_des_admin\": True\n },\n })", "def super_admin(self, super_admin):\n\n self._super_admin = super_admin", "def toggle_admin(self):\n self.admin = not self.admin\n db.session.merge(self)\n db.session.commit()\n current_app.logger.debug(\"Admin status for user {} has been changed into {}\"\n .format(self.id, self.admin))", "def is_admin(self):\n if self.user is None:\n return False\n if unicode(self.user._id) in self.barcamp.admins:\n return True\n if self.user.is_admin:\n return True\n return False", "def set_admin(self, admins):\n self.set_group(self._gp_admin_name, admins)", "def setAdminMode(self, adminMode=True):\n return _yarp.Port_setAdminMode(self, adminMode)", "def invalid_admin_state(isadmin):\n if isinstance(isadmin, bool):\n return False\n return True", "def is_admin(self):\n return self.login == ADMIN_LOGIN", "def is_hypervisor_admin_pwd_set(self, is_hypervisor_admin_pwd_set):\n\n self._is_hypervisor_admin_pwd_set = is_hypervisor_admin_pwd_set", "def parse_is_admin( cls, is_admin, lib ):\n try:\n lib.is_admin = bool(is_admin)\n except:\n raise Exception(\"Invalid is_admin '%s'\" % is_admin)\n \n return lib.is_admin, {}", "def is_admin(self,user):\n if user.is_superuser:\n return True\n\n if user.groups.filter(name=self.admin_group_name).count() > 0:\n return True\n else:\n return False", "def admin_type(self, admin_type):\n self._admin_type = admin_type", "def is_admin(self, user):\n return user.name in self.admins", "def is_admin(self):\n return self.is_superuser and self.is_staff and self.is_active", "def is_current_organization_admin(self, admin):\n return admin.is_organization_admin and admin.organization == self.organization" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Gets the is_owner of this UserBase.
def is_owner(self): return self._is_owner
[ "def is_user_is_owner(self):\n return self._tag == 'user_is_owner'", "def get_isowner(self, obj):\n user = self.context.get('user')\n\n if not user.is_anonymous():\n return obj.creator == user\n\n return False", "def is_owner(self, user: User) -> bool:\n return self.has_permission(Permission.OWNER, user)", "def is_user_owner(user_id: int) -> bool:\n owner = Admin.where('admin_user_id', '=', user_id).where('is_owner', '=', True).first()\n\n return owner.is_owner if owner else False", "def user_is_owner(user_id):\n if current_user:\n return user_id == current_user.id\n return False", "def is_owner(self, doc):\n return bool(self.current_user) and \\\n (self.current_user['email'] == doc['owner'] or\n self.is_admin())", "def owner(self) -> 'user.User':\n return self.group.owner if self.group is not None else self.direct_owner", "def is_owner(self, user):\n application_owner = self.application.owner\n if isinstance(application_owner, Team):\n if user in application_owner.accepted:\n return True\n else:\n if application_owner is user:\n return True\n \n additional_owner_ids = self._additional_owner_ids\n if (additional_owner_ids is not None) and (user.id in additional_owner_ids):\n return True\n \n return False", "def is_owner(self, role):\n return self.is_saved and not role.is_anonymous and \\\n self.owner_id == role.identity.pk", "def owner(self) -> None:\n return self.bot.get_user(self.bot.config.owner_ids[0])", "def is_owner(self):\n return Scopes.SCOPE_COMMANDS in self.scopes", "def owner_type(self) -> str:\n return pulumi.get(self, \"owner_type\")", "def _get_owner(self):\n if self.resource.owner is not None:\n try:\n return pwd.getpwnam(self.resource.owner).pw_uid\n except KeyError:\n raise error.InvalidUser()", "def owner_id(self):\n return self._owner_id", "def bot_owner_id(self):\n return self._bot_owner_id", "def getWrappedOwner(self):\n owner = self.getOwnerTuple()\n\n if owner is None or owner is UnownableOwner:\n return None\n\n udb_path, oid = owner\n\n root = self.getPhysicalRoot()\n udb = root.unrestrictedTraverse(udb_path, None)\n\n if udb is None:\n return SU.nobody\n\n user = udb.getUserById(oid, None)\n\n if user is None:\n return SU.nobody\n\n return user.__of__(udb)", "def is_owner(self, is_owner):\n\n self._is_owner = is_owner", "def owner(self) -> Optional[str]:\n return cast(Optional[str], self._properties.get('owner'))", "def get_owner(self):\n owner = gdef.PSID()\n lpbOwnerDefaulted = gdef.BOOL()\n winproxy.GetSecurityDescriptorOwner(self, owner, lpbOwnerDefaulted)\n # Return None of owner is NULL\n return owner or None" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Sets the is_owner of this UserBase.
def is_owner(self, is_owner): self._is_owner = is_owner
[ "def set_owner(self, owner):\n self.settings[\"owner\"] = owner", "def set_owner(self, owner: Owner):\n ...", "def is_user_is_owner(self):\n return self._tag == 'user_is_owner'", "def owner_id(self, owner_id):\n self._owner_id = owner_id", "def owner_type(self, owner_type):\n\n self._owner_type = owner_type", "def setOwner(self, newOwner):\r\n self.owner = newOwner", "def bank_account_owner(self, bank_account_owner):\n\n self._bank_account_owner = bank_account_owner", "def owner(self, owner):\n \n self._owner = owner", "def is_owner(self):\n return self._is_owner", "def is_owner(self, user: User) -> bool:\n return self.has_permission(Permission.OWNER, user)", "def owner(self, owner):\n self._owner = owner", "def scope_owner(self, scope_owner):\n\n self._scope_owner = scope_owner", "def owner_reference(self, owner_reference):\n\n self._owner_reference = owner_reference", "def set_owner(self, owner: Optional[\"STACObject_Type\"]) -> \"Link\":\n self.owner = owner\n return self", "def __set_owner_permissions(self) -> None:\n self.__pg_dir_create_mode = self.__PG_DIR_MODE_OWNER\n self.__pg_file_create_mode = self.__PG_FILE_MODE_OWNER\n self.__pg_mode_mask = self.__PG_MODE_MASK_OWNER", "def bot_owner_id(self, bot_owner_id):\n\n self._bot_owner_id = bot_owner_id", "def _SetOwnerId(self, owner_id):\r\n assert owner_id is not None\r\n self._unique_id = owner_id\r\n self.owner_id = owner_id", "def set_owner(self, data):\n self._owner = self._uni(data)\n self.add_payload('owner', data)", "def user_is_owner(user_id):\n if current_user:\n return user_id == current_user.id\n return False" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Gets the is_billing_admin of this UserBase.
def is_billing_admin(self): return self._is_billing_admin
[ "def is_billing_admin(self, is_billing_admin):\n\n self._is_billing_admin = is_billing_admin", "def is_admin(self):\n if self.user is None:\n return False\n if unicode(self.user._id) in self.barcamp.admins:\n return True\n if self.user.is_admin:\n return True\n return False", "def is_admin(self) -> bool:\n return pulumi.get(self, \"is_admin\")", "def is_admin(self):\n if self.is_main_admin:\n return True\n if self.user is not None and self.barcamp is not None:\n if unicode(self.user._id) in self.barcamp.admins:\n return True\n return False", "def is_admin(self):\n return self.is_administrator", "def is_admin(self):\n return self._data.get(\"admin\", False)", "def is_admin(self):\n return users.is_current_user_admin()", "def is_admin(self):\n return self.is_superuser and self.is_staff and self.is_active", "def is_admin(self):\n return self.login == ADMIN_LOGIN", "def is_admin(self):\n if not self.current_user:\n return False\n else:\n return self.current_user in [\"1\"]", "def is_admin(self):\r\n user_data = self.exists()\r\n if user_data:\r\n return user_data['type'] == \"admin\"\r\n else:\r\n return False", "def billing_configuration(self):\n return self._billing_configuration", "def default_billing(self):\n return self._default_billing", "def supports_bank_admin(self):\n return # boolean", "def is_admin(self,user):\n if user.is_superuser:\n return True\n\n if user.groups.filter(name=self.admin_group_name).count() > 0:\n return True\n else:\n return False", "def iam_access(self):\n return self._validate_iam_access().get('billingConsoleAccessEnabled', False)", "def has_admin(self):\n return bool(search_permission(\"admin\"))", "def is_admin(self, user):\n return user.name in self.admins", "def is_user_admin(user_id: int) -> bool:\n admin = Admin.where('admin_user_id', '=', user_id).first()\n\n return bool(admin)" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Sets the is_billing_admin of this UserBase.
def is_billing_admin(self, is_billing_admin): self._is_billing_admin = is_billing_admin
[ "def is_billing_admin(self):\n return self._is_billing_admin", "def is_admin(self, is_admin):\n\n self._is_admin = is_admin", "def is_admin(self, is_admin: bool):\n if is_admin is None:\n raise ValueError(\"Invalid value for `is_admin`, must not be `None`\") # noqa: E501\n\n self._is_admin = is_admin", "def default_billing(self, default_billing):\n\n self._default_billing = default_billing", "def _switch_admin(self, admin=False):\n\n self._db.users.update_one(\n {\"email_address_lowercase\": self._tester_email},\n {'$set':\n {\n \"administrator\": admin,\n \"std_des_admin\": True\n },\n })", "def billing(self, billing):\n\n self._billing = billing", "def toggle_admin(self):\n self.admin = not self.admin\n db.session.merge(self)\n db.session.commit()\n current_app.logger.debug(\"Admin status for user {} has been changed into {}\"\n .format(self.id, self.admin))", "def set_is_shipping_charge_required(self, is_shipping_charge_required):\n self.is_shipping_charge_required = is_shipping_charge_required", "def billing_configuration(self, billing_configuration):\n\n self._billing_configuration = billing_configuration", "def super_admin(self, super_admin):\n\n self._super_admin = super_admin", "def setAdminMode(self, adminMode=True):\n return _yarp.Port_setAdminMode(self, adminMode)", "def admin_type(self, admin_type):\n self._admin_type = admin_type", "def promote(self):\n if self.is_admin == True:\n pass\n self.is_admin = True\n User.save(self)", "def billing_address(self, billing_address):\n\n self._billing_address = billing_address", "def admin_status(self, admin_status):\n\n self._admin_status = admin_status", "def admin_password(self, admin_password):\n self._admin_password = admin_password", "def is_hypervisor_admin_pwd_set(self, is_hypervisor_admin_pwd_set):\n\n self._is_hypervisor_admin_pwd_set = is_hypervisor_admin_pwd_set", "def set_admin(self, admins):\n self.set_group(self._gp_admin_name, admins)", "def admin_email(self, admin_email):\n\n self._admin_email = admin_email" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Gets the bot_type of this UserBase.
def bot_type(self): return self._bot_type
[ "def chat_type(self) -> str:\n return self.user.chat_type.name", "def user_type(self):\n return self._user_type", "def lobby_type(self):\n return self._get(\"lobby_type\")", "def gametype(self) -> Type[ba.GameActivity]:\n return self._gametype", "def user_type(self):\n if \"userType\" in self._prop_dict:\n return self._prop_dict[\"userType\"]\n else:\n return None", "def type(self):\n return self._league[\"league_type\"]", "def get_user_type(object):\n return object.get_user_type_display()", "def auth_type(self):\n return self._auth_type", "def user_type(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"user_type\")", "def get_agent_type(self):\n\n return self._agent_type;", "def bot_type(self, bot_type):\n\n self._bot_type = bot_type", "def db_type(cls, database) -> str:\n return database.type_from_base(cls.base_type)", "def get_entity_type(self):\n return self.entity_type", "def db_type(self):\n return self._db_type", "def account_type(self):\n return self._account_type", "def bot_id(self) -> str:\n return self._get(\"bot_id\", None)", "def get_entity_type(self):\n return models.get_model(\n 'achievements',\n 'Achievement'\n ).get_entity_type_map()[self.get_entity().__class__]", "def get_session_type(self) -> Type[ba.Session]:\n return self._sessiontype", "def get_type(self, ):\n return self.attrs.get(self.AttributeNames.TYPE, None)" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Sets the bot_type of this UserBase.
def bot_type(self, bot_type): self._bot_type = bot_type
[ "def bot_type(self):\n return self._bot_type", "def user_type(self, user_type):\n self._user_type = user_type", "def set_type(self, rtype=ALL_USERS):\r\n self.type = rtype", "def set_type(self, type):\n self.type = type", "def is_bot(self, is_bot):\n\n self._is_bot = is_bot", "def type(self, type):\n allowed_values = [\"USER\", \"ADMIN\"]\n if type not in allowed_values:\n raise ValueError(\n \"Invalid value for `type` ({0}), must be one of {1}\"\n .format(type, allowed_values)\n )\n\n self._type = type", "def set_type(self, type_balle):\n self.type_balle = type_balle", "def _set_binding_type(cls, binding_type):\n cls._binding_type = binding_type", "def user_id_type(self, user_id_type):\n\n self._user_id_type = user_id_type", "def set_as_type_user(self):\n self.type = MessageTypes.USER", "def set_auth_type(self, auth_type):\n pass", "def entity_type(self, entity_type: str):\n\n self._entity_type = entity_type", "def set_peer_type(self, peer_type):\n self.__block_manager.set_peer_type(peer_type)", "def engine_type(self, engine_type):\n\n self._engine_type = engine_type", "def auth_type(self, auth_type):\n self._auth_type = auth_type", "def db_type(self, db_type):\n self._db_type = db_type", "def setType(self, type):\n pass", "def owner_type(self, owner_type):\n\n self._owner_type = owner_type", "def _set_entity_owner_account_type(self, username, account_type):\n\t\tentity_owner = self._get_entity_owner_by_username(username)\n\t\tentity_owner.set_entity_owner_account_type(account_type)\n\n\t\t# TODO : Eventually remove this. The perform saves should cover it.\n\t\tself._update_owner(username)\n\n\t\treturn us.SUCCESS_MESSAGE" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Gets the bot_owner_id of this UserBase.
def bot_owner_id(self): return self._bot_owner_id
[ "def owner(self) -> None:\n return self.bot.get_user(self.bot.config.owner_ids[0])", "def owner_id(self):\n return self._owner_id", "def owner_id(self) -> int:\n return pulumi.get(self, \"owner_id\")", "def owner_account_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"owner_account_id\")", "def bot_id(self) -> str:\n return self._get(\"bot_id\", None)", "def owner_account_id(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"owner_account_id\")", "def get_owner_id(self):\n return self.strg_ctrl.get_database_handler().get_master_id_from_feed(self.feed_id)", "def _get_owner(self):\n owner = self.resource.owner\n if owner:\n try:\n return platform.getpwnam(owner).pw_uid\n except KeyError:\n raise error.InvalidUser()", "def _get_owner(self):\n if self.resource.owner is not None:\n try:\n return pwd.getpwnam(self.resource.owner).pw_uid\n except KeyError:\n raise error.InvalidUser()", "def owner(self) -> 'user.User':\n return self.group.owner if self.group is not None else self.direct_owner", "def owner_uuid(self) -> str:\n return pulumi.get(self, \"owner_uuid\")", "def owner_contactid(self):\n return self._owner_contactid", "def getWrappedOwner(self):\n owner = self.getOwnerTuple()\n\n if owner is None or owner is UnownableOwner:\n return None\n\n udb_path, oid = owner\n\n root = self.getPhysicalRoot()\n udb = root.unrestrictedTraverse(udb_path, None)\n\n if udb is None:\n return SU.nobody\n\n user = udb.getUserById(oid, None)\n\n if user is None:\n return SU.nobody\n\n return user.__of__(udb)", "def bot_owner_id(self, bot_owner_id):\n\n self._bot_owner_id = bot_owner_id", "def business_owner(self):\n return self._business_owner", "def owner(self):\n return Organization.objects.get(id=self.owner_id)", "def owner(self):\n import pwd\n return pwd.getpwuid(self._stat.st_uid).pw_name", "def owner(self) -> str:\n return pulumi.get(self, \"owner\")", "def owner(self):\n import pwd\n return pwd.getpwuid(self.stat().st_uid).pw_name" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Sets the bot_owner_id of this UserBase.
def bot_owner_id(self, bot_owner_id): self._bot_owner_id = bot_owner_id
[ "def _SetOwnerId(self, owner_id):\r\n assert owner_id is not None\r\n self._unique_id = owner_id\r\n self.owner_id = owner_id", "def owner_id(self, owner_id):\n self._owner_id = owner_id", "def bot_owner_id(self):\n return self._bot_owner_id", "def set_owner(self, owner):\n self.settings[\"owner\"] = owner", "def set_owner(self, owner: Owner):\n ...", "def owner_contactid(self, owner_contactid):\n self._owner_contactid = owner_contactid", "def bank_account_owner(self, bank_account_owner):\n\n self._bank_account_owner = bank_account_owner", "def owner(self, owner):\n \n self._owner = owner", "def owner(self, owner):\n self._owner = owner", "def add_owner_id(data=None, **kw):\n data['owner_id'] = current_user.id", "def set_owner(self, data):\n self._owner = self._uni(data)\n self.add_payload('owner', data)", "def _SyncUniqueIdToOwnerId(self):\r\n self._unique_id = self.owner_id", "def scope_owner(self, scope_owner):\n\n self._scope_owner = scope_owner", "def setOwner(self, newOwner):\r\n self.owner = newOwner", "def business_owner(self, business_owner):\n\n self._business_owner = business_owner", "def owner_participant_uuid(self, owner_participant_uuid):\n\n self._owner_participant_uuid = owner_participant_uuid", "def set_owner(self, owner: Optional[\"STACObject_Type\"]) -> \"Link\":\n self.owner = owner\n return self", "def owner_id(self):\n return self._owner_id", "def migrate_fix_invalid_bot_owner_values(\n apps: StateApps, schema_editor: BaseDatabaseSchemaEditor\n) -> None:\n UserProfile = apps.get_model(\"zerver\", \"UserProfile\")\n UserProfile.objects.filter(is_bot=False).exclude(bot_owner=None).update(bot_owner=None)" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Gets the is_guest of this UserBase. A boolean specifying whether the user is a guest user.
def is_guest(self): return self._is_guest
[ "def is_guest(self) -> bool:\n value = self._data.get(\"IsGuest\")\n return isinstance(value, bool) and value", "def get_guest(self):\n return self.get_user('guest')", "def is_guest(self, is_guest):\n\n self._is_guest = is_guest", "def get_guest_user():\n\n guest = User.query.filter_by(username=\"guest\").first()\n\n if guest == None:\n guest = User(username=\"guest\",\n password_token=pbkdf2_sha256.hash(\"password\"))\n db.session.add(guest)\n db.session.commit()\n\n return guest", "def isGuest(self):\r\n\t\treturn self.username.startswith('*')", "def guest_configuration_enabled(self) -> bool:\n return pulumi.get(self, \"guest_configuration_enabled\")", "def get_guest_level_userland(self, guest=None, session=None):\n if not guest:\n s = self.lock() if not session else session\n guest = s.console.guest\n\n res = bool(guest.get_additions_status(_virtualbox.library.AdditionsRunLevelType.userland))\n if not session:\n self.unlock(s)\n return res", "def is_guest_sharing_enabled(self):\n\t\treturn bool(call_sdk_function('PrlVmCfg_IsGuestSharingEnabled', self.handle))", "def get_is_current_user(self):\n return self.is_current_user", "def validUser(self):\n if self.state == SessionStates.LOGGED_OUT:\n return False\n\n # if self.user == None:\n # return False\n return True", "def guest_format(self):\n return self._guest_format", "def is_admin(self):\r\n user_data = self.exists()\r\n if user_data:\r\n return user_data['type'] == \"admin\"\r\n else:\r\n return False", "def is_admin(self) -> bool:\n return pulumi.get(self, \"is_admin\")", "def is_admin(self):\n return users.is_current_user_admin()", "def user_verified(self):\n return self.user.verified", "def is_admin(self):\n if not self.current_user:\n return False\n else:\n return self.current_user in [\"1\"]", "def is_registered(self):\n if self.user == getpass.getuser():\n return True\n else:\n return False", "def is_superuser_logged_in():\n if is_user_logged_in():\n user_roles = {role.name for role in current_user.roles}\n return bool(user_roles & {Roles.superuser.value})\n return False", "def is_admin(self):\n if self.user is None:\n return False\n if unicode(self.user._id) in self.barcamp.admins:\n return True\n if self.user.is_admin:\n return True\n return False" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Sets the is_guest of this UserBase. A boolean specifying whether the user is a guest user.
def is_guest(self, is_guest): self._is_guest = is_guest
[ "def is_guest(self) -> bool:\n value = self._data.get(\"IsGuest\")\n return isinstance(value, bool) and value", "def guest(self, guest):\n if guest is None:\n raise ValueError(\"Invalid value for `guest`, must not be `None`\")\n\n self._guest = guest", "def is_guest(self):\n return self._is_guest", "def is_admin(self, is_admin):\n\n self._is_admin = is_admin", "def get_guest(self):\n return self.get_user('guest')", "def is_admin(self, is_admin: bool):\n if is_admin is None:\n raise ValueError(\"Invalid value for `is_admin`, must not be `None`\") # noqa: E501\n\n self._is_admin = is_admin", "def guest_format(self, guest_format):\n self._guest_format = guest_format", "def __init__(self, isUser):\n self.role = 'member' if isUser else 'guest_usr'\n super().__init__(self.role)", "def __init__(self, isAdmin):\n self.role = 'admin' if isAdmin else 'guest_usr'\n super().__init__(self.role)", "def isGuest(self):\r\n\t\treturn self.username.startswith('*')", "def enable_guest_mode(self, request):\n self.check_xsrf_token(self.request_state)\n device = _get_device(request)\n user_email = user_lib.get_user_email()\n try:\n device.enable_guest_mode(user_email)\n except device_model.EnableGuestError as err:\n raise endpoints.InternalServerErrorException(str(err))\n except (\n device_model.UnassignedDeviceError,\n device_model.GuestNotAllowedError,\n device_model.UnauthorizedError) as err:\n raise endpoints.UnauthorizedException(str(err))\n else:\n return message_types.VoidMessage()", "def is_hypervisor_admin_pwd_set(self, is_hypervisor_admin_pwd_set):\n\n self._is_hypervisor_admin_pwd_set = is_hypervisor_admin_pwd_set", "def get_guest_user():\n\n guest = User.query.filter_by(username=\"guest\").first()\n\n if guest == None:\n guest = User(username=\"guest\",\n password_token=pbkdf2_sha256.hash(\"password\"))\n db.session.add(guest)\n db.session.commit()\n\n return guest", "def addGuest(self, guest):\n self.guests.add( guest )\n guest.setSite(self)", "def set_guest_sharing_enabled(self, bVmGuestSharingEnabled):\n\t\tcall_sdk_function('PrlVmCfg_SetGuestSharingEnabled', self.handle, bVmGuestSharingEnabled)", "def is_billing_admin(self, is_billing_admin):\n\n self._is_billing_admin = is_billing_admin", "def test_guest_user_created(self):\n self.assertFalse(USER_MODEL.objects.all().exists())\n self.fill_session_cart()\n self.client.post(self.CHECKOUT_URL, self.build_checkout_form())\n self.assertTrue(USER_MODEL.objects.get().is_guest)", "def guest_configuration_enabled(self) -> bool:\n return pulumi.get(self, \"guest_configuration_enabled\")", "def _set_is_tunnel_interface(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name=\"is-tunnel-interface\", rest_name=\"is-tunnel-interface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"is_tunnel_interface must be of a type compatible with boolean\"\"\",\n 'defined-type': \"boolean\",\n 'generated-type': \"\"\"YANGDynClass(base=YANGBool, is_leaf=True, yang_name=\"is-tunnel-interface\", rest_name=\"is-tunnel-interface\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', 
yang_type='boolean', is_config=False)\"\"\",\n })\n\n self.__is_tunnel_interface = t\n if hasattr(self, '_set'):\n self._set()" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Gets the date_joined of this UserBase. The time the user account was created.
def date_joined(self): return self._date_joined
[ "def get_latest_user(self):\n\n return User.objects.latest('date_joined')", "def get_account_created_date(self):\n return self.account_created_date", "def getJoiningTime(self):\n return self.__joinTime", "def get_join_date(self, obj):\n return obj.join_date", "def created_at(self):\n created_at = self.joined_at\n if created_at is None:\n created_at = DISCORD_EPOCH_START\n \n return created_at", "def date_joined(self, date_joined):\n\n self._date_joined = date_joined", "def getUserSignupDate(self):\r\n userinfo = self.getUserInfo()\r\n timestamp = int(float(userinfo[\"signupTimeSec\"]))\r\n return time.strftime(\"%m/%d/%Y %H:%M\", time.gmtime(timestamp))", "def joined_days(self):\n return (timezone.now() - self.user.date_joined).days", "def date_added(self) -> str:\n return self._date_added", "def date_created(self):\n return self._date_created", "def user_time(self):\n return self._user_time", "def created_date(self):\n return self._created_date", "def get_date(self):\n return self.created_at.date()", "def created_datetime(self):\n return self._created_datetime", "def date_added(self) -> str:\n return self._date_added.strftime('%Y-%m-%d')", "def get_account_created_date_formatted(self):\n return self.account_created_date_formatted", "def created_time(self):\n return self._created_time", "def created_time(self) -> float:\n return self._created_time", "def creation_date(self):\n timestamp = self._prof.get(\"timecreated\")\n if timestamp:\n return time.localtime(timestamp)" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Sets the date_joined of this UserBase. The time the user account was created.
def date_joined(self, date_joined): self._date_joined = date_joined
[ "def date_joined(self):\n return self._date_joined", "def test_user_creation_old_date_joined(self):\n new_user = self.registration_profile.objects.create_inactive_user(\n site=Site.objects.get_current(), **self.user_info)\n self.assertEqual(new_user.get_username(), 'alice')\n self.assertEqual(new_user.email, 'alice@example.com')\n self.assertTrue(new_user.check_password('swordfish'))\n self.assertFalse(new_user.is_active)\n\n expiry_date = datetime_now() - timedelta(settings.ACCOUNT_ACTIVATION_DAYS)\n self.assertGreater(new_user.date_joined, expiry_date)", "def set_account_created_date(self, account_created_date):\n self.account_created_date = account_created_date", "def get_latest_user(self):\n\n return User.objects.latest('date_joined')", "def last_login_date(self, last_login_date):\n\n self._last_login_date = last_login_date", "def _set_joined(self, data):\n if self.joined_at is None:\n self.joined_at = parse_joined_at(data)", "def get_join_date(self, obj):\n return obj.join_date", "def SetDateCreated(self, date):\n self.datecreated = str(date)", "def created_date(self, created_date):\n self._created_date = created_date", "def date_created(self, date_created: datetime):\n\n self._date_created = date_created", "def created_user(self, created_user):\n self._created_user = created_user", "def authored_date(self, authored_date):\n self._authored_date = authored_date", "async def joined(self, ctx, member: discord.Member):\n await ctx.send(f'{member.name} joined in {member.joined_at}')", "def date_created(self, date_created):\n\n self._date_created = date_created", "def date_created(self, date_created):\n \n self._date_created = date_created", "def created_date(self, created_date):\n \n self._created_date = created_date", "def created_date(self, created_date):\n\n self._created_date = created_date", "def completed_date(self, completed_date):\n self._completed_date = completed_date", "def expire_date(self, expire_date):\n self._expire_date = expire_date" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Gets the delivery_email of this UserBase. The user's real email address. This field is present only if [email address visibility](/help/restrict-visibility-of-email-addresses) is limited and you are an administrator with access to real email addresses under the configured policy.
def delivery_email(self): return self._delivery_email
[ "def get_user_email(self):\n member = self.get_user()\n if member:\n return member.getProperty('email')", "def user_email(self):\n return self._user_email", "def Get_Email( self ):\n\n return self._email_address", "def customer_email(self):\n return self._customer_email", "def get_user_email(self, user: U):\n return user.email", "def email_address(self):\n return self._email_address", "def delivery_identity(self) -> Optional[pulumi.Input['EventSubscriptionDeliveryIdentityArgs']]:\n return pulumi.get(self, \"delivery_identity\")", "def customer_email_address(self):\n return self._customer_email_address", "def email(self):\n # Look for a primary address\n useremail = UserEmail.query.filter_by(user_id=self.id, primary=True).first()\n if useremail:\n return useremail\n # No primary? Maybe there's one that's not set as primary?\n useremail = UserEmail.query.filter_by(user_id=self.id).first()\n if useremail:\n # XXX: Mark at primary. This may or may not be saved depending on\n # whether the request ended in a database commit.\n useremail.primary = True\n return useremail\n # This user has no email address. Return a blank string instead of None\n # to support the common use case, where the caller will use unicode(user.email)\n # to get the email address as a string.\n return u''", "def get_email(self, obj):\n\n request = self.context.get('request')\n if request.user.is_superuser or request.user.id == obj.id:\n return obj.email\n return ''", "def email_address(self):\n return self.__email_address", "def business_email(self):\n return self._business_email", "def email_address(self) -> str:\n return self._email_address", "def email_address(self) -> str:\n return self.__email_address", "def _get_user_email_address(self, request):\n return request.session.get(SESSION_VAR_EMAIL_ADDRESS, not request.user.is_anonymous() and request.user.email)", "def delivery_email(self, delivery_email):\n\n self._delivery_email = delivery_email", "def get_email(self, user_settings=None, user_id=None, login=None, first_name=None, last_name=None,\n allow_multiple=False, display_warnings=True):\n return objects_module.users.get_email(self.khoros_object, user_settings, user_id, login, first_name,\n last_name, allow_multiple, display_warnings)", "def email_address(self) -> str:\n return pulumi.get(self, \"email_address\")", "def delivery(self):\n return self._delivery" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Sets the delivery_email of this UserBase. The user's real email address. This field is present only if [email address visibility](/help/restrict-visibility-of-email-addresses) is limited and you are an administrator with access to real email addresses under the configured policy.
def delivery_email(self, delivery_email): self._delivery_email = delivery_email
[ "def delivery_email(self):\n return self._delivery_email", "def delivery(self, delivery):\n if self.local_vars_configuration.client_side_validation and delivery is None: # noqa: E501\n raise ValueError(\"Invalid value for `delivery`, must not be `None`\") # noqa: E501\n\n self._delivery = delivery", "def user_email(self, user_email):\n self._user_email = user_email", "def delivery_salutation(self, delivery_salutation):\n\n self._delivery_salutation = delivery_salutation", "def user_email(self, user_email):\n\n self._user_email = user_email", "def accountant_email(self, accountant_email):\n\n self._accountant_email = accountant_email", "def delivery_personal(self, delivery_personal):\n\n self._delivery_personal = delivery_personal", "def setEmail(self, email):\n self.email = email", "def email(self, email):\n if self.local_vars_configuration.client_side_validation and email is None: # noqa: E501\n raise ValueError(\"Invalid value for `email`, must not be `None`\") # noqa: E501\n\n self._email = email", "def admin_email(self, admin_email):\n\n self._admin_email = admin_email", "def delivery_country(self, delivery_country):\n\n self._delivery_country = delivery_country", "def customer_email(self, customer_email):\n self._customer_email = customer_email", "def business_owner_email(self, business_owner_email):\n\n self._business_owner_email = business_owner_email", "def recipient_email(self, recipient_email):\n\n self._recipient_email = recipient_email", "def delivery_discount(self, delivery_discount):\n\n self._delivery_discount = delivery_discount", "def change_user_email(sender, **kwargs):\n user = kwargs['user']\n user.email = kwargs['to_email_address'].email\n user.save(update_fields=['email'])", "def business_email(self, business_email):\n\n self._business_email = business_email", "def delivery_company_name(self, delivery_company_name):\n\n self._delivery_company_name = delivery_company_name", "def setEmail(self, *args):\n return _libsbml.ModelCreator_setEmail(self, *args)" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Gets the profile_data of this UserBase. A dictionary containing custom profile field data for the user. Each entry maps the integer ID of a custom profile field in the organization to a dictionary containing the user's data for that field. Generally the data includes just a single `value` key; for those custom profile fields supporting Markdown, a `rendered_value` key will also be present.
def profile_data(self): return self._profile_data
[ "def get_user_profile_raw(self):\n payload = {'access_token': self._lr_object.access.token}\n url = SECURE_API_URL + \"api/v2/userprofile/raw/\"\n return self._lr_object._get_json(url, payload)", "def augment_user_data(self, data):\n user_id = data.get('id')\n if user_id:\n data['name'] = user_id\n data['avatar'] = ''\n user_details = api.users.get_user_details(\n self.context,\n user_id,\n portal_url=self.tools['portal_url'],\n portal_membership=self.tools['portal_membership']\n )\n if user_details:\n data['name'] = user_details['fullname']\n data['avatar'] = user_details['portrait']\n return data", "def get_user_data(self):\n return self.user_data", "def get_my_profile(self):\n\n url = self.api_base_url + \"user/profile\"\n\n try:\n raw_response = self.request_handler.make_request(ApiRequestHandler.GET, url)\n except RequestFailed:\n raise\n\n jsonified_response = json.loads(raw_response.text)\n user_profile = jsonified_response\n\n return user_profile", "def _getProfileFromUser(self):\n # Make sure user is authenticated\n user = endpoints.get_current_user()\n if not user:\n raise endpoints.UnauthorizedException('Authorization required')\n # Get Profile from datastore\n user_id = user.email()\n p_key = ndb.Key(Profile, user_id)\n profile = p_key.get()\n # Create new Profile if not there\n if not profile:\n profile = Profile(\n key = p_key,\n displayName = user.nickname(),\n mainEmail= user.email(),\n teeShirtSize = str(TeeShirtSize.NOT_SPECIFIED),\n )\n profile.put()\n return profile", "def get_user_profile(self):\n return self.user.profile", "def user_data(self, access_token, *args, **kwargs):\n return googleapis_profile(GOOGLEAPIS_PROFILE, access_token)", "def _store_profile_details(self, data):\n profile, __ = GmailProfile.objects.get_or_create(user=self.user)\n profile.family_name = data.get('family_name', '')\n profile.given_name = data.get('given_name', '')\n profile.name = data.get('name', '')\n profile.gender = data.get('gender', '')\n profile.email = data.get('email', '')\n profile.verified_email = data.get('verified_email', None)\n profile.locale = data.get('locale', '')\n profile.google_id = data.get('id', '')\n profile.link = data.get('link', '')\n profile.save()", "def _parse_profile_data (self, netflix_page_data):\n profiles = {};\n important_fields = [\n 'profileName',\n 'isActive',\n 'isAccountOwner',\n 'isKids'\n ]\n # values are accessible via dict (sloppy parsing successfull)\n if type(netflix_page_data) == dict:\n for profile_id in netflix_page_data.get('profiles'):\n if self._is_size_key(key=profile_id) == False and type(netflix_page_data['profiles'][profile_id]) == dict and netflix_page_data['profiles'][profile_id].get('avatar', False) != False:\n profile = {'id': profile_id}\n for important_field in important_fields:\n if important_field == 'profileName':\n profile.update({important_field: HTMLParser.HTMLParser().unescape(netflix_page_data['profiles'][profile_id]['summary'][important_field]).encode('utf8')})\n else:\n profile.update({important_field: netflix_page_data['profiles'][profile_id]['summary'][important_field]})\n avatar_base = netflix_page_data['nf'].get(netflix_page_data['profiles'][profile_id]['summary']['avatarName'], False);\n avatar = 'https://secure.netflix.com/ffe/profiles/avatars_v2/320x320/PICON_029.png' if avatar_base == False else avatar_base['images']['byWidth']['320']['value']\n profile.update({'avatar': avatar, 'isFirstUse': False})\n profiles.update({profile_id: profile})\n return profiles\n\n # values are stored in lists (returned from 
JS parser)\n # TODO: get rid of this christmas tree of doom\n for item in netflix_page_data:\n if 'hasViewedRatingWelcomeModal' in dict(item).keys():\n for profile_id in item:\n if self._is_size_key(key=profile_id) == False and type(item[profile_id]) == dict and item[profile_id].get('avatar', False) != False:\n profile = {'id': profile_id}\n for important_field in important_fields:\n profile.update({important_field: item[profile_id]['summary'][important_field]})\n avatar_base = item['nf'].get(item[profile_id]['summary']['avatarName'], False);\n avatar = 'https://secure.netflix.com/ffe/profiles/avatars_v2/320x320/PICON_029.png' if avatar_base == False else avatar_base['images']['byWidth']['320']['value']\n profile.update({'avatar': avatar})\n profiles.update({profile_id: profile})\n return profiles", "def user_data(self, access_token, *args, **kwargs):\r\n return googleapis_profile(GOOGLEAPIS_PROFILE, access_token)", "def _parse_user_data (self, netflix_page_data):\n user_data = {};\n important_fields = [\n 'authURL',\n 'gpsModel',\n 'guid'\n ]\n\n # values are accessible via dict (sloppy parsing successfull)\n if type(netflix_page_data) == dict:\n for important_field in important_fields:\n user_data.update({important_field: netflix_page_data.get(important_field, '')})\n return user_data\n\n # values are stored in lists (returned from JS parser)\n for item in netflix_page_data:\n if 'memberContext' in dict(item).keys():\n for important_field in important_fields:\n user_data.update({important_field: item['memberContext']['data']['userInfo'][important_field]})\n\n return user_data", "def _getProfileFromUser(self):\n # make sure user is authed\n user = endpoints.get_current_user()\n if not user:\n raise endpoints.UnauthorizedException('Authorization required')\n\n # get Profile from datastore\n user_id = getUserId(user)\n p_key = ndb.Key(Profile, user_id)\n profile = p_key.get()\n # create new Profile if not there\n if not profile:\n profile = Profile(\n key = p_key,\n displayName = user.nickname(),\n mainEmail= user.email(),\n teeShirtSize = str(TeeShirtSize.NOT_SPECIFIED),\n )\n profile.put()\n\n return profile # return Profile", "def map_profile_fields(data, fields):\n profile = {}\n for dst, src in fields.items():\n if callable(src):\n value = src(data)\n else:\n value = data.get(src)\n\n if value is not None and value != '':\n profile[dst] = value\n\n return profile", "def set_profile_data(data, content_user):\n # create empty profile if none exists\n profile, _created = UserProfile.objects.get_or_create(user=content_user)\n location = \"\"\n if profile.city:\n location = profile.city\n if profile.country:\n if profile.city:\n location += \", \"\n location += profile.country\n forms = content_user.xforms.filter(shared__exact=1)\n num_forms = forms.count()\n user_instances = profile.num_of_submissions\n home_page = profile.home_page\n if home_page and re.match(\"http\", home_page) is None:\n home_page = f\"http://{home_page}\"\n\n data.update(\n {\n \"location\": location,\n \"user_instances\": user_instances,\n \"home_page\": home_page,\n \"num_forms\": num_forms,\n \"forms\": forms,\n \"profile\": profile,\n \"content_user\": content_user,\n }\n )", "def user_data(self, access_token, *args, **kwargs):\n url = GOOGLE_APPENGINE_PROFILE_V2\n return self.get_json(url, headers={\n 'Authorization': 'Bearer ' + access_token\n })", "def get_user_data(self):\n if self.user_data:\n pass\n else:\n self.load()\n return deepcopy(self.user_data)", "def get_user_details(self, response):\n user = 
response\n\n # Rename to what psa expects\n fullname = user.get('name', None)\n if fullname:\n user['fullname'] = fullname\n user.pop('name')\n\n # Get profile photo url if any\n profilephoto_id = user.get('profilephoto', None)\n if profilephoto_id:\n profilephoto_url = '{}/userinfo/v1/user/media/{}'.format(self.API_URL, profilephoto_id)\n user['profilephoto_url'] = profilephoto_url\n\n return user", "def user_data(self, access_token, *args, **kwargs):\n url = GOOGLE_APPENGINE_PROFILE_V1\n auth = self.oauth_auth(access_token)\n return self.get_json(url,\n auth=auth, params=auth\n )", "def parse(self):\n details = self.details()\n\n return Profile(\n book_id=self.book_id,\n title=self.title(),\n user_id=self.user_id(),\n username=self.username(),\n summary=self.summary(),\n published=self.published(),\n updated=self.updated(),\n **details\n )" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Sets the profile_data of this UserBase. A dictionary containing custom profile field data for the user. Each entry maps the integer ID of a custom profile field in the organization to a dictionary containing the user's data for that field. Generally the data includes just a single `value` key; for those custom profile fields supporting Markdown, a `rendered_value` key will also be present.
def profile_data(self, profile_data): self._profile_data = profile_data
[ "def set_profile_data(data, content_user):\n # create empty profile if none exists\n profile, _created = UserProfile.objects.get_or_create(user=content_user)\n location = \"\"\n if profile.city:\n location = profile.city\n if profile.country:\n if profile.city:\n location += \", \"\n location += profile.country\n forms = content_user.xforms.filter(shared__exact=1)\n num_forms = forms.count()\n user_instances = profile.num_of_submissions\n home_page = profile.home_page\n if home_page and re.match(\"http\", home_page) is None:\n home_page = f\"http://{home_page}\"\n\n data.update(\n {\n \"location\": location,\n \"user_instances\": user_instances,\n \"home_page\": home_page,\n \"num_forms\": num_forms,\n \"forms\": forms,\n \"profile\": profile,\n \"content_user\": content_user,\n }\n )", "def _store_profile_details(self, data):\n profile, __ = GmailProfile.objects.get_or_create(user=self.user)\n profile.family_name = data.get('family_name', '')\n profile.given_name = data.get('given_name', '')\n profile.name = data.get('name', '')\n profile.gender = data.get('gender', '')\n profile.email = data.get('email', '')\n profile.verified_email = data.get('verified_email', None)\n profile.locale = data.get('locale', '')\n profile.google_id = data.get('id', '')\n profile.link = data.get('link', '')\n profile.save()", "def augment_user_data(self, data):\n user_id = data.get('id')\n if user_id:\n data['name'] = user_id\n data['avatar'] = ''\n user_details = api.users.get_user_details(\n self.context,\n user_id,\n portal_url=self.tools['portal_url'],\n portal_membership=self.tools['portal_membership']\n )\n if user_details:\n data['name'] = user_details['fullname']\n data['avatar'] = user_details['portrait']\n return data", "def update_profile(self, user_entity, profile, email):\n\n # set user properties from response\n user_entity.user_id = profile['id']\n user_entity.given_name = profile['given_name']\n user_entity.family_name = profile['family_name']\n user_entity.profile_link = profile['link']\n # set user's email address\n user_entity.user_email = email\n # store user entity\n return user_entity.put()", "def populate_edit_profile_fields(form, user):\n form.email.data = user.email\n form.password.data = user.password\n form.confirmed_password.data = user.password\n form.image_url.data = user.image_url", "def custom_profile_fields(self, custom_profile_fields):\n\n self._custom_profile_fields = custom_profile_fields", "def storeUserProfile(self, user_id, profile_json):\n pass", "def _update_profile_only(self, data, guild):\n try:\n profile = self.guild_profiles[guild.id]\n except KeyError:\n self.guild_profiles[guild.id] = GuildProfile.from_data(data)\n guild.users[self.id] = self\n else:\n profile._update_attributes(data)", "def _parse_profile_data (self, netflix_page_data):\n profiles = {};\n important_fields = [\n 'profileName',\n 'isActive',\n 'isAccountOwner',\n 'isKids'\n ]\n # values are accessible via dict (sloppy parsing successfull)\n if type(netflix_page_data) == dict:\n for profile_id in netflix_page_data.get('profiles'):\n if self._is_size_key(key=profile_id) == False and type(netflix_page_data['profiles'][profile_id]) == dict and netflix_page_data['profiles'][profile_id].get('avatar', False) != False:\n profile = {'id': profile_id}\n for important_field in important_fields:\n if important_field == 'profileName':\n profile.update({important_field: HTMLParser.HTMLParser().unescape(netflix_page_data['profiles'][profile_id]['summary'][important_field]).encode('utf8')})\n else:\n 
profile.update({important_field: netflix_page_data['profiles'][profile_id]['summary'][important_field]})\n avatar_base = netflix_page_data['nf'].get(netflix_page_data['profiles'][profile_id]['summary']['avatarName'], False);\n avatar = 'https://secure.netflix.com/ffe/profiles/avatars_v2/320x320/PICON_029.png' if avatar_base == False else avatar_base['images']['byWidth']['320']['value']\n profile.update({'avatar': avatar, 'isFirstUse': False})\n profiles.update({profile_id: profile})\n return profiles\n\n # values are stored in lists (returned from JS parser)\n # TODO: get rid of this christmas tree of doom\n for item in netflix_page_data:\n if 'hasViewedRatingWelcomeModal' in dict(item).keys():\n for profile_id in item:\n if self._is_size_key(key=profile_id) == False and type(item[profile_id]) == dict and item[profile_id].get('avatar', False) != False:\n profile = {'id': profile_id}\n for important_field in important_fields:\n profile.update({important_field: item[profile_id]['summary'][important_field]})\n avatar_base = item['nf'].get(item[profile_id]['summary']['avatarName'], False);\n avatar = 'https://secure.netflix.com/ffe/profiles/avatars_v2/320x320/PICON_029.png' if avatar_base == False else avatar_base['images']['byWidth']['320']['value']\n profile.update({'avatar': avatar})\n profiles.update({profile_id: profile})\n return profiles", "def populate_user_info(user):\n user_profile = Profile.objects.get_profile_by_user(user.email)\n if user_profile:\n user.contact_email = user_profile.contact_email\n user.name = user_profile.nickname\n else:\n user.contact_email = ''\n user.name = ''", "def save(self, profile_callback=None):\n\n # First, save the parent form\n new_user = super(BodbRegistrationForm, self).save(profile_callback=profile_callback)\n\n # Update user with first, last names\n new_user.first_name = self.cleaned_data['first_name']\n new_user.last_name = self.cleaned_data['last_name']\n new_user.save()\n\n # Update profile with affiliation\n profile = new_user.get_profile()\n profile.affiliation = self.cleaned_data['affiliation']\n profile.save()\n\n cache.set('%d.profile' % new_user.id, profile)\n\n return new_user", "def map_profile_fields(data, fields):\n profile = {}\n for dst, src in fields.items():\n if callable(src):\n value = src(data)\n else:\n value = data.get(src)\n\n if value is not None and value != '':\n profile[dst] = value\n\n return profile", "def __init__(self, data=None, **kw):\n def _get_class_by_id(profile_id):\n from solariat_bottle.db.user_profiles.social_profile import DELIMITER, TwitterProfile, FacebookProfile\n pos = unicode(profile_id).rfind(DELIMITER) + 1\n if pos == 0:\n return self.__class__\n platform = None\n try:\n index = int(profile_id[pos:])\n except ValueError:\n logger.info(u\"Could not obtain platform from profile id: {}\".format(profile_id))\n else:\n platform = PLATFORM_BY_INDEX.get(index)\n class_ = {\n TwitterProfile.platform: TwitterProfile,\n FacebookProfile.platform: FacebookProfile\n }.get(platform, self.__class__)\n return class_\n\n if data:\n profile_id = data.get('_id')\n else:\n profile_id = kw.get('id')\n if isinstance(profile_id, basestring):\n self.__class__ = _get_class_by_id(profile_id)\n super(UserProfile, self).__init__(data, **kw)", "def set_user_config(self, data):\n config = self.read_config_obj(self.account_file)\n for key, value in data.items():\n config.set(self.user, str(key), value)\n\n self.write_config(self.account_file, config)", "def user_data(self, access_token, *args, **kwargs):\n return 
googleapis_profile(GOOGLEAPIS_PROFILE, access_token)", "def set_user_info(self, usrs):\r\n logger.info('Starting set user profile info')\r\n user = choice(usrs)\r\n self.title = user['title']\r\n self.fname = user['fname']\r\n self.lname = user['lname']\r\n self.email = user['email']\r\n self.password = user['password']\r\n self.dob = user['dob']\r\n self.company = user['company']\r\n self.address = user['address']\r\n self.city = user['city']\r\n self.postalcode = user['postalcode']\r\n self.phone = user['phone']\r\n logger.info('Ending set user profile info')", "def user_data(self, access_token, *args, **kwargs):\r\n return googleapis_profile(GOOGLEAPIS_PROFILE, access_token)", "def get_context_data(self, **kwargs):\n context = super().get_context_data(**kwargs)\n if 'profile_pk' in kwargs:\n profile_user = get_object_or_404(User, pk=kwargs['profile_pk'])\n else:\n profile_user = self.request.user\n context['user'] = profile_user\n context['projects'] = profile_user.get_user_projects()\n context['jobs'] = profile_user.get_user_jobs()\n context['skills'] = profile_user.get_user_skills()\n return context", "def user_custom_data(self, user_custom_data):\n\n self._user_custom_data = user_custom_data" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Check the northbound queue for RPCs queued by GUI or SOAP requests. A client should connect when triggered by a CONNECTION_REQUEST, and any RPCs queued by the northbound will then be added to the session queue by this function.
def add_nb_queue_to_session_queue(self, session):
    rpc_list = []
    client_id = get_element('cid', session['client'])

    if client_id is not None and client_id in RPCS.Northbound_Queue:
        # Check if all commands have been serviced
        if RPCS.Northbound_Queue[client_id]:
            # Get first request in the client queue, in the form:
            # (Client_COMMAND, RESPONSE STREAM)
            nb_request = RPCS.Northbound_Queue[client_id].pop(0)
            # Parse and queue the request
            client_command = nb_request[0]
            rpc_list.append(client_command)
            # Insert nb commands at the front of the session queue
            session['queue'] = rpc_list + session['queue']
            # Store the stream which expects the client response in the session
            session['nb_response_stream'] = nb_request[1]
[ "def check_in(self):\n etree = self._encapsulate_request(self._generate_ping())\n self.zmq_scheduler_request_queue.put_nowait(etree)", "def _check_queue(self):\n self._process_incoming_queue_messages()\n self._root.after(200, self._check_queue)", "def queryPrivilegedQueue(self, request):\n pass", "def checkRequests( self, reqClient ):\n result = reqClient.readRequestsForJobs( [self.jobID] )\n if not result['OK']:\n raise RuntimeError( \"Failed to check Requests: %s \" % result['Message'] )\n if self.jobID in result['Value']['Successful']:\n request = result['Value']['Successful'][self.jobID]\n requestID = request.RequestID\n dbStatus = reqClient.getRequestStatus( requestID ).get( 'Value', 'Unknown' )\n self.pendingRequest = dbStatus not in (\"Done\",\"Canceled\")", "def check_queue(self):\n while True:\n try:\n cmd, arg = self.ircmq.get_nowait()\n except Empty:\n break\n else:\n if cmd == \"pubmsg\":\n logger.info(\"Sending [{}] {}\".format(self.channel, arg))\n self.connection.privmsg(self.channel, arg)\n elif cmd == \"privmsg\":\n logger.info(\"Sending [{}] {}\".format(arg[0], arg[1]))\n self.connection.privmsg(arg[0], arg[1])", "def queryPrivilegedQueue(self, request):\n self.send_queryPrivilegedQueue(request)\n return self.recv_queryPrivilegedQueue()", "def _check_comm_reply(self):\n if len(self._pending_comms) == 0:\n return\n for comm in self._pending_comms.values():\n self._notify_comm_ready(comm)\n self.kernel.io_loop.call_later(1, self._check_comm_reply)", "def run(self, session):\n rpc = None\n if session['client']['event'] == 'CONNECTION_REQUEST':\n self.add_nb_queue_to_session_queue(session)\n\n while rpc is None and session['queue']:\n try:\n # Loop through queue until there is an RPC to send, or until\n # there are no more RPCs queued, or until an error occurs\n session['rpc']['method'] = session['queue'].pop(0)\n rpc = session['rpc']['method'].send_request(session)\n except ClientMethodException:\n # Failed to send this RPC, move on to the next\n LOG.debug(\"Error during preparation of client method: %s\" % str(session['rpc']['method']))\n continue\n except Exception:\n traceback.print_exc()\n LOG.debug(\"Unexpected error during preparation of client method: %s\" % str(session['rpc']['method']))\n return RPCS.SendingRpc, None\n\n if rpc is not None:\n # RPC ready: Send it and ExpectResponse\n return RPCS.ExpectResponse, rpc\n else:\n # If there are no (more) RPCs to send, log ok\n # and send done, indicating communication is complete\n session['log'] = {'rc': 'ok', 'msg': ''}\n session['db'].clear_dirtyflag(session['client']['cid'])\n return RPCS.Listening, {'method': 'done'}", "def on_bindok(self, unused_frame):\n\n self.logger.info('queue bound')\n if self.acked:\n # if we wish to care about the servers replies, this is were we set up things\n self.logger.info('issuing confirm.select RPC')\n self._channel.confirm_delivery(self.on_delivery_confirmation)\n\n if self.sender:\n pass\n self.send()\n else:\n self.start_consuming()", "def handleRequest(self, conn):\n\n session = self._clientSession[conn]\n\n self._print(\"Handling new request from %s\" % session)\n self._requestQueue.put(session)\n if session in self._activeSession:\n #This should never happen... 
unless something goes really bad!\n raise ServerException(\"Handle request fail because session %s is \"\\\n \"already active\" % session)\n else:\n self._activeSession.append(session)", "def test_ipcrm_queues_not_isntalled(): # pragma: windows\n IPCComm.ipcrm_queues()", "def check_pool(self):\n if self.conn.queue_len() < MAX_PROXIES:\n return True\n return False", "def _accept_requests(self):\n try:\n request = self.request_queue.get(timeout=self.REQUESTS_TIMEOUT)\n self.logger.debug(\"Adding new requests\")\n for _ in xrange(self.REQUESTS_MAX_AMOUNT):\n self._requests.append(request)\n request = self.request_queue.get_nowait()\n\n except EmptyQueueError:\n return\n\n self.logger.debug(\"Done adding new requests\")", "def on_bindok(self, unused_frame):\n self.logger.debug('queue bound')\n self.start_consuming()", "async def processClientInBox(self):\n while self.clientInBox:\n m = self.clientInBox.popleft()\n req, frm = m\n logger.debug(\"{} processing {} request {}\".\n format(self.clientstack.name, frm, req),\n extra={\"cli\": True,\n \"tags\": [\"node-msg-processing\"]})\n\n try:\n await self.clientMsgRouter.handle(m)\n except InvalidClientMessageException as ex:\n self.handleInvalidClientMsg(ex, m)", "def check_session_queue_full(self) -> None:\n if (\n self.project.sessions_queued is None\n ): # no limit set so always return (success)\n return\n\n queued_request_count = self.project.session_requests.count()\n if queued_request_count >= self.project.sessions_queued:\n raise SessionException(\n \"There are already {}/{} requests for sessions for this project.\".format(\n queued_request_count, self.project.sessions_queued\n )\n )", "def readQ(self):\r\n try:\r\n q = self.inQ.get(timeout=GUI_QUEUE_RATE)\r\n #q = self.inQ.get_nowait()\r\n except queue.Empty:\r\n pass\r\n else:\r\n request = q[0]\r\n if request == \"ERROR\":\r\n self.msg_Error(q)\r\n elif request == \"CONNECTED\":\r\n self.msg_Connected()\r\n elif request == \"CONNECTION_ERROR\":\r\n self.msg_ConnectionError(q)\r\n elif request == \"ACCOUNT_UPDATE\":\r\n self.msg_AccountUpdate(q)\r\n elif request == \"PORTFOLIO_UPDATE\":\r\n self.msg_PortfolioUpdate(q)\r\n elif request == \"POSITION_UPDATE\":\r\n self.msg_PositionUpdate(q)\r\n elif request == \"ACC_PORT_UPDATE_STOPPED\":\r\n self.msg_AccPortStoped() # includes account updates\r\n elif request == \"COMPANY_INFO_END\":\r\n self.msg_CompanyInfoEnd(q)\r\n elif request == \"NEWS\":\r\n self.msg_NewsUpdate(q)\r\n elif request == \"ORDER\":\r\n self.msg_OrderInfo(q)\r\n elif request == \"IB_QUEUE_SIZE\":\r\n self.ibQueue_size = q[1]", "def listQueue(self, request):\n pass", "def wtc_request(self,request,pendingTimeout=MAX_PENDING_DELTA,responseTimeout=5):\n\t\ttry:\n\t\t\tfrom qpaceControl import QPCONTROL as qp\n\t\texcept ImportError:\n\t\t\treturn False\n\n\t\tpendingMAXCount = pendingTimeout // responseTimeout\n\t\tpendingCount = 0\n\t\tresponse = qp['PENDING']\n\t\twhile response is qp['PENDING']:# or response is None: # None implies timeout\n\t\t\tif pendingCount > pendingMAXCount:\n\t\t\t\treturn False\n\t\t\tself.queue.enqueue(request)\n\t\t\tresponse = self.queue.waitForResponse(responseTimeout)\n\t\t\tpendingCount += 1\n\n\t\treturn response is qp['ACCEPTED']" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
When NB requests are queued, put them in the session queue. If there are RPCs in the queue to be sent, return the prepared RPC and move to ExpectResponse. Otherwise, go to Listening and send a 'done' RPC.
def run(self, session): rpc = None if session['client']['event'] == 'CONNECTION_REQUEST': self.add_nb_queue_to_session_queue(session) while rpc is None and session['queue']: try: # Loop through queue until there is an RPC to send, or until # there are no more RPCs queued, or until an error occurs session['rpc']['method'] = session['queue'].pop(0) rpc = session['rpc']['method'].send_request(session) except ClientMethodException: # Failed to send this RPC, move on to the next LOG.debug("Error during preparation of client method: %s" % str(session['rpc']['method'])) continue except Exception: traceback.print_exc() LOG.debug("Unexpected error during preparation of client method: %s" % str(session['rpc']['method'])) return RPCS.SendingRpc, None if rpc is not None: # RPC ready: Send it and ExpectResponse return RPCS.ExpectResponse, rpc else: # If there are no (more) RPCs to send, log ok # and send done, indicating communication is complete session['log'] = {'rc': 'ok', 'msg': ''} session['db'].clear_dirtyflag(session['client']['cid']) return RPCS.Listening, {'method': 'done'}
[ "def add_nb_queue_to_session_queue(self, session):\n rpc_list = []\n client_id = get_element('cid', session['client'])\n\n if client_id is not None and client_id in RPCS.Northbound_Queue:\n # Check if all commands have been serviced\n if RPCS.Northbound_Queue[client_id]:\n # Get first request in the client queue, in the form:\n # (Client_COMMAND, RESPONSE STREAM)\n # TODO pop might be unresolved\n nb_request = RPCS.Northbound_Queue[client_id].pop(0)\n # Parse and queue request(s)\n client_command = nb_request[0]\n rpc_list.append(client_command)\n # Insert nb commands to the front of queue\n session['queue'] = queued_nb_methods + session['queue']\n # Store stream which expects the client response in the session\n session['nb_response_stream'] = nb_request[1]", "def dispatch(self):\n # Indicate that internal queue might change\n self._durty = 1\n\n # Resolve destination hostnames to IP numbers for later comparation\n try:\n self._requests = map(lambda (dst, req): \\\n ((socket.gethostbyname(dst[0]), \\\n dst[1]), req),\\\n self._requests)\n\n except socket.error, why:\n raise BadArgument(why)\n\n # Initialize a list of responses\n self._responses = map(lambda (dst, req): (dst, None), self._requests)\n\n # Initialize retry counter\n retries = self.retries\n \n while retries:\n # Send out requests and prepare for waiting for replies\n for idx in range(len(self._requests)):\n # Skip completed session\n (src, rsp) = self._responses[idx]\n if rsp is not None:\n continue\n\n (dst, req) = self._requests[idx]\n\n try:\n self.transport.send(req.encode(), dst)\n \n except role.Error:\n # Ignore transport errors\n pass\n\n # Collect responses from agents\n for (src, rsp) in self._responses:\n # Skip responded entities\n if rsp is not None:\n continue\n\n # XXX Probably select() based multiplexing would better\n # serve timeouts...\n \n # Wait for response\n (response, src) = self.transport.receive()\n\n # Stop on timeout\n if response is None:\n retries = retries - 1\n break\n\n # Decode response\n (rsp, rest) = v2c.decode(response)\n\n # Try to match response message against pending\n # request messages\n for idx in range(len(self._requests)):\n if (src, rsp) == self._requests[idx]:\n self._responses[idx] = (src, rsp)\n break\n else:\n # Everyone responded\n break\n \n # Replace list of requests with list of replies\n self._requests = self._responses", "def answer_waiting_call(self) -> None:", "def request(self, method, args, response_cb):\n request_id = self._next_request_id\n self._next_request_id = request_id + 1\n self._msgpack_stream.send([0, request_id, method, args])\n self._pending_requests[request_id] = response_cb", "def testQueueing(self):\n session_id = rdfvalue.SessionID(\"aff4:/flows/W:test\")\n\n request = rdfvalue.RequestState(\n id=1, client_id=self.client_id,\n next_state=\"TestState\",\n session_id=session_id)\n\n with queue_manager.QueueManager(token=self.token) as manager:\n manager.QueueRequest(session_id, request)\n\n # We only have one unanswered request on the queue.\n all_requests = list(manager.FetchRequestsAndResponses(session_id))\n self.assertEqual(len(all_requests), 1)\n self.assertEqual(all_requests[0], (request, []))\n\n # FetchCompletedRequests should return nothing now.\n self.assertEqual(list(manager.FetchCompletedRequests(session_id)), [])\n\n # Now queue more requests and responses:\n with queue_manager.QueueManager(token=self.token) as manager:\n # Start with request 2 - leave request 1 un-responded to.\n for request_id in range(2, 5):\n request = 
rdfvalue.RequestState(\n id=request_id, client_id=self.client_id,\n next_state=\"TestState\", session_id=session_id)\n\n manager.QueueRequest(session_id, request)\n\n response_id = None\n for response_id in range(1, 10):\n # Normal message.\n manager.QueueResponse(session_id, rdfvalue.GrrMessage(\n request_id=request_id, response_id=response_id))\n\n # And a status message.\n manager.QueueResponse(session_id, rdfvalue.GrrMessage(\n request_id=request_id, response_id=response_id+1,\n type=rdfvalue.GrrMessage.Type.STATUS))\n\n completed_requests = list(manager.FetchCompletedRequests(session_id))\n self.assertEqual(len(completed_requests), 3)\n\n # First completed message is request_id = 2 with 10 responses.\n self.assertEqual(completed_requests[0][0].id, 2)\n\n # Last message is the status message.\n self.assertEqual(completed_requests[0][-1].type,\n rdfvalue.GrrMessage.Type.STATUS)\n self.assertEqual(completed_requests[0][-1].response_id, 10)\n\n # Now fetch all the completed responses. Set the limit so we only fetch some\n # of the responses.\n completed_response = list(manager.FetchCompletedResponses(session_id))\n self.assertEqual(len(completed_response), 3)\n for i, (request, responses) in enumerate(completed_response, 2):\n self.assertEqual(request.id, i)\n self.assertEqual(len(responses), 10)\n\n # Now check if the limit is enforced. The limit refers to the total number\n # of responses to return. We ask for maximum 15 responses, so we should get\n # a single request with 10 responses (since 2 requests will exceed the\n # limit).\n more_data = False\n i = 0\n try:\n partial_response = manager.FetchCompletedResponses(session_id, limit=15)\n for i, (request, responses) in enumerate(partial_response, 2):\n self.assertEqual(request.id, i)\n self.assertEqual(len(responses), 10)\n except queue_manager.MoreDataException:\n more_data = True\n\n # Returns the first request that is completed.\n self.assertEqual(i, 3)\n\n # Make sure the manager told us that more data is available.\n self.assertTrue(more_data)", "def send_pending_requests(self):\n while self.pending_requests:\n stream_id = self.pending_requests.popleft()\n\n log.debug(\"initiating request, new stream %s\", stream_id)\n\n # send headers immediately rather than waiting for data. 
this ensures\n # streams are established with increasing stream ids regardless of when\n # the request data is available\n self.send_headers(stream_id, immediate=True)\n self.send_data(stream_id)", "def __call__(self):\n hub.sleep(random.randint(1, self.interval))\n while True:\n self.send_req()\n self.reply_pending = True\n hub.sleep(self.interval)\n if self.reply_pending:\n self.no_response()", "def _finish_pending_requests(self) -> None:\n while True:\n num_q, ok_list, err_list = self._multi.info_read()\n for curl in ok_list:\n self._finish(curl)\n for curl, errnum, errmsg in err_list:\n self._finish(curl, errnum, errmsg)\n if num_q == 0:\n break\n self._process_queue()", "def _finish_pending_requests(self):\r\n while True:\r\n num_q, ok_list, err_list = self._multi.info_read()\r\n for curl in ok_list:\r\n self._finish(curl)\r\n for curl, errnum, errmsg in err_list:\r\n self._finish(curl, errnum, errmsg)\r\n if num_q == 0:\r\n break\r\n self._process_queue()", "def test_process_reply0(self):\n req1 = FakeRequest(1, True) # expired\n req2 = FakeRequest(2, False) # not expired\n req3 = FakeRequest(3, True)\n req4 = FakeRequest(4, False)\n req5 = FakeRequest(5, False)\n\n self.request_buffer.append(req1)\n self.request_buffer.append(req2)\n self.request_buffer.append(req3)\n self.request_buffer.append(req4)\n self.request_buffer.append(req5)\n\n reply = FakeReply(id=6)\n\n self.request_buffer.process_reply(reply)\n self.assertEqual(len(self.request_buffer.requests), 5)", "def testSendNextMessage(self):\n self.mgr.isGoproBusy = True\n self.mgr.lastRequestSent = monotonic.monotonic()\n self.mgr.queueMsg(3)\n self.mgr.queueMsg(2)\n self.mgr.queueMsg(1)\n self.mgr.processMsgQueue()\n self.v.send_mavlink.assert_called_with(3)\n self.assertEqual( self.mgr.msgQueue.qsize(), 2)", "def _sendQueued(self):\n if not self.isPeerStateConnected():\n warning(\"_sendQueued: current state is '%s', not 'connected'\" % self.state)\n for spkt in self.queued:\n self.send(spkt)", "def on_bindok(self, unused_frame):\n\n self.logger.info('queue bound')\n if self.acked:\n # if we wish to care about the servers replies, this is were we set up things\n self.logger.info('issuing confirm.select RPC')\n self._channel.confirm_delivery(self.on_delivery_confirmation)\n\n if self.sender:\n pass\n self.send()\n else:\n self.start_consuming()", "def on_iteration(self):\n self.send_pending_requests()\n super().on_iteration()", "async def handle_request():\n nonlocal process, process_task\n logger.debug(\"Waiting for request\")\n request = await queue.get()\n\n if request.name == RequestTypes.run_process:\n assert process is None, \"Process must not have been started\"\n process_state = request.contents\n process = self._start_callback(process_state)\n process_task = asyncio.create_task(process.wait())\n pid = process.pid\n logger.debug(\"Running process in handler: %d\", pid)\n await connection.send(Response(pid))\n\n elif request.name == RequestTypes.wait_process_done:\n assert process is not None, \"Process must have been started\"\n logger.debug(\"Waiting for process to exit\")\n # We don't want the process.wait() task to be cancelled in case\n # our connection gets broken.\n exitcode = await asyncio.shield(process_task)\n logger.debug(\"Result: %d\", exitcode)\n await connection.send(Response(exitcode))\n\n return True", "def send_async_requests(self):\n\t\tif len(self._async_http_requests) <= 0:\n\t\t\treturn ()\n\n\t\tif self._session is None:\n\t\t\tself.start_new_session()\n\t\tsession = self._session\n\n\t\tresponses 
= [None] * len(self._async_http_requests)\n\t\t\":type : list\"\n\n\t\tfutures = []\n\t\tfor req, uri, host, auth, decode, ignored in self._async_http_requests:\n\t\t\tif host is None:\n\t\t\t\thost = self._host\n\t\t\t_log_http_request(req, uri, host, auth, self.log_full_request)\n\t\t\tf = self._async_executor.submit(session.send, req)\n\t\t\t# mini data-structure, Tuple[done_yet, future]\n\t\t\tfutures.append((False, f, decode, ignored))\n\t\tself._async_http_requests = []\n\n\t\t# now wait for them to complete\n\t\twhile len([x for x in futures if not x[0]]) > 0:\n\t\t\tnext_futures = []\n\t\t\tfor idx, f in enumerate(futures):\n\t\t\t\tdone_now = f[0]\n\t\t\t\tif not done_now:\n\t\t\t\t\tif f[1].done():\n\t\t\t\t\t\tr = f[1].result()\n\t\t\t\t\t\t_log_http_response(r, self.log_full_response)\n\t\t\t\t\t\tresponses[idx] = (r, f[2], f[3])\n\t\t\t\t\t\tdone_now = True\n\t\t\t\tnext_futures.append((done_now, f[1], f[2], f[3]))\n\t\t\tfutures = next_futures\n\t\t\ttime.sleep(0.01)\n\t\t# they are now done\n\n\t\t# we need to re-raise any exceptions that occur\n\t\tbad_responses = []\n\t\tfor idx, resp_items in enumerate(responses):\n\t\t\tresp, decode, ignored = resp_items\n\t\t\tif resp.status_code not in ignored:\n\t\t\t\ttry:\n\t\t\t\t\tresp.raise_for_status()\n\t\t\t\texcept requests.HTTPError as e:\n\t\t\t\t\t_log.exception(\"HTTPError in request #\" + str(idx) + \": \" + str(e))\n\t\t\t\t\tbad_responses.append(idx)\n\t\tif len(bad_responses) > 0:\n\t\t\tself._async_transforms = []\n\t\t\traise AsyncHTTPError(bad_responses)\n\n\t\t# finally, call the transform function on each one\n\t\ttransformed = []\n\t\tfor r_items, xform in zip(responses, self._async_transforms):\n\t\t\tr, decode, ignored = r_items\n\t\t\tdata = None\n\t\t\tif r.content is not None:\n\t\t\t\tif decode == 'text':\n\t\t\t\t\tdata = r.text\n\t\t\t\telif decode == 'json':\n\t\t\t\t\tdata = r.json(parse_float=decimal.Decimal)\n\t\t\t\telif decode == 'binary':\n\t\t\t\t\tdata = r.content\n\t\t\t\telse:\n\t\t\t\t\traise ValueError(\"Bad response_payload encoding: \" + decode)\n\t\t\t\tdata = xform(data)\n\t\t\ttransformed.append(data)\n\t\tself._async_transforms = []\n\t\treturn transformed", "def _listen(self):\n while True:\n func, args, kwargs = self._call_queue.get()\n print(\"Ros process is calling\", func)\n self._return_queue.put(getattr(self, func)(*args, **kwargs))", "def _handle_pending(self):\r\n if not self.pending:\r\n self._post_message('')\r\n return\r\n info, desired = self.pending\r\n if desired and self.plugins[desired].busy:\r\n return\r\n self.busy = True\r\n\r\n if desired:\r\n plugins = [self.plugins[desired]]\r\n elif info.name == 'definition' and not info.editor.is_python():\r\n plugins = [p for p in self.plugins.values() if not p.busy]\r\n else:\r\n # use all but the fallback\r\n plugins = [p for p in list(self.plugins.values())[:-1] if not p.busy]\r\n\r\n self.request = RequestHandler(info, plugins)\r\n self.request.introspection_complete.connect(\r\n self._introspection_complete)\r\n self.pending = None", "def requests_notification_callback(msg_queue_in, msg_queue_out):\n log_name = '{0} :: {1}'.format(__name__,\n requests_notification_callback.__name__)\n logging.debug('{0} - STARTING...'.format(log_name))\n\n # TODO - potentially extend with an in-memory cache\n job_list = OrderedDict()\n while 1:\n\n try:\n msg = msg_queue_in.get(True)\n except IOError as e:\n logging.error(__name__ + ' :: Could not block '\n 'on in queue: \"{0}\"'.format(e.message))\n sleep(1)\n continue\n\n try:\n type = 
msg[0]\n except (KeyError, ValueError):\n logging.error(log_name + ' - No valid type ' \\\n '{0}'.format(str(msg)))\n continue\n\n # Init request\n if type == 0:\n try:\n job_list[msg[1]] = [True, msg[2]]\n logging.debug(log_name + ' - Initialize Request: ' \\\n '{0}.'.format(str(msg)))\n except Exception:\n logging.error(log_name + ' - Initialize Request' \\\n ' failed: {0}'.format(str(msg)))\n\n # Flag request complete - leave on queue\n elif type == 1:\n try:\n job_list[msg[1]][0] = False\n logging.debug(log_name + ' - Set request finished: ' \\\n '{0}.\\n'.format(str(msg)))\n except Exception:\n logging.error(log_name + ' - Set request finished failed: ' \\\n '{0}\\n'.format(str(msg)))\n\n # Is the key in the cache and running?\n elif type == 2:\n try:\n if msg[1] in job_list:\n msg_queue_out.put([job_list[msg[1]][0]], True)\n else:\n msg_queue_out.put([False], True)\n logging.debug(log_name + ' - Get request alive: ' \\\n '{0}.'.format(str(msg)))\n except (KeyError, ValueError):\n logging.error(log_name + ' - Get request alive failed: ' \\\n '{0}'.format(str(msg)))\n\n # Get keys\n elif type == 3:\n msg_queue_out.put(job_list.keys(), True)\n\n # Get url\n elif type == 4:\n try:\n if msg[1] in job_list:\n msg_queue_out.put([job_list[msg[1]][1]], True)\n else:\n logging.error(log_name + ' - Get URL failed: {0}'.\n format(str(msg)))\n except (KeyError, ValueError):\n logging.error(log_name + ' - Get URL failed: {0}'.format(str(msg)))\n else:\n logging.error(log_name + ' - Bad message: {0}'.format(str(msg)))\n\n logging.debug('{0} - SHUTTING DOWN...'.format(log_name))" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
This function determines the common supported protocol version. The result is a version supported by the RPCS server whose major number lies in the range between the first integer of the earliest supported protocol version and the first integer of the latest supported protocol version.
def determine_supported_protocol(self, earliest, latest): earliest = int(earliest.split('.')[0]) latest = int(latest.split('.')[0]) if earliest <= latest: supported = range(earliest, latest + 1) for version in (reversed(supported)): if version in RPCS.SUPPORTED_PROTOCOL_VERSIONS: return str(version) # If no common protocol version is found, raise fatal error raise ClientRequestError('NoValidProtocolVersionInCommon')
[ "def compare_protocol_versions(self, session):\n # First parse protocol version strings to check for invalid formatting\n invalid_string = self.parse_protocol_version(\n [self.earliest_protocol_version, self.latest_protocol_version])\n if invalid_string is not None:\n # Error during protocol string parsing\n data = ('earliest_protocol_version'\n if invalid_string == self.earliest_protocol_version else 'latest_protocol_version')\n raise ClientRequestError('InvalidParameterValue', data=data)\n\n # Check if protocol version is supported and define the one to use\n self.protocol_version = self.determine_supported_protocol(\n self.earliest_protocol_version, self.latest_protocol_version)", "def protocol(ver):\r\n if ver == 1:\r\n return 1\r\n\r\n if ver == 2:\r\n return 2\r\n\r\n\r\n raise ValueError", "def get_protocol_version(self):\n return PROTOCOLVER", "def __FindSupportedVersion(protocol, server, port, path, preferredApiVersions,\n sslContext, httpProxyHost, httpProxyPort,\n customHeaders):\n\n serviceVersionDescription = __GetServiceVersionDescription(\n protocol, server, port, path, sslContext, httpProxyHost,\n httpProxyPort, customHeaders)\n if serviceVersionDescription is None:\n return None\n\n if not isinstance(preferredApiVersions, list):\n preferredApiVersions = [preferredApiVersions]\n\n for desiredVersion in preferredApiVersions:\n if __VersionIsSupported(desiredVersion, serviceVersionDescription):\n return desiredVersion\n return None", "def server_api_versions(self):\n res = self._session.get(self._base_url, authenticated=False,\n raise_exc=False)\n # HTTP Not Found is a valid response for older (2.0.0) servers\n if res.status_code >= 400 and res.status_code != 404:\n ClientError.raise_if_needed(res)\n\n min_ver = res.headers.get(_MIN_VERSION_HEADER, '1.0')\n max_ver = res.headers.get(_MAX_VERSION_HEADER, '1.0')\n res = (_parse_version(min_ver), _parse_version(max_ver))\n LOG.debug('Supported API version range for %(url)s is '\n '[%(min)s, %(max)s]',\n {'url': self._base_url, 'min': min_ver, 'max': max_ver})\n return res", "def get_protocol_version_high(self):\n if self.is_module_queried['version_num'] is False:\n self.get_pvt()\n self.is_module_queried['version_num'] = False\n self.is_module_queried['All'] = False\n\n return self.version_high", "def supported_marshaller_api_versions() -> Tuple[str]:\n return (\"1.0\",)", "def getNativeChangesetVersion(protocolVersion):\n # Add more versions as necessary, but do remember to add them to\n # netclient's FILE_CONTAINER_* constants\n if protocolVersion < 38:\n return filecontainer.FILE_CONTAINER_VERSION_NO_REMOVES\n elif protocolVersion < 43:\n return filecontainer.FILE_CONTAINER_VERSION_WITH_REMOVES\n # Add more changeset versions here as the currently newest client is\n # replaced by a newer one\n return filecontainer.FILE_CONTAINER_VERSION_FILEID_IDX", "def get_server_version(self) -> tuple[int, ...]:\n # major.minor.patch format\n version = [0, 0, 0]\n\n pattern = SERVER_VERSION_RE.search(self.server_version)\n if pattern:\n version = [int(comp) for comp in pattern.group().split(\".\")]\n return tuple(version)", "def _sanityCheckProtocolVersions(other):\n if other.minVersion > other.maxVersion:\n raise ValueError(\"Versions set incorrectly\")\n if other.minVersion not in KNOWN_VERSIONS:\n raise ValueError(\"minVersion set incorrectly\")\n if other.maxVersion not in KNOWN_VERSIONS:\n raise ValueError(\"maxVersion set incorrectly\")\n\n if other.maxVersion < (3, 4):\n other.versions = [i for i in other.versions if i < (3, 4)]", 
"def getProtocolVersion(self):\n return protocol.version", "def get_protocol_version_low(self):\n if self.is_module_queried['version_num'] is False:\n self.get_pvt()\n self.is_module_queried['version_num'] = False\n self.is_module_queried['All'] = False\n\n return self.version_low", "def comm_version(self):\n version = self.interface.comm_version()\n self.logger.debug(\"Returning communication protocol version: \" + str(version))\n return version", "def check_capability_negotiation(\n self, environ, start_response, response_headers):\n ua = sa = None\n if \"HTTP_DATASERVICEVERSION\" in environ:\n major, minor, ua = core.parse_dataservice_version(\n environ[\"HTTP_DATASERVICEVERSION\"])\n else:\n major = 2\n minor = 0\n if \"HTTP_MAXDATASERVICEVERSION\" in environ:\n # (unused max_minor)\n max_major, max_minor, sa = core.parse_max_dataservice_version(\n environ[\"HTTP_MAXDATASERVICEVERSION\"]) # noqa\n else:\n max_major = major\n if major > 2 or (major == 2 and minor > 0):\n # we can't cope with this request\n return None\n elif max_major >= 2:\n response_headers.append(\n ('DataServiceVersion', '2.0; pyslet %s' % info.version))\n return 2\n else:\n response_headers.append(\n ('DataServiceVersion', '1.0; pyslet %s' % info.version))\n return 1", "def test_get_protocol_version_name(self):\n server, client = loopback()\n client_protocol_version_name = client.get_protocol_version_name()\n server_protocol_version_name = server.get_protocol_version_name()\n\n assert isinstance(server_protocol_version_name, str)\n assert isinstance(client_protocol_version_name, str)\n\n assert server_protocol_version_name == client_protocol_version_name", "def ip_version(sel_type):\n random.seed(a=urandom(100)) # Initialize seed urandom\n ipv4_only = False\n ipv6_only = False\n if sel_type == 0: # Random odd selection\n r = random.randint(1, 100)\n if r % 2 == 0:\n version = 6\n else:\n version = 4\n elif sel_type == 1: # Random selection\n version = random.sample([4, 6], 1)[0]\n elif sel_type == 2:\n version = random.choice([4, 6])\n elif sel_type == 3:\n if random.random() >= 0.5:\n version = 6\n else:\n version = 4\n elif sel_type == 4: # IPv4 only\n version = 4\n ipv4_only = True\n elif sel_type == 6: # IPv6 only\n version = 6\n ipv6_only = True\n\n global ip6_sessions_total # Session tracking\n global ip4_sessions_total\n global ip6_sessions\n global ip4_sessions\n if version == 6:\n ip6_sessions += 1\n ip6_sessions_total += 1\n if version == 4:\n ip4_sessions += 1\n ip4_sessions_total += 1\n\n if ip6_sessions > args.max_subsequent_sessions and not ipv6_only:\n version = 4\n ip6_sessions = 0\n ip4_sessions = 1\n ip6_sessions_total -= 1\n ip4_sessions_total += 1\n if args.verbose >= 2:\n print(\n \"[+] Maximum number of subsequent {0}\"\n \" IPv6 sessios reached\".format(\n args.max_subsequent_sessions)\n )\n if ip4_sessions > args.max_subsequent_sessions and not ipv4_only:\n version = 6\n ip4_sessions = 0\n ip6_sessions = 1\n ip4_sessions_total -= 1\n ip6_sessions_total += 1\n if args.verbose >= 2:\n print(\n \"[+] Maximum number of subsequent {0}\"\n \" IPv4 sessios reached\".format(\n args.max_subsequent_sessions)\n )\n\n return(version)", "def fix_bolt_versions(cls, bolt_versions):\n # Establish which protocol versions we want to attempt to use\n if not bolt_versions:\n bolt_versions = sorted(CLIENT.keys(), reverse=True)\n # Ensure we send exactly 4 versions, padding with zeroes if necessary\n return tuple(list(bolt_versions) + [(0, 0), (0, 0), (0, 0), (0, 0)])[:4]", "def protocol_version_9():\n 
print('Setting protocol version to 9')\n upgrade('protocolversion', 'protocol_version', 9)", "def server_version(self):\n resp = self._message(b\"host:version\")\n return int(resp, 16)" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
This function is responsible for parsing, validating, and making all necessary comparisons between the provided and supported protocol versions.
def compare_protocol_versions(self, session): # First parse protocol version strings to check for invalid formatting invalid_string = self.parse_protocol_version( [self.earliest_protocol_version, self.latest_protocol_version]) if invalid_string is not None: # Error during protocol string parsing data = ('earliest_protocol_version' if invalid_string == self.earliest_protocol_version else 'latest_protocol_version') raise ClientRequestError('InvalidParameterValue', data=data) # Check if protocol version is supported and define the one to use self.protocol_version = self.determine_supported_protocol( self.earliest_protocol_version, self.latest_protocol_version)
[ "def _sanityCheckProtocolVersions(other):\n if other.minVersion > other.maxVersion:\n raise ValueError(\"Versions set incorrectly\")\n if other.minVersion not in KNOWN_VERSIONS:\n raise ValueError(\"minVersion set incorrectly\")\n if other.maxVersion not in KNOWN_VERSIONS:\n raise ValueError(\"maxVersion set incorrectly\")\n\n if other.maxVersion < (3, 4):\n other.versions = [i for i in other.versions if i < (3, 4)]", "def determine_supported_protocol(self, earliest, latest):\n earliest = int(earliest.split('.')[0])\n latest = int(latest.split('.')[0])\n if earliest <= latest:\n supported = range(earliest, latest + 1)\n for version in (reversed(supported)):\n if version in RPCS.SUPPORTED_PROTOCOL_VERSIONS:\n return str(version)\n\n # If no common protocol version is found, raise fatal error\n raise ClientRequestError('NoValidProtocolVersionInCommon')", "def compare_versions(current_version, supported_version):\n try:\n current = current_version.split(\".\")\n supported = supported_version.split(\".\")\n\n if int(current[0]) < int(supported[0]):\n return False\n if int(current[0]) > int(supported[0]):\n return True\n return int(current[1]) >= int(supported[1])\n # pylint: disable=W0703\n except Exception:\n logger.info(\"issues parsing version\")\n return False", "def protocol(ver):\r\n if ver == 1:\r\n return 1\r\n\r\n if ver == 2:\r\n return 2\r\n\r\n\r\n raise ValueError", "def test_check_version():\n assert check_version('0.9.4-1', '0.9.4', '>=')\n assert check_version('3.0.0rc1', '3.0.0', '<')\n assert check_version('1.0', '1.0b2', '>')", "def _check_versions(instance):\n\n header_drpver = instance.header['VERSDRP3']\n isMPL4 = False\n if instance._release == 'MPL-4' and header_drpver == 'v1_5_0':\n header_drpver = 'v1_5_1'\n isMPL4 = True\n assert header_drpver == instance._drpver, ('mismatch between maps._drpver={0} '\n 'and header drpver={1}'\n .format(instance._drpver, header_drpver))\n\n # MPL-4 does not have VERSDAP\n if isMPL4:\n assert 'VERSDAP' not in instance.header, 'mismatch between maps._dapver and header'\n else:\n header_dapver = instance.header['VERSDAP']\n assert header_dapver == instance._dapver, 'mismatch between maps._dapver and header'", "def rpn_version_check(self):", "def test_valid_version_string(self):\n for version_str in (\"0\", \"0.1\", \"0.1.2\", \"0.1.2.3\", \"1!0\"):\n with self.subTest(version_str=version_str):\n version = Version(version_str)\n self.assertEqual(selectors._convert_tpr(version_str), version)", "def version_compare(compare_ver, min_version, max_version):\n if max_version == \"*\":\n return True\n if max_version == \"-\" or not max_version:\n max_version = \"0\"\n if not min_version or min_version == \"*\" or min_version == \"-\":\n min_version = \"0\"\n if compare_ver == \"-\" or compare_ver == \"*\":\n compare_ver = \"0\"\n if compare_ver == min_version or compare_ver == max_version:\n return True\n compare_ver_parts = str(compare_ver).split(\".\")\n min_version_parts = str(min_version).split(\".\")\n max_version_parts = str(max_version).split(\".\")\n\n # If all versions follow proper versioning then perform a simple numerical comparison\n if len(compare_ver_parts) == len(min_version_parts) and len(\n compare_ver_parts\n ) == len(max_version_parts):\n compare_ver_num = normalise_num(compare_ver, len(compare_ver_parts))\n min_version_num = normalise_num(min_version, len(compare_ver_parts))\n max_version_num = normalise_num(max_version, len(compare_ver_parts))\n if compare_ver_num >= min_version_num and compare_ver_num <= 
max_version_num:\n return True\n\n normal_len = len(compare_ver_parts)\n if len(min_version_parts) > normal_len:\n normal_len = len(min_version_parts)\n if len(max_version_parts) > normal_len:\n normal_len = len(max_version_parts)\n\n # Normalise the version numbers to be of same length\n compare_ver = normalise_version_str(compare_ver, normal_len)\n min_version = normalise_version_str(min_version, normal_len)\n max_version = normalise_version_str(max_version, normal_len)\n\n compare_ver_parts = str(compare_ver).split(\".\")\n min_version_parts = str(min_version).split(\".\")\n max_version_parts = str(max_version).split(\".\")\n\n for i in range(0, normal_len):\n if (\n not compare_ver_parts[i].isdigit()\n or not min_version_parts[i].isdigit()\n or not max_version_parts[i].isdigit()\n ):\n if (\n compare_ver_parts[i] == min_version_parts[i]\n and compare_ver_parts[i] == max_version_parts[i]\n ):\n continue\n else:\n return False\n elif int(compare_ver_parts[i]) >= int(min_version_parts[i]) and int(\n compare_ver_parts[i]\n ) <= int(max_version_parts[i]):\n continue\n else:\n return False\n return True", "def test_VersionOptionalFields():\n # onlyRequiredVersion is a version message that only contains the\n # required versions and all other values set to their default values.\n onlyRequiredVersion = minimumMsgVersion()\n\n onlyRequiredVersionEncoded = baseVersionEncoded()[:-55]\n\n # addrMeVersion is a version message that contains all fields through\n # the AddrMe field.\n addrMe = netaddress.NetAddress(\n ip=\"127.0.0.1\", port=8333, services=wire.SFNodeNetwork, stamp=0,\n )\n addrMeVersion = minimumMsgVersion()\n addrMeVersion.addrMe = addrMe\n\n addrMeVersionEncoded = baseVersionEncoded()[:-29]\n\n # nonceVersion is a version message that contains all fields through\n # the Nonce field.\n nonceVersion = minimumMsgVersion()\n nonceVersion.addrMe = addrMe\n nonceVersion.nonce = 123123 # 0x1e0f3\n nonceVersionEncoded = baseVersionEncoded()[:-21]\n\n # uaVersion is a version message that contains all fields through\n # the UserAgent field.\n uaVersion = minimumMsgVersion()\n uaVersion.addrMe = addrMe\n uaVersion.nonce = 123123\n uaVersion.userAgent = \"/dcrdtest:0.0.1/\"\n uaVersionEncoded = baseVersionEncoded()[:-4]\n\n # lastBlockVersion is a version message that contains all fields\n # through the LastBlock field.\n lastBlockVersion = minimumMsgVersion()\n lastBlockVersion.addrMe = addrMe\n lastBlockVersion.nonce = 123123\n lastBlockVersion.userAgent = \"/dcrdtest:0.0.1/\"\n lastBlockVersion.lastBlock = 234234 # 0x392fa\n lastBlockVersionEncoded = baseVersionEncoded()\n\n tests = [\n (onlyRequiredVersion, onlyRequiredVersionEncoded),\n (addrMeVersion, addrMeVersionEncoded),\n (nonceVersion, nonceVersionEncoded),\n (uaVersion, uaVersionEncoded),\n (lastBlockVersion, lastBlockVersionEncoded),\n ]\n\n for expMsg, buf in tests:\n # Decode the message from wire format.\n msg = msgversion.MsgVersion.btcDecode(buf, wire.ProtocolVersion)\n assert sameMsgVersion(msg, expMsg)", "def checkProtocol(protocol_string):\n\n if protocol_string != \"HTTP/1.1\":\n raise HTTPnotSupported", "def check_protocol_version(self):\n try:\n protocol_version = self.do_command(\"protocol_version\")\n except BadGtpResponse:\n return\n if protocol_version != \"2\":\n raise BadGtpResponse(\"%s reports GTP protocol version %s\" %\n (self.name, protocol_version))", "def test_versions_equal(self):\n self.check_versions_equal('1', '1')\n self.check_versions_equal('1', '1.0')\n self.check_versions_equal('1', '1.0.0')\n 
self.check_versions_equal('1.0', '1.0.0')\n self.check_versions_equal('1', '1-0')\n self.check_versions_equal('1', '1.0-0')\n self.check_versions_equal('1.0', '1.0-0')\n # no separator between number and character\n self.check_versions_equal('1a', '1-a')\n self.check_versions_equal('1a', '1.0-a')\n self.check_versions_equal('1a', '1.0.0-a')\n self.check_versions_equal('1.0a', '1-a')\n self.check_versions_equal('1.0.0a', '1-a')\n self.check_versions_equal('1x', '1-x')\n self.check_versions_equal('1x', '1.0-x')\n self.check_versions_equal('1x', '1.0.0-x')\n self.check_versions_equal('1.0x', '1-x')\n self.check_versions_equal('1.0.0x', '1-x')\n\n # aliases\n self.check_versions_equal('1ga', '1')\n self.check_versions_equal('1release', '1')\n self.check_versions_equal('1final', '1')\n self.check_versions_equal('1cr', '1rc')\n\n # special 'aliases' a, b and m for alpha, beta and milestone\n self.check_versions_equal('1a1', '1-alpha-1')\n self.check_versions_equal('1b2', '1-beta-2')\n self.check_versions_equal('1m3', '1-milestone-3')\n\n # case insensitive\n self.check_versions_equal('1X', '1x')\n self.check_versions_equal('1A', '1a')\n self.check_versions_equal('1B', '1b')\n self.check_versions_equal('1M', '1m')\n self.check_versions_equal('1Ga', '1')\n self.check_versions_equal('1GA', '1')\n self.check_versions_equal('1RELEASE', '1')\n self.check_versions_equal('1release', '1')\n self.check_versions_equal('1RELeaSE', '1')\n self.check_versions_equal('1Final', '1')\n self.check_versions_equal('1FinaL', '1')\n self.check_versions_equal('1FINAL', '1')\n self.check_versions_equal('1Cr', '1Rc')\n self.check_versions_equal('1cR', '1rC')\n self.check_versions_equal('1m3', '1Milestone3')\n self.check_versions_equal('1m3', '1MileStone3')\n self.check_versions_equal('1m3', '1MILESTONE3')\n\n self.check_versions_equal('1', '01', '001')", "def supports_http_1_1():", "def server_api_versions(self):\n res = self._session.get(self._base_url, authenticated=False,\n raise_exc=False)\n # HTTP Not Found is a valid response for older (2.0.0) servers\n if res.status_code >= 400 and res.status_code != 404:\n ClientError.raise_if_needed(res)\n\n min_ver = res.headers.get(_MIN_VERSION_HEADER, '1.0')\n max_ver = res.headers.get(_MAX_VERSION_HEADER, '1.0')\n res = (_parse_version(min_ver), _parse_version(max_ver))\n LOG.debug('Supported API version range for %(url)s is '\n '[%(min)s, %(max)s]',\n {'url': self._base_url, 'min': min_ver, 'max': max_ver})\n return res", "def test_valid_version():\n v_curr = parse_version(pypkgla01.__version__)\n v_orig = parse_version(\"0.1.0-dev\")\n assert v_curr >= v_orig", "def compare_version(version_str1, version_str2):\n compare_result = 0\n pattern = '([^\\.]+)\\.?([^\\.]*)\\.?([^\\.]*)'\n match1 = re.match(pattern, version_str1.strip())\n match2 = re.match(pattern, version_str2.strip())\n major2 = match2.group(1)\n major1 = match1.group(1)\n minor2 = match2.group(2) if match2.group(2) else '0'\n minor1 = match1.group(2) if match1.group(2) else '0'\n patch2 = match2.group(3) if match2.group(3) else '0'\n patch1 = match1.group(3) if match1.group(3) else '0'\n\n if int(major2) > int(major1):\n return CompareResult.GREATER\n elif int(major2) < int(major1):\n return CompareResult.LESS\n else: # same major version\n if int(minor2) > int(minor1):\n return CompareResult.GREATER\n elif int(minor2) < int(minor1):\n return CompareResult.LESS\n else:\n if patch2 > patch1:\n return CompareResult.GREATER\n elif patch2 < patch1:\n return CompareResult.LESS\n else:\n return CompareResult.EQUAL", 
"def compareVersion(self, version1, version2):\n \n ver_list1 = version1.split('.')\n ver_list2 = version2.split('.')\n \n \n \n len1 = len(ver_list1)\n len2 = len(ver_list2)\n \n \n if len1 > 1:\n while int(ver_list1[-1]) == 0:\n ver_list1.pop()\n len1 = len(ver_list1)\n if len2 > 1:\n while int(ver_list2[-1]) == 0:\n ver_list2.pop()\n len2 = len(ver_list2)\n \n \n for item1, item2 in zip(ver_list1, ver_list2):\n \n if int(item1) < int(item2):\n return -1\n elif int(item1) > int(item2):\n return 1\n \n \n \n if len1 > len2:\n return 1\n elif len1 < len2:\n return -1\n else:\n return 0", "def supports_version(self, required_version: str) -> bool:\n return parse_version(self.server_version) >= parse_version(required_version)" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
If a client_id is provided together with the inform message, we now have enough information to get the data from the database.
def handle_client_id(self, session):
    if session['client']['cid'] is not None:
        # A subscriber ID may only contain letters, numbers, spaces and
        # the following special characters: - _ \ / ( ) # .
        p = re.compile('^[A-Za-z0-9-_\\\. #/()]+$')
        if p.match(session['client']['cid']) is None:
            raise ClientRequestError('InvalidClientId')
        # Keep the original id: the lookup below replaces session['client']
        cid = session['client']['cid']
        try:
            session['client'] = session['db'].client_data_query(cid)
        except DbException:
            session['log'] = {'rc': 'error', 'msg': 'Non matching ClientID'}
            raise ClientRequestError('UnknownClient',
                                     data=cid + ' does not match data in database')

        if session['client'] is None:
            # The client could not be found.
            # It means that the client is not yet defined in the database.
            LOG.info("Client not in database, cid:" + cid)
            session['log'] = {'rc': 'ok', 'msg': 'Unknown CLIENT'}
            raise ClientRequestError('UnknownClient',
                                     data='No entry for client in database')
[ "def handle_inform(self, message):\n content = json.loads(message.content)\n if content['ref'] == 'R_01':\n display_message(self.agent.aid.name, 'Mensagem INFORM Recebida')", "def detail(client_id):\n try:\n # Fetch client details from the BancBox api and render\n clientId = { 'bancBoxId': client_id }\n request_params = {'subscriberId': subscriber_id, 'clientId': clientId}\n results = api.service.getClient(request_params) \n client = results.client\n except Exception, e:\n logger.error('Error retrieving client [%s]: %s', client_id, e)\n client = {}\n return render_template('detail.html', client=client)", "def get_info(iperf_clients):\n return None", "def getClientInfo(clientNumber):\n sql = \"SELECT * FROM appClientInfo WHERE clientNumber = %s LIMIT 1\"\n values = (clientNumber,)\n return sqlQuery(sql, values)", "def fetch_message(self, message_id, auth):", "def bids_client(self, address_client, id):\n try:\n self.mylogger.log(INFO, \"Listing bids of client {} \".format(id))\n msg = {}\n i = 0\n result = None\n client_exists = False\n\n for auction in self.all_auctions:\n if str(auction.id) == id:\n client_exists = True\n result = auction.bids_client(id)\n\n if client_exists:\n for bid in result:\n bid_number = \"bid_{}\".format(i)\n msg[bid_number] = bid\n i = i + 1\n\n msg = {'payload': msg}\n signature = base64.b64encode(self.certgen.signData(json.dumps(msg['payload']))).decode()\n msg['signature'] = signature\n bytes = self.sock.sendto(json.dumps(msg).encode(), address_client)\n print(\"\\n> sent list of bids of client {}\".format(id))\n else:\n msg = {'payload': {'ack': 'nok'}}\n signature = base64.b64encode(self.certgen.signData(json.dumps(msg['payload']))).decode()\n msg['signature'] = signature\n bytes = self.sock.sendto(json.dumps(msg).encode(), address_client)\n\n except:\n print(\"> can't send list of bids of client {}\".format(id))\n self.mylogger.log(INFO, \"Listing bids of client {} \".format(id))\n raise", "def test_fundinginformations_id_get(self):\n pass", "def recieve_information_from_client():\r\n client_data = request.forms.get('json')\r\n client_data_dict = json.loads(client_data)\r\n return client_data_dict", "def get_info(self,honeypotids):\n req = {\"type\":\"get_info\",\n \"from\":self.network.mc_id,\n \"to\": honeypotids}\n expect_dict = {\"type\":\"send_info\"}\n msg_list = self.send_receive(req,honeypotids,expect_dict)\n answer = {}\n for msg in msg_list:\n answer[msg[\"from\"]] = msg[\"info\"]\n return answer", "def set_client_id(self):\n data = self.receive() # deserialized data\n client_id = data['clientid'] # extracts client id from data\n self.client_id = client_id # sets the client id to this client\n print(\"Successfully connected to server: \" + self.userInfo['host'] + \" / \" + str(self.userInfo['port']))\n print(\"Your client info is:\\n\" + \"Client Name: \" + self.userInfo['name'] + \"\\nClient ID: \" + str(client_id))", "def get_patient_data(self, client):\n for patient in self._monitored_patients.get_patient_list():\n # print(\"Requesting data for \" + patient.first_name+\" \"+patient.last_name+\"...\")\n patient.update_data(client.get_patient_data(patient.id))", "def client_details(self, value):\n self._client_details = value", "def get_medication_dispense(self, id):\n\n query_string = \"\"\"SELECT ROW_ID, SUBJECT_ID, HADM_ID, ICUSTAY_ID, STARTDATE, ENDDATE, DRUG_TYPE, DRUG, \n DRUG_NAME_GENERIC, NDC, PROD_STRENGTH, FORM_VAL_DISP, FORM_UNIT_DISP, ROUTE\n FROM `green-gasket-256323.mimiciii_fullyautomated.PRESCRIPTIONS`\n where SUBJECT_ID = 
{};\"\"\"\n\n query_string = query_string.format(id)\n results = self.cl.queryRecords(query_string)\n\n r = []\n for row in results:\n res = {}\n for i in row.keys():\n if i not in res:\n res[i] = None\n\n if i in res and res[i] == None:\n res[i] = row[i]\n\n r.append(res)\n\n\n medication_res = []\n for res in r:\n medication_info = {\n \"resourceType\" : \"MedicationDispense\",\n\n \"identifier\" : res['ROW_ID'],\n \"partOf\" : res['ICUSTAY_ID'],\n \"status\" : None,\n\n \"statusReasonCodeableConcept\" : None,\n \"statusReasonReference\" : None,\n \"category\" : res['DRUG_TYPE'],\n \"medicationCodeableConcept\" :res['DRUG_NAME_GENERIC'],\n \"medicationReference\" : res['NDC'],\n \"subject\" : res['SUBJECT_ID'],\n \"context\" : res['HADM_ID'],\n \"supportingInformation\" : None,\n \"performer\" : [{\n \"function\" : None,\n \"actor\" : res['HADM_ID']\n }],\n \"location\" : None,\n \"authorizingPrescription\" : None,\n \"type\" : None,\n \"quantity\" : res['FORM_VAL_DISP'],\n \"daysSupply\" : round(abs(res['ENDDATE'] - res['STARTDATE']).seconds/86400, 2),\n \"whenPrepared\" : str(res['STARTDATE']),\n \"whenHandedOver\" : None,\n \"destination\" : None,\n \"receiver\" : None,\n \"note\" : res['DRUG'] + \" \" + res['FORM_VAL_DISP']\t+ \" \" + res['FORM_UNIT_DISP'],\n \"dosageInstruction\" : res['ROUTE'],\n \"substitution\" : None,\n \"detectedIssue\" : None,\n \"eventHistory\" : None\n }\n\n medication_res.append(medication_info)\n return medication_res", "def get_base_massage_for_client(self, client=None):\n if isinstance(client, Account):\n client_email = client.email if client else None\n content_type = ContentType.CLIENT_OBJECT\n elif isinstance(client, str):\n client_email = client\n content_type = ContentType.EMAIL\n else:\n client_email = \"\"\n content_type = ContentType.NONE\n\n return Message(self.server_account.email, client_email,\n content_type,\n ActionType.RESPONSE,\n 'utf-8', StatusCode.NOTFOUND,\n client)", "def __set_client_detail(self):\r\n ClientDetail = self.client.factory.create('ClientDetail')\r\n ClientDetail.AccountNumber = self.config_obj.account_number\r\n ClientDetail.MeterNumber = self.config_obj.meter_number\r\n ClientDetail.IntegratorId = self.config_obj.integrator_id\r\n if hasattr(ClientDetail, 'Region'):\r\n ClientDetail.Region = self.config_obj.express_region_code\r\n self.ClientDetail = ClientDetail", "def handleServerInfoResponse(data, mqttClient):\n print \"Received server Info Response\"\n response = tricc.ServerInfoResponse()\n response.ParseFromString(data)\n #if mqttClient.params[\"uniqueIdentifier\"] == response.identifier:\n if mqttClient.uuid == response.identifier or str(mqttClient.params[\"userId\"]) == response.identifier:\n print \"Server Info Response was for me\"\n #set system time and id of unit\n setSystemTime(response.serverTime.sec, mqttClient.params[\"sudoPw\"])\n mqttClient.params[\"userId\"] = response.responseId\n mqttClient.client.unsubscribe(\"serverInfoResponse\")\n unitTopic = \"unit\"+str(response.responseId)\n unitTimeSynchroTopic = \"unit\" + str(response.responseId) + \"_timeSynchro\"\n result, mid = mqttClient.client.subscribe(unitTopic, qos= mqttClient.params[\"qos\"])\n result, mid = mqttClient.client.subscribe(unitTimeSynchroTopic, qos=mqttClient.params[\"qos\"])\n if not unitTopic in mqttClient.params[\"topics\"]:\n mqttClient.params[\"topics\"].append(unitTopic)\n if not unitTimeSynchroTopic in mqttClient.params[\"topics\"]:\n mqttClient.params[\"topics\"].append(unitTimeSynchroTopic)\n\n\n if(not 
mqttClient.registeredAtServer):\n print \"Was not yet registered at server\"\n current_path = os.path.abspath(__file__)\n dir_path = os.path.dirname(os.path.dirname(current_path))\n network_config_path = os.path.join(dir_path, \"routing/network_config.py\")\n apMode = False\n with open(network_config_path, \"r+\") as file:\n lines = file.readlines()\n for i in lines:\n if \"AP_Mode = \" in i:\n mode = i.split(\"=\")[1].strip()\n if mode == \"True\":\n apMode = True\n #set IP of unit\n if mqttClient.mqttInterface.type == \"relay\":\n print \"case relay\"\n Setup.setOLSRIPAddressinNetworkConfig(mqttClient.params[\"userId\"])\n Setup.setupRouting()\n elif mqttClient.mqttInterface.type == \"kit\" and not apMode:\n Setup.setOLSRIPAddressinNetworkConfig(mqttClient.params[\"userId\"])\n Setup.setupRouting()\n elif mqttClient.mqttInterface.type == \"kit\" and apMode:\n #kein extra setup routing etc mehr noetig\n print \"hello\"\n\n\n\n mqttClient.params[\"gotIpFromServer\"] = True\n mqttClient.registeredAtServer = True\n\n mqttClient.setJsonParameters()\n\n #send IP of unit to server\n mqttClient.mqttInterface.sendConnectionMessage(getIp(), \"not used\", \"newIp\")\n else:\n print \"But server info Response was not for me\"", "def choose(self, _id):\n app = App.get_running_app()\n self.manager.client = app.session.query(Client).filter(Client.id == _id).one()\n self.manager.current = 'info'", "def send_info(self):\r\n pass", "def get_client_mess(self, my_client):\n return my_client.recv(self.get_m_transfering_b())" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Validates the received protocol compression parameter and sets the protocol compression to be used in the session according to the result of this validation.
def handle_protocol_compression(self, session): if self.protocol_compression is not None: valid = RPCS.VALID_COMPRESSION_METHODS if self.protocol_compression not in valid: self.protocol_compression = 'NONE' raise ClientRequestError('InvalidParameterValue', data='protocol_compression') else: self.protocol_compression = 'NONE'
[ "def validate_encoding(self) -> None:\n\n codecs.lookup(self.encoding)", "def compression_codec(self) -> Optional[str]:\n return pulumi.get(self, \"compression_codec\")", "def test_compression_level(self):\n test_compression_level = 8\n self.encoder._compression_level = test_compression_level", "def protocol_version(self, value):\n self._protocol_version = safe_is_version(value)", "def enable_compression(self):\r\n self.server.enable_compression()", "def is_valid_compression_type(cls, compression_type):\n return isinstance(compression_type, _CompressionType)", "def compression(self) -> Optional[Any]:\n return pulumi.get(self, \"compression\")", "def check_codec(self):\n codecs = list(set([s[\"codec\"] for s in self.segments]))\n for c in codecs:\n if c != \"h264\":\n raise P1203StandaloneError(\"Unsupported codec: {}\".format(c))", "def validate_subprotocol(subprotocol):\n\n if not subprotocol:\n raise HandshakeException('Invalid subprotocol name: empty')\n\n # Parameter should be encoded HTTP token.\n state = http_header_util.ParsingState(subprotocol)\n token = http_header_util.consume_token(state)\n rest = http_header_util.peek(state)\n # If |rest| is not None, |subprotocol| is not one token or invalid. If\n # |rest| is None, |token| must not be None because |subprotocol| is\n # concatenation of |token| and |rest| and is not None.\n if rest is not None:\n raise HandshakeException('Invalid non-token string in subprotocol '\n 'name: %r' % rest)", "def check_protocol_version(self):\n try:\n protocol_version = self.do_command(\"protocol_version\")\n except BadGtpResponse:\n return\n if protocol_version != \"2\":\n raise BadGtpResponse(\"%s reports GTP protocol version %s\" %\n (self.name, protocol_version))", "def _is_protocol_header_frame(self, value):\r\n return isinstance(value, frame.ProtocolHeader)", "def get_compression_options(self):\n # use deflate compress websocket\n return {}", "def check_encoding_negotiation(self):\n from .telopt import DO, BINARY\n if self._closing:\n return\n\n # encoding negotiation is complete\n if self.outbinary and self.inbinary:\n self.log.debug('negotiated outbinary and inbinary with client.')\n\n # if (WILL, BINARY) requested by begin_negotiation() is answered in\n # the affirmitive, then request (DO, BINARY) to ensure bi-directional\n # transfer of non-ascii characters.\n elif self.outbinary and not self.inbinary and (\n not (DO, BINARY,) in self.stream.pending_option):\n self.log.debug('outbinary=True, requesting inbinary.')\n self.stream.iac(DO, BINARY)\n self._loop.call_later(self.CONNECT_DEFERRED,\n self.check_encoding_negotiation)\n\n elif self.duration > self.CONNECT_MAXWAIT:\n # Perhaps some IAC interpreting servers do not differentiate\n # 'local' from 'remote' options -- they are treated equivalently.\n self.log.debug('failed to negotiate both outbinary and inbinary.')\n\n else:\n self._loop.call_later(self.CONNECT_DEFERRED,\n self.check_encoding_negotiation)", "def _detect_encryption(self) -> bool:\n with open(self._path, mode=\"rb\") as file:\n header = file.read(32)\n\n try:\n # Check that first bit is 1\n if header[0] & 0x80 == 0:\n return False\n\n if header[0] & 0x40 == 0:\n # Old packet format\n\n # Check that packet tag is 1: \"Public-Key Encrypted Session Key Packet\"\n if (header[0] & 0x3C) >> 2 != 1:\n return False\n\n length_type = header[0] & 0x3\n if length_type == 0:\n body_offset = 2\n elif length_type == 1:\n body_offset = 3\n elif length_type == 2:\n body_offset = 5\n else: # if length_type == 3:\n # Indeterminate length\n 
body_offset = 1\n\n # Version 3\n if header[body_offset] != 3:\n return False\n\n else:\n # New packet format\n\n # Check that packet tag is 1: \"Public-Key Encrypted Session Key Packet\"\n if header[0] & 0x3F != 1:\n return False\n\n # More checks could be done, but GPG doesn't even seem to generate this format\n except IndexError:\n # Packet was too short\n return False\n return True", "def validate(self) -> None:\n super().validate()\n if self.pipe_mode.value is SocketMode.CONNECT and self.pipe_format.value is None:\n raise Error(\"'pipe_format' required for CONNECT pipe mode.\")", "def processHandshake(self):\r\n ## only proceed when we have fully received the HTTP request line and all headers\r\n ##\r\n end_of_header = self.data.find(\"\\x0d\\x0a\\x0d\\x0a\")\r\n if end_of_header >= 0:\r\n\r\n self.http_response_data = self.data[:end_of_header + 4]\r\n if self.debug:\r\n log.msg(\"received HTTP response:\\n\\n%s\\n\\n\" % self.http_response_data)\r\n\r\n ## extract HTTP status line and headers\r\n ##\r\n (self.http_status_line, self.http_headers, http_headers_cnt) = parseHttpHeader(self.http_response_data)\r\n\r\n ## validate WebSocket opening handshake server response\r\n ##\r\n if self.debug:\r\n log.msg(\"received HTTP status line in opening handshake : %s\" % str(self.http_status_line))\r\n log.msg(\"received HTTP headers in opening handshake : %s\" % str(self.http_headers))\r\n\r\n ## Response Line\r\n ##\r\n sl = self.http_status_line.split()\r\n if len(sl) < 2:\r\n return self.failHandshake(\"Bad HTTP response status line '%s'\" % self.http_status_line)\r\n\r\n ## HTTP version\r\n ##\r\n http_version = sl[0].strip()\r\n if http_version != \"HTTP/1.1\":\r\n return self.failHandshake(\"Unsupported HTTP version ('%s')\" % http_version)\r\n\r\n ## HTTP status code\r\n ##\r\n try:\r\n status_code = int(sl[1].strip())\r\n except:\r\n return self.failHandshake(\"Bad HTTP status code ('%s')\" % sl[1].strip())\r\n if status_code != HTTP_STATUS_CODE_SWITCHING_PROTOCOLS[0]:\r\n\r\n ## FIXME: handle redirects\r\n ## FIXME: handle authentication required\r\n\r\n if len(sl) > 2:\r\n reason = \" - %s\" % ''.join(sl[2:])\r\n else:\r\n reason = \"\"\r\n return self.failHandshake(\"WebSocket connection upgrade failed (%d%s)\" % (status_code, reason))\r\n\r\n ## Upgrade\r\n ##\r\n if not self.http_headers.has_key(\"upgrade\"):\r\n return self.failHandshake(\"HTTP Upgrade header missing\")\r\n if self.http_headers[\"upgrade\"].strip().lower() != \"websocket\":\r\n return self.failHandshake(\"HTTP Upgrade header different from 'websocket' (case-insensitive) : %s\" % self.http_headers[\"upgrade\"])\r\n\r\n ## Connection\r\n ##\r\n if not self.http_headers.has_key(\"connection\"):\r\n return self.failHandshake(\"HTTP Connection header missing\")\r\n connectionUpgrade = False\r\n for c in self.http_headers[\"connection\"].split(\",\"):\r\n if c.strip().lower() == \"upgrade\":\r\n connectionUpgrade = True\r\n break\r\n if not connectionUpgrade:\r\n return self.failHandshake(\"HTTP Connection header does not include 'upgrade' value (case-insensitive) : %s\" % self.http_headers[\"connection\"])\r\n\r\n ## compute Sec-WebSocket-Accept\r\n ##\r\n if self.version != 0:\r\n if not self.http_headers.has_key(\"sec-websocket-accept\"):\r\n return self.failHandshake(\"HTTP Sec-WebSocket-Accept header missing in opening handshake reply\")\r\n else:\r\n if http_headers_cnt[\"sec-websocket-accept\"] > 1:\r\n return self.failHandshake(\"HTTP Sec-WebSocket-Accept header appears more than once in opening handshake 
reply\")\r\n sec_websocket_accept_got = self.http_headers[\"sec-websocket-accept\"].strip()\r\n\r\n sha1 = hashlib.sha1()\r\n sha1.update(self.websocket_key + WebSocketProtocol._WS_MAGIC)\r\n sec_websocket_accept = base64.b64encode(sha1.digest())\r\n\r\n if sec_websocket_accept_got != sec_websocket_accept:\r\n return self.failHandshake(\"HTTP Sec-WebSocket-Accept bogus value : expected %s / got %s\" % (sec_websocket_accept, sec_websocket_accept_got))\r\n\r\n ## handle \"extensions in use\" - if any\r\n ##\r\n self.websocket_extensions_in_use = []\r\n if self.version != 0:\r\n if self.http_headers.has_key(\"sec-websocket-extensions\"):\r\n if http_headers_cnt[\"sec-websocket-extensions\"] > 1:\r\n return self.failHandshake(\"HTTP Sec-WebSocket-Extensions header appears more than once in opening handshake reply\")\r\n exts = self.http_headers[\"sec-websocket-extensions\"].strip()\r\n ##\r\n ## we don't support any extension, but if we did, we needed\r\n ## to set self.websocket_extensions_in_use here, and don't fail the handshake\r\n ##\r\n return self.failHandshake(\"server wants to use extensions (%s), but no extensions implemented\" % exts)\r\n\r\n ## handle \"subprotocol in use\" - if any\r\n ##\r\n self.websocket_protocol_in_use = None\r\n if self.http_headers.has_key(\"sec-websocket-protocol\"):\r\n if http_headers_cnt[\"sec-websocket-protocol\"] > 1:\r\n return self.failHandshake(\"HTTP Sec-WebSocket-Protocol header appears more than once in opening handshake reply\")\r\n sp = str(self.http_headers[\"sec-websocket-protocol\"].strip())\r\n if sp != \"\":\r\n if sp not in self.factory.protocols:\r\n return self.failHandshake(\"subprotocol selected by server (%s) not in subprotocol list requested by client (%s)\" % (sp, str(self.factory.protocols)))\r\n else:\r\n ## ok, subprotocol in use\r\n ##\r\n self.websocket_protocol_in_use = sp\r\n\r\n\r\n ## For Hixie-76, we need 16 octets of HTTP request body to complete HS!\r\n ##\r\n if self.version == 0:\r\n if len(self.data) < end_of_header + 4 + 16:\r\n return\r\n else:\r\n challenge_response = self.data[end_of_header + 4:end_of_header + 4 + 16]\r\n if challenge_response != self.websocket_expected_challenge_response:\r\n return self.failHandshake(\"invalid challenge response received from server (Hixie-76)\")\r\n\r\n ## Ok, got complete HS input, remember rest (if any)\r\n ##\r\n if self.version == 0:\r\n self.data = self.data[end_of_header + 4 + 16:]\r\n else:\r\n self.data = self.data[end_of_header + 4:]\r\n\r\n ## opening handshake completed, move WebSocket connection into OPEN state\r\n ##\r\n self.state = WebSocketProtocol.STATE_OPEN\r\n self.inside_message = False\r\n if self.version != 0:\r\n self.current_frame = None\r\n self.websocket_version = self.version\r\n\r\n ## we handle this symmetrical to server-side .. that is, give the\r\n ## client a chance to bail out .. i.e. 
on no subprotocol selected\r\n ## by server\r\n try:\r\n connectionResponse = ConnectionResponse(self.peer,\r\n self.peerstr,\r\n self.http_headers,\r\n None, # FIXME\r\n self.websocket_protocol_in_use,\r\n self.websocket_extensions_in_use)\r\n\r\n self.onConnect(connectionResponse)\r\n\r\n except Exception, e:\r\n ## immediately close the WS connection\r\n ##\r\n self.failConnection(1000, str(e))\r\n else:\r\n ## fire handler on derived class\r\n ##\r\n if self.trackedTimings:\r\n self.trackedTimings.track(\"onOpen\")\r\n self.onOpen()\r\n\r\n ## process rest, if any\r\n ##\r\n if len(self.data) > 0:\r\n self.consumeData()", "def resetProtocolOptions(self):\r\n self.versions = WebSocketProtocol.SUPPORTED_PROTOCOL_VERSIONS\r\n self.allowHixie76 = WebSocketProtocol.DEFAULT_ALLOW_HIXIE76\r\n self.webStatus = True\r\n self.utf8validateIncoming = True\r\n self.requireMaskedClientFrames = True\r\n self.maskServerFrames = False\r\n self.applyMask = True\r\n self.maxFramePayloadSize = 0\r\n self.maxMessagePayloadSize = 0\r\n self.autoFragmentSize = 0\r\n self.failByDrop = True\r\n self.echoCloseCodeReason = False\r\n self.openHandshakeTimeout = 5\r\n self.closeHandshakeTimeout = 1\r\n self.tcpNoDelay = True", "def handle_ProtocolHeaderFrame(self,\n frame: amqpframe.ProtocolHeaderFrame):\n self._fsm.trigger('receive_ProtocolHeaderFrame')\n raise exceptions.UnsupportedProtocol(\n frame.payload.protocol_major,\n frame.payload.protocol_minor,\n frame.payload.protocol_revision,\n )", "def validate_handshake_public_key(cls, public_key: bytes) -> None:\n ..." ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
(str) > bool Return True if correct service name
def is_service_name_correct(self, service):
    return service in self.services
[ "def has_service(self, name):\n services = self.services(filters={'name': name})\n return len(services) > 0", "def istype(client, service_name: str):\n\n if is_client(client):\n return (\n client.meta.service_model.service_name.lower()\n == service_name.strip().lower()\n )\n return False", "def available(name):\n try:\n _get_service(name)\n return True\n except CommandExecutionError:\n return False", "def has_service(self, service_type: ServiceName) -> bool:\n return service_type in self", "def test_get_service_by_name(self):\n entry = self.dsf['service1']\n self.assertEqual(entry.service_name, 'service1')", "def hasService(self, serviceName):\n return serviceName in self.__registry", "def isService(self, serviceInterface: java.lang.Class) -> bool:\n ...", "def _interesting_service(self, service: UpnpService) -> bool:\n service_type = service.service_type\n for service_types in self._SERVICE_TYPES.values():\n if service_type in service_types:\n return True\n\n return False", "def checkServiceAdded(self, name):\n for group in self.pinGroups:\n if group[0] == str(name):\n return True\n\n # otherwise return 0\n return False", "def _always_running_service(name):\n\n # get all the info from the launchctl service\n service_info = show(name)\n\n # get the value for the KeepAlive key in service plist\n try:\n keep_alive = service_info[\"plist\"][\"KeepAlive\"]\n except KeyError:\n return False\n\n # check if KeepAlive is True and not just set.\n\n if isinstance(keep_alive, dict):\n # check for pathstate\n for _file, value in keep_alive.get(\"PathState\", {}).items():\n if value is True and os.path.exists(_file):\n return True\n elif value is False and not os.path.exists(_file):\n return True\n\n if keep_alive is True:\n return True\n\n return False", "def check_service_unique(name):\n cursor.execute(\"SELECT * FROM service WHERE service_name = ? 
OR shorthand_name = ?;\", (name, name))\n\n return len(cursor.fetchall()) < 1", "def is_service_installed(klass, service):\n return True", "def test_get_service_string(self):\n pass", "def check_if_auth_service():\n salt = token_hex(16)\n signature = sign_data(app.config['SERVICE_PRIVATE_KEY'], salt)\n if app.config['AUTH_STANDALONE']:\n if not verify_signature(app.config['SERVICE_PUBLIC_KEY'], signature, salt):\n return False\n else:\n if not verify_signature(app.config['AUTH_PUB_KEY'], signature, salt):\n return False\n return True", "def is_shortened(self, url):\n parts = urlsplit(url)\n if parts.hostname in self.services:\n if not parts.scheme and not parts.hostname:\n parts = urlsplit(\"http://\" + url)\n return bool(parts.hostname in self.services and parts.path)\n else:\n logging.debug('[-] Service at {} is not supported by this function, but may resolve'.format(url))\n try:\n r = requests.head(url)\n if r.headers['Location'] != url:\n return True\n except:\n return False", "def check_service(self):\r\n response = self.manager_server.connect(\"GET\", self.uri_base + \"/base/\", headers=self.headers)\r\n if response.status_code != 200:\r\n return False\r\n return True", "def __is_test_service(self):\r\n if self.current_service:\r\n return not self.current_service.is_active\r\n return False", "def station_has_service(\n station: dict[str, Any], service_to_find: SpanshStationService\n) -> bool:\n return (\n next(\n (\n service\n for service in station[\"services\"]\n if service[\"name\"].lower() == service_to_find.value.lower()\n ),\n None,\n )\n is not None\n )", "def validate_service_name(config_loader, service_name):\n\n pattern = re.compile(\"^([a-z_])([a-z0-9_-]*)$\")\n if not service_name or service_name.isspace() or not pattern.match(service_name) or len(service_name) > 100:\n log.exception(create_config_fail_message(\"invalid service name\"))\n config_loader.exit_with_config_error(\"Configuration error: invalid service name; not defined or did not match \"\n \"pattern (pattern: %s)\" % pattern.pattern)" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
(str) > bool Return True if correct class mail type
def is_class_mail_types_correct(self, class_mail_type):
    return class_mail_type in self.class_mail_types
[ "def is_mail_types_correct(self, mail_type):\r\n return mail_type in self.mail_types", "def mail_type(self):\n\n if self.machine.get_error():\n return \"error\"\n\n if self.machine._ran:\n return \"success\"\n\n return None", "def translatable(self):\n if type(self.message) != str or self.message == '':\n return False\n return True if self.type in (1, 20, 26) else False", "def get_receive_mail_str(self):\n ret = False\n if self.__mail:\n ret = True\n return ret", "def check_eligible_mimetype(self, ctype, uid):\n self.helper.log_debug(\n 'check_eligible_mimtype: checking content-type %s of msg uid %s' %\n (ctype, uid))\n if ctype == \"application/zip\":\n return True\n elif ctype == \"application/gzip\":\n return True\n elif ctype == \"application/x-gzip\":\n return True\n elif ctype == \"application/octet-stream\":\n # Non-standard mimetype used by Amazon SES dmarc reports\n return True\n elif ctype == \"application-x-gzip\":\n # Non-standard mimetype used by Comcast dmarc reports\n return True\n elif ctype == \"application/x-zip-compressed\":\n # Non-standard mimetype used by Yahoo dmarc reports\n return True\n elif ctype == \"application/xml\":\n return True\n elif ctype == \"text/xml\":\n return True\n else:\n self.helper.log_debug(\n 'check_eligible_mimtype: skipping content-type %s of msg uid %s' %\n (ctype, uid))\n return False", "def protocolType(self, obj):\n try:\n protocolType = getattr(obj, \"protocolType\")\n strippedProtocolType = protocolType.strip()\n if strippedProtocolType == \"Leader Election\":\n return True\n elif strippedProtocolType == \"Leader Election\":\n return True\n elif strippedProtocolType == \"\":\n printRed(\"Missing protocolType's key.\")\n return False\n else:\n printRed(\"Unsupported protocol Type.\")\n return False\n except AttributeError as error:\n printRed(error)\n return False", "def strtype(x):\n if type(x) == str:\n return True\n if type(x) == unicode:\n return True\n return False", "def is_valid_mime_type_format(self, mime_type: str) -> bool:\n return mime_type in mimetypes.types_map.values()", "def is_email(self):\n\n if 'rfc822' in os.path.basename(self.path) and not self.path.endswith('.headers'):\n try:\n if magic.from_file(self.path, mime=True) == 'message/rfc822':\n return True\n except:\n pass\n\n return False", "def verify_mail(self):\n raise NotImplementedError", "def is_string_type(self, datatype):\n if datatype.lower().startswith(\"unicode\"): return True\n if datatype.lower().startswith(\"string\"): return True\n return False", "def validate_type(type):\n\n types_upper = [i.upper() for i in officeTypes]\n if type.upper() in types_upper:\n return True\n return False", "def is_valid_content_type(content_type: str, expected_content_type: str) -> bool:\n return (content_type is not None) and (content_type.strip().lower() == expected_content_type)", "def is_valid_content_type(cls, content_type: str) -> bool:\n return content_type in cls.CONTENT_TYPES.value", "def _is_valid_ct(content_type: str) -> bool:\n content_type = content_type.strip()\n return _is_valid_regex(CT_CONTENT_TYPE_REGEX_PATTERN, content_type)", "def is_type(self, ent_type):\n # type: (str) -> bool\n # its always an entity ...\n if ent_type.lower() in ('entity', self.settings['_type'].lower()):\n return True\n else:\n return False", "def accTypeis(request,string):\r\n if request.user.accType == string:\r\n return True\r\n else:\r\n return False", "def _is_valid_content_type_format(content_type: str) -> bool:\n return (\n _is_valid_ct(content_type)\n or 
_is_valid_pt(content_type)\n or _is_valid_set(content_type)\n or _is_valid_list(content_type)\n or _is_valid_dict(content_type)\n or _is_valid_union(content_type)\n or _is_valid_optional(content_type)\n )", "def typeIsString(obj):\n return type(obj) is str or _haveTypeUnicode and type(obj) is unicode" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
(str) > bool Return True if correct container type
def is_container_type_correct(self, container):
    return container in self.containers
[ "def is_container(item):\n if isinstance(item, str):\n return False\n elif hasattr(item, \"__iter__\"):\n return True\n\n return False", "def is_container(x):\n return isinstance(x, Iterable) and not isinstance(x, str)", "def is_container(self):", "def _supported_type(self,obj):\r\n return isinstance(obj, (ObjectContainer,ContainerBase))", "def strtype(x):\n if type(x) == str:\n return True\n if type(x) == unicode:\n return True\n return False", "def has_string_type(obj: _std_typing.Any) -> bool:\n return obj.dtype == sc.DType.string", "def is_string_type(self, datatype):\n if datatype.lower().startswith(\"unicode\"): return True\n if datatype.lower().startswith(\"string\"): return True\n return False", "def isstring(item):\n return type(item).__name__ in STRING_TYPES", "def is_container(value: object) -> TypeGuard[AnyContainer]:\n if isinstance(value, Container):\n return True\n if hasattr(value, \"__pt_container__\"):\n return is_container(cast(\"MagicContainer\", value).__pt_container__())\n return False", "def isdatatype(object):\n return isinstance(object, (str, int, bool, float, type(None)))", "def _multiple_types(self) -> bool:\n return isinstance(self.type, list)", "def is_string(value):\n return isinstance(value, basestring)", "def _is_collection(obj):\n\n if isinstance(obj, basestring):\n return False\n\n return hasattr(obj, '__getitem__')", "def __bool__(self):\n return _RMF_HDF5.StringsList___bool__(self)", "def has_convertor(cls: Union[Type, str]) -> bool:\n return _get_convertor(cls) is not None", "def _is_string(self, value):\n if type(value) in [type(u''), type('')]:\n return True\n elif type(value) in [int, type(2 ** 64)]:\n return False\n else:\n return None", "def __contains__(self, a):\n try:\n self.convert(a)\n except CoercionFailed:\n return False\n\n return True", "def is_type(self, *data_types):\r\n return self.datatype in data_types", "def is_collection(obj: Any) -> bool:\n if obj is None:\n return False\n return isinstance(obj, list)\\\n or isinstance(obj, tuple)\\\n or isinstance(obj, set)\\\n or isinstance(obj, frozenset)\\\n or isinstance(obj, dict)" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
(class xml.etree.ElementTree.Element) > list Return dictionary with all Postage information
def get_postage_from_response(self, xml_response):
    postages = xml_response.find("Package").findall("Postage")
    postages_list = []
    if postages:
        for postage in postages:
            postages_list.append(self.get_response_information(postage))
    return postages_list
[ "def get_all_personinfos(root: Element) -> dict:\n result = []\n for child in root:\n if child.tag == \"personinfo\":\n d = personinfo_to_dict(child)\n result.append(d)\n return result", "def get_all_tripinfos(root: Element) -> dict:\n result = []\n for child in root:\n if child.tag == \"tripinfo\":\n d = tripinfo_to_dict(child)\n result.append(d)\n return result", "def _getTags(self, elem):\n res = {}\n for tag in elem.iter(\"tag\"):\n res[tag.attrib[\"k\"]] = tag.attrib[\"v\"]\n return res", "def _construct_data_xml(self, xml_file_list):\n award_dict = {}\n award_list = []\n for xml_file in xml_file_list:\n xml_file.seek(0)\n tree = ET.parse(xml_file)\n root = tree.getroot()\n\n for response in root:\n temp_dict = {}\n for award in response:\n if award.tag == 'entry':\n continue\n try:\n # temp_dict[award.tag].append(award.text)\n temp_dict[award.tag] = award.text\n except KeyError:\n print(\"KeyError\")\n # temp_dict[award.tag] = [award.text]\n\n # if 'entry' in temp_dict.keys():\n # del temp_dict['entry']\n if len(temp_dict) > 0:\n award_list.append(temp_dict)\n\n return award_list", "def _get_information(self):\n pros_cons = []\n pros_cons_dict = {}\n\n for i, div in enumerate(self._tab.find_all(\"div\")):\n for p in div.find_all(\"p\"):\n pro_con = p.get_text(strip=True)\n pros_cons.append(pro_con)\n pros_cons_dict.update({self._keys_dict[i]: pros_cons})\n pros_cons = []\n\n return pros_cons_dict", "def pkv_to_dict(xml: str) -> dict:\n pkv_dict = {}\n for p in html.fromstring(xml).findall(\"p\"):\n k = p.find(\"k\").text\n vals = [v.text for v in p.findall(\"v\")]\n if k in pkv_dict:\n pkv_dict[k] = pkv_dict[k] + vals\n else:\n pkv_dict[k] = vals\n\n for k, v in pkv_dict.items():\n if len(v) == 1:\n pkv_dict[k] = v[0]\n\n return pkv_dict", "def get_para_gates_info(self):\n\t\tvalues = list()\n\t\tfor process in self.root.findall('xmlns:process',self.ns):\n\t\t\tfor para_gateway in process.findall('xmlns:parallelGateway',self.ns):\n\t\t\t\tgate_id = para_gateway.get('id')\n\t\t\t\tgate_name = para_gateway.get('name')\n\t\t\t\tgate_dir = para_gateway.get('gatewayDirection')\n\t\t\t\tvalues.append(dict(gate_id=gate_id,gate_name=gate_name,gate_dir=gate_dir))\n\t\treturn values", "def _pus(node: xml.etree.ElementTree.Element) -> List[xml.etree.ElementTree.Element]:\n return node.findall(\".//object[@type='PU']\")", "def xml_children_as_dict(node):\n return dict((e.tag, e.text) for e in node)", "def convert_xml_to_dict():\n pass", "def get_papers_atts(self, papers):\n atts = {}\n for paper in papers:\n title, jornal, conf = db.select_one([\"normal_title\", \"jornal_id\", \"conf_id\"], table=\"papers\", where=\"id='%s'\" % paper)\n title = title if title else \"\"\n venue = jornal if jornal else conf\n atts[paper] = {\"label\": title, \"title\": title, \"venue\": venue}\n\n return atts", "def children(self):\n return {c.Name: PIAFElement(c) for c in self.database.Elements}", "def xmlpost_to_dict(post):\n\n tree = ET.parse(post)\n root = tree.getroot()\n msg = root.find('message')\n\n post_data = {}\n\n board_id = msg.find('board_id')\n post_data['board_id'] = int(board_id.text)\n\n root_post = msg.find('root').attrib['href']\n post_data['root_post'] = root_post.split('/')[-1]\n\n kudos = msg.find('kudos')\n count = kudos.find('count')\n post_data['kudos_count'] = int(count.text)\n\n edit_author_id = msg.find('last_edit_author').attrib['href']\n post_data['edit_author_id'] = int(edit_author_id.split('/')[-1])\n\n post_time = msg.find('post_time')\n post_data['post_time'] = 
post_time.text\n\n last_edit_time = msg.find('last_edit_time')\n post_data['last_edit_time'] = last_edit_time.text\n\n body = msg.find('body')\n post_data['body'] = body.text\n\n thread = msg.find('thread').attrib['href']\n post_data['thread'] = int(thread.split('/')[-1])\n\n board = msg.find('board').attrib['href']\n post_data['board'] = board.split('/')[-1]\n\n try:\n parent_post = msg.find('parent').attrib['href']\n post_data['parent_post'] = int(parent_post.split('/')[-1])\n except KeyError:\n post_data['parent_post'] = None\n\n views = msg.find('views')\n post_data['views'] = int(views.find('count').text)\n\n subject = msg.find('subject')\n post_data['subject'] = subject.text\n\n post_id = msg.find('id')\n post_data['post_id'] = int(post_id.text)\n\n author_id = msg.find('author').attrib['href']\n post_data['author_id'] = int(author_id.split('/')[-1])\n\n return post_data", "def parse_xml_strings(xmls):\n # utilise une table de hachage pour suprimier le éléments redondants:\n resultmap = {}\n for xml in xmls:\n #tree = ET.parse(filename).getroot()\n tree = ET.fromstring(xml)\n for parcelle in tree:\n param = {attr.replace(\"_\",\"\") : float(get_xml_child_text(parcelle, attr.upper(), \"0\"))\n for attr in\n [\"libellex\", \"libelley\", \"x_min\",\"x_max\",\"y_min\",\"y_max\",\"surface_geom\"]}\n # La commune de Vizille (38) n'as parfois pas de champ\n # libellex et libelley.\n fid = parcelle.attrib['fid'][9:]\n resultmap[fid] = Parcelle(\n fid = parcelle.attrib['fid'][9:],\n nature = parcelle.iter(\"NATURE\").next().text,\n **param)\n return resultmap", "def get_post(row): \n \n try:\n root = etree.fromstring(row.encode('utf-8'))\n \n userid_of_p = int(root.attrib['OwnerUserId'])\n tags = root.attrib['Tags']\n p_type = int(root.attrib['PostTypeId'])\n p_CreatedDate = root.attrib['CreationDate']\n view_ct = float(root.attrib.get('ViewCount', 0.0))\n ans_ct = float(root.attrib.get('AnswerCount', 0.0))\n fav_ct = float(root.attrib.get('FavoriteCount', 0.0))\n score = float(root.attrib.get('Score', 0.0))\n \n\n except Exception: \n return None\n \n else:\n return ( p(userid_of_p, \n p_info_t(tags, p_type, p_CreatedDate, view_ct,ans_ct, \n fav_ct,score)) \n )", "def get_attachments(xml):\r\n items = get_items(xml)\r\n names = {}\r\n attachments = []\r\n\r\n for item in items:\r\n kind = item.find('post_type').string\r\n filename = item.find('post_name').string\r\n post_id = item.find('post_id').string\r\n\r\n if kind == 'attachment':\r\n attachments.append((item.find('post_parent').string,\r\n item.find('attachment_url').string))\r\n else:\r\n filename = get_filename(filename, post_id)\r\n names[post_id] = filename\r\n attachedposts = {}\r\n for parent, url in attachments:\r\n try:\r\n parent_name = names[parent]\r\n except KeyError:\r\n #attachment's parent is not a valid post\r\n parent_name = None\r\n\r\n try:\r\n attachedposts[parent_name].append(url)\r\n except KeyError:\r\n attachedposts[parent_name] = []\r\n attachedposts[parent_name].append(url)\r\n return attachedposts", "def getXmlDict(oxml):\n lines = oxml.split(\"\\n\")\n rrd_d = {}\n # <cf> AVERAGE </cf>\n # <pdp_per_row> 288 </pdp_per_row> <!-- 86400 seconds -->\n\n # parse xml file\n key = \"\"\n rows = [] \n for line in lines:\n if (reMatchCF(line)):\n cf = line.split()[1]\n key += cf\n if (reMatchPDP(line)):\n pdp = line.split()[1]\n key += pdp\n if (reMatchRow(line)):\n ele = line.split()\n time = ele[5]\n val = ele[8]\n rows.append([time,val,line])\n # end of rra is reached, store to dict and rest vals\n if 
(reMatchDBEnd(line) and key and rows):\n rrd_d[key] = rows\n key = \"\"\n rows = []\n return rrd_d", "def get_pfams(self):\n\n p_data = self.xml_dict['uniprot']['entry']['dbReference']\n pfams = dict()\n for list_item in p_data:\n if list_item.get(\"@type\", '') == 'Pfam':\n pfam_id = ''\n pfam_name = ''\n if '@id' in list_item:\n pfam_id = list_item['@id']\n #print(list_item)\n if 'property' in list_item:\n for property_item in list_item['property']:\n if property_item.get('@type', '') == 'entry name':\n if '@value' in property_item:\n pfam_name = property_item['@value']\n\n if pfam_id != '':\n pfams[pfam_id] = pfam_name\n\n return pfams", "def _get_values(self, element):\n\n output = {}\n\n for ee in element:\n if ee.tag == 'defaultValue':\n output['defaultValue'] = ee.attrib['value']\n elif ee.tag == 'lowerValue':\n output['lowerValue'] = ee.attrib['value']\n elif ee.tag == 'upperValue':\n output['upperValue'] = ee.attrib['value']\n\n if len(output)==0:\n return None\n else:\n return output" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
(str) > bool Return True if correct mail type
def is_mail_types_correct(self, mail_type):
    return mail_type in self.mail_types
[ "def mail_type(self):\n\n if self.machine.get_error():\n return \"error\"\n\n if self.machine._ran:\n return \"success\"\n\n return None", "def is_class_mail_types_correct(self, class_mail_type):\r\n return class_mail_type in self.class_mail_types", "def get_receive_mail_str(self):\n ret = False\n if self.__mail:\n ret = True\n return ret", "def is_email(self):\n\n if 'rfc822' in os.path.basename(self.path) and not self.path.endswith('.headers'):\n try:\n if magic.from_file(self.path, mime=True) == 'message/rfc822':\n return True\n except:\n pass\n\n return False", "def translatable(self):\n if type(self.message) != str or self.message == '':\n return False\n return True if self.type in (1, 20, 26) else False", "def is_valid_mime_type_format(self, mime_type: str) -> bool:\n return mime_type in mimetypes.types_map.values()", "def verify_mail(self):\n raise NotImplementedError", "def is_email(value: t.Any) -> bool:\n return is_str(value=value, not_empty=True) and bool(EMAIL_RE.fullmatch(value))", "def check_eligible_mimetype(self, ctype, uid):\n self.helper.log_debug(\n 'check_eligible_mimtype: checking content-type %s of msg uid %s' %\n (ctype, uid))\n if ctype == \"application/zip\":\n return True\n elif ctype == \"application/gzip\":\n return True\n elif ctype == \"application/x-gzip\":\n return True\n elif ctype == \"application/octet-stream\":\n # Non-standard mimetype used by Amazon SES dmarc reports\n return True\n elif ctype == \"application-x-gzip\":\n # Non-standard mimetype used by Comcast dmarc reports\n return True\n elif ctype == \"application/x-zip-compressed\":\n # Non-standard mimetype used by Yahoo dmarc reports\n return True\n elif ctype == \"application/xml\":\n return True\n elif ctype == \"text/xml\":\n return True\n else:\n self.helper.log_debug(\n 'check_eligible_mimtype: skipping content-type %s of msg uid %s' %\n (ctype, uid))\n return False", "def is_valid_content_type(content_type: str, expected_content_type: str) -> bool:\n return (content_type is not None) and (content_type.strip().lower() == expected_content_type)", "def check_message(self):\n def check(fld_key):\n if not self[fld_key]:\n string = self._fields[fld_key].string\n raise UserError(\n _(\"%s field required to send an email.\") % string)\n if self.email_type == 'general':\n check('subject')\n check('body')\n elif self.email_type == 'scheduled':\n check('date')\n check('duration')\n check('priority')\n check('sub_subject')\n check('mail_template_id')", "def guess_mime_type_enabled(self) -> pulumi.Output[Optional[bool]]:\n return pulumi.get(self, \"guess_mime_type_enabled\")", "def is_mailing_list_message(self):\n\n\t\tif (self.email_obj.__contains__(\"Mailing-List\") or\n\t\t\tself.email_obj.__contains__(\"List-Id\") or\n\t\t\tself.email_obj.__contains__(\"List-Help\") or\n\t\t\tself.email_obj.__contains__(\"List-Post\") or\n\t\t\tself.email_obj.__contains__(\"Return-Path\") or\n\t\t\t( self.email_obj.__contains__(\"Precedence\") and\n\t\t\t\tre.match(\"list|bulk\", self.email_obj[\"Precedence\"], re.IGNORECASE)\n\t\t\t) or\n\t\t\t( self.email_obj.__contains__(\"From\") and\n\t\t\t\tre.match(\".*(majordomo|listserv|listproc|netserv|owner|bounce|mmgr|autoanswer|request|noreply|nobody).*@\", self.email_obj[\"From\"], re.IGNORECASE)\n\t\t\t)\n\t\t):\n\t\t\treturn 1\n\t\telse:\n\t\t\treturn 0", "def _is_valid_content_type_format(content_type: str) -> bool:\n return (\n _is_valid_ct(content_type)\n or _is_valid_pt(content_type)\n or _is_valid_set(content_type)\n or _is_valid_list(content_type)\n or 
_is_valid_dict(content_type)\n or _is_valid_union(content_type)\n or _is_valid_optional(content_type)\n )", "def the_mail_is_garygmailcom(context):\n context['obj'] = POM()\n context['obj'].edit_mongo_mail(\"gary@gmail.com\")", "def comprueba_mail(mail_usuario):\n arroba = mail_usuario.count('@')\n if arroba != 1 or mail_usuario.rfind('@') == (len(mail_usuario)-1):\n return False\n else: \n return True", "def strtype(x):\n if type(x) == str:\n return True\n if type(x) == unicode:\n return True\n return False", "def test_email_regex_format(self):", "def validate_type(type):\n\n types_upper = [i.upper() for i in officeTypes]\n if type.upper() in types_upper:\n return True\n return False" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Get the event information for a Betfair market ID.
def get_event_info(self, market_id: str) -> Tuple[str, str, str]:
    market_filter_ = market_filter(market_ids=[market_id])

    event_type = (
        self._client
        .betting
        .list_event_types(filter=market_filter_)[0]
        .event_type
        .name
    )

    event = (
        self._client
        .betting
        .list_events(filter=market_filter_)[0]
        .event
        .name
    )

    competition = (
        self._client
        .betting
        .list_competitions(filter=market_filter_)[0]
        .competition
        .name
    )

    return event_type, event, competition
[ "def get_market_info(\n self, market_id: str\n ) -> Tuple[str, datetime, Dict[int, str]]:\n market_filter_ = market_filter(market_ids=[market_id])\n\n market = (\n self._client\n .betting\n .list_market_catalogue(\n filter=market_filter_,\n market_projection=['MARKET_START_TIME', 'RUNNER_DESCRIPTION']\n )[0]\n )\n\n market_name = market.market_name\n market_start_time = market.market_start_time\n\n selections = {}\n for runner in market.runners:\n selections[runner.selection_id] = runner.runner_name\n\n return market_name, market_start_time, selections", "def get_event(self, event_id):\n return self._events_dict[event_id]", "def retrieve(cls, event_id):\n return Event(Requester.get(cls.endpoint + '/' + event_id))", "def get_event(event_id):\n fields = \"id,dates,title,place,description,location,price,\" \\\n \"is_free,site_url\"\n url = f\"https://kudago.com/public-api/v1.4/events/\" \\\n f\"{event_id}/?lang=&fields{fields}=&expand=\"\n request = requests.get(url).text\n logging.debug(request)\n json_event = json.loads(request).get(\"results\", None)\n\n return json_event", "def get_event_details(eventId):\n response = client.query(\n TableName=\"EventsSingleTable\",\n # IndexName='',\n Select=\"ALL_ATTRIBUTES\",\n KeyConditionExpression=\"pk = :pk\",\n ExpressionAttributeValues={\":pk\": eventId},\n )\n\n items = response[\"Items\"]\n\n # Try serializing multiple entities from a single request\n for item in items:\n if item[\"sk\"] == item[\"pk\"]:\n e = Event(**item)\n pprint.pprint(str(e))\n else:\n c = Comment(**item)\n pprint.pprint(str(c))", "def get_event_info(abi, event):\n for entry in abi:\n if entry.get(u'type', '') != u'event':\n continue\n if str(entry.get(u'name', '')) == event:\n return entry", "def _getEvent(identifier):\r\n \r\n try:\r\n return _eventMap[identifier]\r\n except KeyError:\r\n raise ScriptApiError(\"The event '%s' does not exist.\" % identifier)", "def retrieve_event(event_id, live_mode=False):\n _initialize_stripe(live_mode=live_mode)\n event = safe_stripe_call(\n stripe.Event.retrieve,\n *(\n event_id,\n )\n )\n return event", "def get_event_by_id(event_id):\n db = get_db()\n return db.execute((\n 'SELECT id, name, start_time, end_time, location '\n 'FROM event WHERE id=?'),\n (event_id,)).fetchone()", "def event_id(self) -> str:\n return self._data[EVENT_ID]", "def info_event_json(event_id):\n event = Event.query.filter_by(id=event_id).first_or_404()\n timeuntil = timesince(event.countdown, until=True)\n return jsonify(event=event.data, timeuntil=timeuntil)", "def GetEventIdentifier(self):\n return self._event_identifier", "def get_event_id(self) -> str:\n return self.__event_id", "def query_event_by_id():\n try:\n event_id = request.args['event_id']\n response = requests.put(app.config['EVENTS_ENDPOINT'] + event_id)\n if response.status_code == 200:\n return render_template(\n 'search_results.html',\n auth=is_organizer(get_user()),\n events=parse_events(response.json()),\n app_config=app.config\n )\n else:\n return 'Unable to retrieve events', 500\n except BadRequestKeyError as error:\n return f'Error: {error}.', 400", "def quote_endpoint(self, market_id):\n self._wait_before_call()\n market = self._format_market_id(market_id)\n try:\n data, meta_data = self.TS.get_quote_endpoint(\n symbol=market, outputsize=\"full\"\n )\n return data\n except:\n logging.error(\"AlphaVantage wrong api call for {}\".format(market))\n return None", "def select_event(self, event_id):\n with self.conn:\n self.c.execute(\n \"\"\"SELECT * FROM {table} WHERE {event} = 
?\"\"\".format(\n table=TABLE, event=EVENT\n ),\n (event_id,),\n )\n return self.c.fetchone()", "def event_id(self):\n return self._event_id", "def get_event_eid(eid):\n return EventModel.query.get_or_404(eid)", "async def fetch_event(self, id: int) -> Event:\n data = await self._state.http.get_clan_events(self.id, [id])\n event = data[\"events\"][0]\n return await Event(self._state, self, event)" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Get the market information from a Betfair market ID.
def get_market_info(
    self, market_id: str
) -> Tuple[str, datetime, Dict[int, str]]:
    market_filter_ = market_filter(market_ids=[market_id])

    market = (
        self._client
        .betting
        .list_market_catalogue(
            filter=market_filter_,
            market_projection=['MARKET_START_TIME', 'RUNNER_DESCRIPTION']
        )[0]
    )

    market_name = market.market_name
    market_start_time = market.market_start_time

    selections = {}
    for runner in market.runners:
        selections[runner.selection_id] = runner.runner_name

    return market_name, market_start_time, selections
[ "def getMarket(self):\n return self.market", "def query_bid_price(market_data):\n print(\"Consultando BID\")\n if market_data[\"marketData\"][\"BI\"]:\n bid_price = market_data[\"marketData\"][\"BI\"][0][\"price\"]\n print(f\"Precio de BID: ${bid_price:,.2f}\".replace('.', ','))\n return bid_price\n print(\"No hay BIDs activos\")\n return None", "def market(self):\n return self._market", "def market_info(self, symbol):\n r = requests.get(self.base_url + f'/game/locations/{symbol}/marketplace', headers = self.auth_header)\n return r.text", "def market_id(self) -> int:\n return self.order.market_id", "def quote_endpoint(self, market_id):\n self._wait_before_call()\n market = self._format_market_id(market_id)\n try:\n data, meta_data = self.TS.get_quote_endpoint(\n symbol=market, outputsize=\"full\"\n )\n return data\n except:\n logging.error(\"AlphaVantage wrong api call for {}\".format(market))\n return None", "def get_market_orderbook(self, market):\n return self.__call__('market', 'getmarketorderbook',\n {'marketname': market})", "def get_markets(self):\n\n #\n\n result = self.api_query('info')\n detail = []\n for key, value in result['pairs'].items():\n IsActive = False\n if value['hidden'] ==0:\n IsActive = True\n dict_result = {'MarketCurrency':key.split('_')[0],'BaseCurrency': key.split('_')[1], 'MarketName':key,'IsActive':IsActive}\n detail.append(dict_result)\n\n result={'success' : True, 'message':'', 'result':detail}\n return result", "def get_event_info(self, market_id: str) -> Tuple[str, str, str]:\n market_filter_ = market_filter(market_ids=[market_id])\n\n event_type = (\n self._client\n .betting\n .list_event_types(filter=market_filter_)[0]\n .event_type\n .name\n )\n\n event = (\n self._client\n .betting\n .list_events(filter=market_filter_)[0]\n .event\n .name\n )\n\n competition = (\n self._client\n .betting\n .list_competitions(filter=market_filter_)[0]\n .competition\n .name\n )\n\n return event_type, event, competition", "def populate(self):\n\n endpoint = \"https://bittrex.com/api/v1.1/public/getmarketsummaries\"\n self.prices = {'TIME': [time.time()], 'GLOBAL': [self.get_global()]}\n\n try:\n markets = requests.get(endpoint).json()[\"result\"]\n for market in markets:\n symbol = str(market[\"MarketName\"])\n BID = market[\"Bid\"]\n ASK = market[\"Ask\"]\n VOL = market[\"Volume\"]\n if symbol in self.ignore or symbol[:3] != \"BTC\":\n pass\n else:\n self.prices[symbol] = [(BID, ASK, VOL)]\n except Exception as e:\n raise Exception('Failed to get markets from', e)", "def query_market_data(self, kind_of_price):\n market_data = pyRofex.get_market_data(\n ticker=self.symbol,\n entries=[kind_of_price]\n )\n return market_data", "async def fetch_markets(self, params={}):\n response = await self.publicGetInstrumentActiveAndIndices(params)\n #\n # [\n # {\n # \"symbol\": \"LTCUSDT\",\n # \"rootSymbol\": \"LTC\",\n # \"state\": \"Open\",\n # \"typ\": \"FFWCSX\",\n # \"listing\": \"2021-11-10T04:00:00.000Z\",\n # \"front\": \"2021-11-10T04:00:00.000Z\",\n # \"expiry\": null,\n # \"settle\": null,\n # \"listedSettle\": null,\n # \"relistInterval\": null,\n # \"inverseLeg\": \"\",\n # \"sellLeg\": \"\",\n # \"buyLeg\": \"\",\n # \"optionStrikePcnt\": null,\n # \"optionStrikeRound\": null,\n # \"optionStrikePrice\": null,\n # \"optionMultiplier\": null,\n # \"positionCurrency\": \"LTC\", # can be empty for spot markets\n # \"underlying\": \"LTC\",\n # \"quoteCurrency\": \"USDT\",\n # \"underlyingSymbol\": \"LTCT=\", # can be empty for spot markets\n # \"reference\": \"BMEX\",\n # 
\"referenceSymbol\": \".BLTCT\", # can be empty for spot markets\n # \"calcInterval\": null,\n # \"publishInterval\": null,\n # \"publishTime\": null,\n # \"maxOrderQty\": 1000000000,\n # \"maxPrice\": 1000000,\n # \"lotSize\": 1000,\n # \"tickSize\": 0.01,\n # \"multiplier\": 100,\n # \"settlCurrency\": \"USDt\", # can be empty for spot markets\n # \"underlyingToPositionMultiplier\": 10000,\n # \"underlyingToSettleMultiplier\": null,\n # \"quoteToSettleMultiplier\": 1000000,\n # \"isQuanto\": False,\n # \"isInverse\": False,\n # \"initMargin\": 0.03,\n # \"maintMargin\": 0.015,\n # \"riskLimit\": 1000000000000, # can be null for spot markets\n # \"riskStep\": 1000000000000, # can be null for spot markets\n # \"limit\": null,\n # \"capped\": False,\n # \"taxed\": True,\n # \"deleverage\": True,\n # \"makerFee\": -0.0001,\n # \"takerFee\": 0.0005,\n # \"settlementFee\": 0,\n # \"insuranceFee\": 0,\n # \"fundingBaseSymbol\": \".LTCBON8H\", # can be empty for spot markets\n # \"fundingQuoteSymbol\": \".USDTBON8H\", # can be empty for spot markets\n # \"fundingPremiumSymbol\": \".LTCUSDTPI8H\", # can be empty for spot markets\n # \"fundingTimestamp\": \"2022-01-14T20:00:00.000Z\",\n # \"fundingInterval\": \"2000-01-01T08:00:00.000Z\",\n # \"fundingRate\": 0.0001,\n # \"indicativeFundingRate\": 0.0001,\n # \"rebalanceTimestamp\": null,\n # \"rebalanceInterval\": null,\n # \"openingTimestamp\": \"2022-01-14T17:00:00.000Z\",\n # \"closingTimestamp\": \"2022-01-14T18:00:00.000Z\",\n # \"sessionInterval\": \"2000-01-01T01:00:00.000Z\",\n # \"prevClosePrice\": 138.511,\n # \"limitDownPrice\": null,\n # \"limitUpPrice\": null,\n # \"bankruptLimitDownPrice\": null,\n # \"bankruptLimitUpPrice\": null,\n # \"prevTotalVolume\": 12699024000,\n # \"totalVolume\": 12702160000,\n # \"volume\": 3136000,\n # \"volume24h\": 114251000,\n # \"prevTotalTurnover\": 232418052349000,\n # \"totalTurnover\": 232463353260000,\n # \"turnover\": 45300911000,\n # \"turnover24h\": 1604331340000,\n # \"homeNotional24h\": 11425.1,\n # \"foreignNotional24h\": 1604331.3400000003,\n # \"prevPrice24h\": 135.48,\n # \"vwap\": 140.42165,\n # \"highPrice\": 146.42,\n # \"lowPrice\": 135.08,\n # \"lastPrice\": 144.36,\n # \"lastPriceProtected\": 144.36,\n # \"lastTickDirection\": \"MinusTick\",\n # \"lastChangePcnt\": 0.0655,\n # \"bidPrice\": 143.75,\n # \"midPrice\": 143.855,\n # \"askPrice\": 143.96,\n # \"impactBidPrice\": 143.75,\n # \"impactMidPrice\": 143.855,\n # \"impactAskPrice\": 143.96,\n # \"hasLiquidity\": True,\n # \"openInterest\": 38103000,\n # \"openValue\": 547963053300,\n # \"fairMethod\": \"FundingRate\",\n # \"fairBasisRate\": 0.1095,\n # \"fairBasis\": 0.004,\n # \"fairPrice\": 143.811,\n # \"markMethod\": \"FairPrice\",\n # \"markPrice\": 143.811,\n # \"indicativeTaxRate\": null,\n # \"indicativeSettlePrice\": 143.807,\n # \"optionUnderlyingPrice\": null,\n # \"settledPriceAdjustmentRate\": null,\n # \"settledPrice\": null,\n # \"timestamp\": \"2022-01-14T17:49:55.000Z\"\n # }\n # ]\n #\n result = []\n for i in range(0, len(response)):\n market = response[i]\n id = self.safe_string(market, 'symbol')\n baseId = self.safe_string(market, 'underlying')\n quoteId = self.safe_string(market, 'quoteCurrency')\n settleId = self.safe_string(market, 'settlCurrency')\n base = self.safe_currency_code(baseId)\n quote = self.safe_currency_code(quoteId)\n settle = self.safe_currency_code(settleId)\n # 'positionCurrency' may be empty(\"\", currently returns for ETHUSD)\n # so let's take the settlCurrency first and then adjust 
if needed\n typ = self.safe_string(market, 'typ') # type definitions at: https://www.bitmex.com/api/explorer/#not /Instrument/Instrument_get\n types = {\n 'FFWCSX': 'swap',\n 'FFWCSF': 'swap',\n 'IFXXXP': 'spot',\n 'FFCCSX': 'future',\n 'MRBXXX': 'index',\n 'MRCXXX': 'index',\n 'MRFXXX': 'index',\n 'MRRXXX': 'index',\n 'MRIXXX': 'index',\n }\n type = self.safe_string(types, typ, typ)\n swap = type == 'swap'\n future = type == 'future'\n spot = type == 'spot'\n contract = swap or future\n contractSize = None\n index = type == 'index'\n isInverse = self.safe_value(market, 'isInverse') # self is True when BASE and SETTLE are same, i.e. BTC/XXX:BTC\n isQuanto = self.safe_value(market, 'isQuanto') # self is True when BASE and SETTLE are different, i.e. AXS/XXX:BTC\n linear = (not isInverse and not isQuanto) if contract else None\n status = self.safe_string(market, 'state')\n active = status != 'Unlisted'\n expiry = None\n expiryDatetime = None\n symbol = None\n if spot:\n symbol = base + '/' + quote\n elif contract:\n symbol = base + '/' + quote + ':' + settle\n multiplierString = Precise.string_abs(self.safe_string(market, 'multiplier'))\n if linear:\n contractSize = self.parse_number(Precise.string_div('1', market['underlyingToPositionMultiplier']))\n else:\n contractSize = self.parse_number(multiplierString)\n if future:\n expiryDatetime = self.safe_string(market, 'expiry')\n expiry = self.parse8601(expiryDatetime)\n symbol = symbol + '-' + self.yymmdd(expiry)\n else:\n # for index/exotic markets, default to id\n symbol = id\n positionId = self.safe_string_2(market, 'positionCurrency', 'underlying')\n position = self.safe_currency_code(positionId)\n positionIsQuote = (position == quote)\n maxOrderQty = self.safe_number(market, 'maxOrderQty')\n initMargin = self.safe_string(market, 'initMargin', '1')\n maxLeverage = self.parse_number(Precise.string_div('1', initMargin))\n result.append({\n 'id': id,\n 'symbol': symbol,\n 'base': base,\n 'quote': quote,\n 'settle': settle,\n 'baseId': baseId,\n 'quoteId': quoteId,\n 'settleId': settleId,\n 'type': type,\n 'spot': spot,\n 'margin': False,\n 'swap': swap,\n 'future': future,\n 'option': False,\n 'index': index,\n 'active': active,\n 'contract': contract,\n 'linear': linear,\n 'inverse': isInverse,\n 'quanto': isQuanto,\n 'taker': self.safe_number(market, 'takerFee'),\n 'maker': self.safe_number(market, 'makerFee'),\n 'contractSize': contractSize,\n 'expiry': expiry,\n 'expiryDatetime': expiryDatetime,\n 'strike': self.safe_number(market, 'optionStrikePrice'),\n 'optionType': None,\n 'precision': {\n 'amount': self.safe_number(market, 'lotSize'),\n 'price': self.safe_number(market, 'tickSize'),\n 'quote': self.safe_number(market, 'tickSize'),\n 'base': self.safe_number(market, 'tickSize'),\n },\n 'limits': {\n 'leverage': {\n 'min': self.parse_number('1') if contract else None,\n 'max': maxLeverage if contract else None,\n },\n 'amount': {\n 'min': None,\n 'max': None if positionIsQuote else maxOrderQty,\n },\n 'price': {\n 'min': None,\n 'max': self.safe_number(market, 'maxPrice'),\n },\n 'cost': {\n 'min': None,\n 'max': maxOrderQty if positionIsQuote else None,\n },\n },\n 'info': market,\n })\n return result", "def bsp_market(self):\n return self._bsp_market", "def fetch_price():\n\n url = \"https://www.bitstamp.net/api/ticker/\"\n\n response = json.load(urllib2.urlopen(url))\n\n return {\"buy\": response['ask'], \"sell\": response['bid']}", "def get_coins_bittrex():\n\t\t\n\tendpoint = 
\"https://bittrex.com/api/v1.1/public/getmarkets\"\n\ttry:\n\t\tmarkets = requests.get(endpoint).json()[\"result\"]\n\t\tfor market in markets:\n\t\t\tsymbol = market[\"MarketCurrency\"]\n\t\t\tname = market[\"MarketCurrencyLong\"].lower()\n\t\t\tsymbol_name[symbol] = name\n\t\t\tname_symbol[name] = symbol\n\t\t# print(f'Found {len(markets)} markets.')\n\texcept Exception as e:\n\t\tprint(f'Failed to get markets from {endpoint} ({e})')", "def get_market_data(market, tag=True):\n # market_data = pd.read_html(\"https://coinmarketcap.com/currencies/\" + market +\n # \"/historical-data/?start=20130428&end=\"+time.strftime(\"%Y%m%d\"), flavor='html5lib')[0]\n market_data = pd.read_html(\"https://coinmarketcap.com/currencies/\" + market +\n \"/historical-data/?start=20180412&end=\"+time.strftime(\"%Y%m%d\"), flavor='html5lib')[0]\n market_data.rename(columns={'Open*': 'Open', 'Close**': 'Close'}, inplace=True)\n market_data = market_data.assign(Date=pd.to_datetime(market_data['Date']))\n # print('transferred date market_data is', market_data)\n market_data['Volume'] = (pd.to_numeric(market_data['Volume'], errors='coerce').fillna(0))\n # print('transferred volume market_data is', market_data)\n if tag:\n market_data.columns = [market_data.columns[0]] + [tag + '_' + i for i in market_data.columns[1:]]\n print('mark tag transferred volume market_data is \\n', market_data)\n return market_data", "def get_market_page(session, app, item):\n response = session.get('https://steamcommunity.com/market/pricehistory/?appid={}&market_hash_name={}'.format(app, url(item)))\n if response.status_code == 500: # Some items I have aren't actually on the market and they return 500\n return 'skip'\n elif response.status_code != 200:\n return None\n else:\n return response", "def get_active_market_street(market):\r\n return market[-1]", "def get_market(market_type, **kwargs):\n if market_type == 'pyext':\n return ExternalMarket(**kwargs)\n elif market_type == 'cyext':\n return CyExternalMarket(**kwargs)\n else:\n raise NotImplementedError(market_type)" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Stop a running Betfair ladder stream.
def stop_betfair_ladder_stream(self) -> None:
    if self.stream is not None:
        logger.info("Stopping the Betfair market stream.")
        self.stream.stop()
        self.stream.listener.output_queue.put("Terminate")
        self.stream = None
    else:
        logger.info("No existing Betfair market stream to stop.")
[ "def stop_stream(self):\n pass", "def stop(self):\n\t\tself.stream.stop_stream()", "def stop(self):\n self.stream.stop()\n self.running = False", "def stop(self):\n self.streaming_context.stop()", "def stop(self) -> None:\n self._stopping = True\n for stream in self._streams:\n if stream is not None:\n stream.stop()", "async def stop_livestream(self):\n await self.api.stop_livestream(self.product_type, self.serial_no)\n if self.p2p_stream_thread.is_alive() is True:\n await self.p2p_stream_handler.stop()", "def stop(self):\n self._stop_flag = True", "def stop(self):\n self.__running = False", "def stop():\r\n\t\tglobal running\r\n\t\trunning = False", "def stop(self):\n self.running = False\n self.hop_channel(\"auto\")", "def stop(self) -> None:\n self.is_running = False", "def stop(self):\n return _limesdr_swig.sink_sptr_stop(self)", "def stop(self):\n return _qtgui_swig.waterfall_sink_f_sptr_stop(self)", "async def stop(self) -> None:\n c_future = tankerlib.tanker_stop(self.c_tanker)\n await ffihelpers.handle_tanker_future(c_future)", "def _stop(self):\n\n self.streaming_pull_future.cancel() # Trigger the shutdown.\n self.streaming_pull_future.result() # Block until the shutdown is complete.", "def stop_video_stream(self):\n return self._compare_and_set_streaming(True, False)", "def stop(self):\n return _qtgui_swig.waterfall_sink_c_sptr_stop(self)", "def stop(self):\n\t\tself._run_flag = False\n\t\tself.wait()", "def stop(self):\n self._run_flag = False\n self.wait()" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Returns class by interpreting input string as module path and class name. Module path should be separated by dots as usual. Separate class name from module by '/'.
def get_class(string):
    logger = logman.getLogger(__name__)
    if '/' not in string:
        logger.error("The string is not properly formatted. Use '/' to separate module path from classname. String is: {}".format(string))
        return
    module_name, class_name = string.split('/')
    try:
        logger.debug('Retrieving class {} from module {}'.format(class_name, module_name))
        temp_class = getattr(importlib.import_module(module_name), class_name)
    except ModuleNotFoundError:
        logger.error("Module not found: {}".format(module_name))
        raise
    except AttributeError:
        logger.error("Class not found: {}".format(class_name))
        raise
    except:
        logger.error("Unexpected error while loading {}".format(string))
        raise
    return temp_class
[ "def str_to_class(string: str):\n idx_dot = string.rfind('.')\n if idx_dot < 0:\n module_str = 'fnet.nn_modules'\n class_str = string\n else:\n module_str = string[:idx_dot]\n class_str = string[idx_dot + 1:]\n module = importlib.import_module(module_str)\n return getattr(module, class_str)", "def stringToClass(cls_str):\n import_stg1 = cls_str.split(\" \")[1]\n import_stg2 = import_stg1.replace(\"'\", \"\")\n import_stg3 = import_stg2.replace(\">\", \"\")\n import_parse = import_stg3.split(\".\")\n cls = import_parse[-1]\n import_path = '.'.join(import_parse[:-1])\n import_statement = \"from %s import %s\" % (import_path, cls)\n exec(import_statement)\n assign_statement = \"this_class = %s\" % cls\n exec(assign_statement)\n return this_class", "def get_class_name_from_module_name(module_name):\n return module_name[0].upper() + (module_name[1:]).rstrip('1234567890')", "def load_class(module_and_name): \n module, name = module_and_name.rsplit('.', 1)\n __import__(module)\n return getattr(sys.modules[module], name)", "def str_to_class(referance_name):\n return getattr(sys.modules[__name__], referance_name)", "def load_class(\n fully_qualified_class_name: str\n):\n\n (module_name, fully_qualified_class_name) = fully_qualified_class_name.rsplit('.', 1)\n module_ref = importlib.import_module(module_name)\n class_ref = getattr(module_ref, fully_qualified_class_name)\n\n return class_ref", "def create_class_from_strings( self, module_name, class_name):\r\n if not( self.logger is None ):\r\n self.logger.debug( \"create class {module_name} {class_name}\" )\r\n\r\n# print( \"create class \" + module_name + \" \" + class_name )\r\n\r\n a_class = getattr( importlib.import_module(module_name), class_name )\r\n instance = a_class( )\r\n return instance", "def get_class_from_string(self, classname, module):\n\n myclass = None\n try:\n # Meta language for dinamically import\n myclass = getattr(module, classname)\n except AttributeError as e:\n logger.critical(\"Failed to load resource: \" + str(e))\n\n return myclass", "def import_string(path: str):\n if \".\" not in path:\n return __import__(path)\n\n module_name, class_name = path.rsplit(\".\", 1)\n\n module = __import__(module_name, {}, {}, [class_name])\n try:\n return getattr(module, class_name)\n\n except AttributeError as exc:\n raise ImportError from exc", "def process_path(module_path):\n if module_path == 'numpy.ndarray':\n return 'StorageNumpy', 'hecuba.hnumpy'\n last = 0\n for key, i in enumerate(module_path):\n if i == '.' and key > last:\n last = key\n module = module_path[:last]\n class_name = module_path[last + 1:]\n return class_name, module", "def get_class_by_path(cls_path):\n path_fragments = cls_path.split('.')\n cls_name = path_fragments.pop()\n module_path = '.'.join(path_fragments)\n module = import_module(module_path)\n return getattr(module, cls_name)", "def process_path(module_path):\n\n if module_path == 'numpy.ndarray':\n return 'StorageNumpy', 'hecuba.hnumpy'\n if module_path == 'StorageDict':\n return 'StorageDict', 'hecuba.hdict'\n last = 0\n for key, i in enumerate(module_path):\n if i == '.' 
and key > last:\n last = key\n module = module_path[:last]\n class_name = module_path[last + 1:]\n return class_name, module", "def import_class(path):\n if ':' in path:\n module_path, class_name = path.split(':')\n else:\n module_path, class_name = path.rsplit('.', 1)\n\n module = __import__(module_path, fromlist=[class_name], level=0)\n return getattr(module, class_name)", "def _get_field_class(self, class_str):\n field_class_path = str(class_str).split('.')\n if len(field_class_path) > 1:\n field_module_name = '.'.join(field_class_path[:-1])\n else:\n field_module_name = '.'\n\n field_module = __import__(field_module_name, {}, {},\n field_class_path[-1])\n return getattr(field_module, field_class_path[-1])", "def load_class(path):\r\n\r\n mod_name, klass_name = path.rsplit('.', 1)\r\n\r\n try:\r\n mod = import_module(mod_name)\r\n except AttributeError as e:\r\n raise ImproperlyConfigured('Error importing {0}: \"{1}\"'.format(mod_name, e))\r\n\r\n try:\r\n klass = getattr(mod, klass_name)\r\n except AttributeError:\r\n raise ImproperlyConfigured('Module \"{0}\" does not define a \"{1}\" class'.format(mod_name, klass_name))\r\n\r\n return klass", "def get_klass_from_str(klass_as_str):\n return _get_klass_or_func_from_str_impl(klass_as_str,\n lambda x: inspect.isclass(x))", "def classname(path):\n return os.path.normpath(path).replace(os.sep, '.')", "def getClassFromName(classname):\n try:\n parts = classname.split('.')\n module = \".\".join(parts[:-1])\n m = __import__( module )\n for comp in parts[1:]:\n m = getattr(m, comp) \n return m\n except ImportError:\n print \"Unable to import %s\" % classname", "def construct_class_by_name(name, *args, **kwargs):\n parts = name.split('.')\n module_name, class_name = '.'.join(parts[:-1]), parts[-1]\n module = importlib.import_module(module_name)\n return getattr(module, class_name)(*args, **kwargs)" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
FresnelReflection takes the smallest angle between the ray direction and the normal. Thus the flipped normal will also work.
def test_antinormal_reflection(self):
    n1 = 1.0
    n2 = 1.5
    normal = (0.0, 0.0, -1.0)
    angle = 0.0
    ray = Ray(position=(0.0, 0.0, 0.0), direction=(0.0, 0.0, 1.0), wavelength=None)
    fresnel = FresnelReflection()
    assert np.isclose(fresnel.reflectivity(angle, n1, n2), 0.04)
    new_ray = fresnel.transform(ray, {"normal": normal})
    assert np.allclose(flip(ray.direction), new_ray.direction)
[ "def reflection_normal(outgoing_ray, incoming_ray):\n ray1 = normalize(-incoming_ray)\n ray2 = normalize(outgoing_ray)\n return normalize((ray1 + ray2)/2)", "def reflectivity(self, surface, ray, geometry, container, adjacent):\n # Get the surface normal to determine which surface has been hit.\n normal = geometry.normal(ray.position)\n \n # Normal are outward facing\n TOP_SURFACE = (0, 0, 1)\n \n # If a ray hits the top surface where x > 0 and y > 0 reflection\n # set the reflectivity to 1.\n if np.allclose(normal, TOP_SURFACE):\n x, y = ray.position[0], ray.position[1]\n if x > 0 and y > 0:\n return 1.0\n \n # Otherwise return the Frensel reflection probability.\n return super(PartialTopSurfaceMirror, self).reflectivity(surface, ray, geometry, container, adjacent) # opt-out of handling custom reflection", "def refract(ray, normal, origin_index, final_index):\n rho = final_index / origin_index\n ray_direction = normalize(ray)\n normal = normalize(normal)\n if normal.dot(ray_direction) > 0:\n normal = -normal\n incidence = dot(-ray_direction, normal)\n complement = (1.0 - (1.0 - incidence**2) / rho**2)**(0.5)\n return (ray_direction / rho) + ((incidence / rho - complement) * normal)", "def refract(self, ray, rho):\n normal = self.normal(ray.position)\n if normal.dot(ray.direction) > 0:\n normal = -normal\n incidence = dot(-ray.direction, normal)\n complement = sqrt(1.0 - (1.0 - incidence**2) / rho**2)\n return Ray((ray.direction / rho +\n (incidence / rho - complement) * normal), ray.position)", "def test_reflection_vector(self):\n\n # A ray approaching at 45 degrees\n v = vectors.Vector(1, -1, 0)\n n = vectors.Vector(0, 1, 0)\n r = v.reflect(n)\n self.assertEqual(r, vectors.Vector(1, 1, 0))\n\n # Ray along an axis hits a surface at an angle\n v = vectors.Vector(0, -1, 0)\n n = vectors.Vector(math.sqrt(2)/2, math.sqrt(2)/2, 0)\n r = v.reflect(n)\n self.assertEqual(r, vectors.Vector(1, 0, 0))", "def reflect_step(r0, step, intersection, normal_vector, step_length):\n \n # Calculate distance to intersection point and update step length\n step_length -= math.sqrt((r0[0] - intersection[0])**2 + (r0[1] - intersection[1])**2 + (r0[2] - intersection[2])**2)\n \n # Calculate reflection off the surface\n reflected_x = -r0[0] + 2*intersection[0] + 2*normal_vector[0]*((r0[0] - intersection[0])*normal_vector[0] + (r0[1] - intersection[1])*normal_vector[1] + (r0[2] - intersection[2])*normal_vector[2])\n reflected_y = -r0[1] + 2*intersection[1] + 2*normal_vector[1]*((r0[0] - intersection[0])*normal_vector[0] + (r0[1] - intersection[1])*normal_vector[1] + (r0[2] - intersection[2])*normal_vector[2])\n reflected_z = -r0[2] + 2*intersection[2] + 2*normal_vector[2]*((r0[0] - intersection[0])*normal_vector[0] + (r0[1] - intersection[1])*normal_vector[1] + (r0[2] - intersection[2])*normal_vector[2])\n \n # Update step direction and spin position\n step[0] = reflected_x - intersection[0]\n step[1] = reflected_y - intersection[1]\n step[2] = reflected_z - intersection[2]\n normalizing_factor = math.sqrt(step[0]**2+step[1]**2+step[2]**2)\n step[0] /= normalizing_factor \n step[1] /= normalizing_factor \n step[2] /= normalizing_factor \n \n epsilon = 1e-6\n \n r0[0] = intersection[0] + epsilon*step_length*step[0]\n r0[1] = intersection[1] + epsilon*step_length*step[1]\n r0[2] = intersection[2] + epsilon*step_length*step[2]\n \n return", "def unitsurfacenormal(self, ray):\t\n\t\tif self.s == \"plane\":\n\t\t\treturn np.array([0,0,-1])\n\t\telse:\n\t\t\tQ = self.intercept(ray)\n\t\t\tsurface_normal = Q - 
self.centre\n\t\t\treturn surface_normal/np.sqrt(sum(n**2 for n in surface_normal))", "def _cuda_reflection(r0, step, d, normal, epsilon):\n intersection = cuda.local.array(3, numba.float64)\n v = cuda.local.array(3, numba.float64)\n for i in range(3):\n intersection[i] = r0[i] + d * step[i]\n v[i] = intersection[i] - r0[i]\n dp = _cuda_dot_product(v, normal)\n if dp > 0: # Make sure the normal vector points against the step\n for i in range(3):\n normal[i] *= -1\n dp = _cuda_dot_product(v, normal)\n for i in range(3):\n step[i] = (v[i] - 2 * dp * normal[i] + intersection[i]) - intersection[i]\n _cuda_normalize_vector(step)\n for i in range(3): # Move walker slightly away from the surface\n r0[i] = intersection[i] + epsilon * normal[i]\n return", "def reconstruct_mirror_normal(\n outgoing_ray, incoming_ray, surface_normal, inside_index, outside_index):\n outgoing_ray = normalize(outgoing_ray)\n incoming_ray = normalize(incoming_ray)\n surface_normal = normalize(surface_normal)\n inner_downstream = -refract(\n -outgoing_ray, surface_normal, outside_index, inside_index)\n inner_upstream = refract(\n incoming_ray, surface_normal, outside_index, inside_index)\n return reflection_normal(inner_downstream, inner_upstream)", "def reflect(self, normal) -> 'Vector':\n # r = i - (2 * n * dot(i, n))\n i = self\n n = normal\n r = i - (2 * n * i.dot(n))\n return r", "def test_fresnel_propagate_direct_back_and_forward():\n npix = 1024\n wavelen = 2200 * u.nm\n wf = fresnel.FresnelWavefront(\n 0.5 * u.m, wavelength=wavelen, npix=npix, oversample=4\n )\n wf *= optics.CircularAperture(radius=0.5)\n z = ((wf.pixelscale * u.pix) ** 2 * wf.n / (2200 * u.nm)).to(u.m)\n start = wf.wavefront.copy()\n wf.propagate_direct(-z)\n wf.propagate_direct(z)\n xp.testing.assert_array_almost_equal(wf.wavefront, start)", "def invert_normal(plane):\n # flip the normal, and the distance\n return -plane", "def op_fresnel_reflection(m, theta):\n rho_p = pypolar.fresnel.r_par_amplitude(m, theta)\n rho_s = pypolar.fresnel.r_per_amplitude(m, theta)\n a = abs(rho_s)**2 + abs(rho_p)**2\n b = abs(rho_s)**2 - abs(rho_p)**2\n c = 2 * rho_s * rho_p\n mat = np.array([[a, b, 0, 0],\n [b, a, 0, 0],\n [0, 0, c, 0],\n [0, 0, 0, c]])\n return 0.5 * mat", "def test_fresnel_propagate_direct_forward_and_back():\n npix = 1024\n wavelen = 2200 * u.nm\n wf = fresnel.FresnelWavefront(\n 0.5 * u.m, wavelength=wavelen, npix=npix, oversample=4\n )\n wf *= optics.CircularAperture(radius=0.5)\n z = ((wf.pixelscale * u.pix) ** 2 * wf.n / (2200 * u.nm)).to(u.m)\n start = wf.wavefront.copy()\n wf.propagate_direct(z)\n wf.propagate_direct(-z)\n\n xp.testing.assert_array_almost_equal(wf.wavefront, start)", "def test_fresnel_propagate_direct_2forward_and_back():\n npix = 1024\n wavelen = 2200 * u.nm\n wf = fresnel.FresnelWavefront(\n 0.5 * u.m, wavelength=wavelen, npix=npix, oversample=4\n )\n wf *= optics.CircularAperture(radius=0.5)\n z = ((wf.pixelscale * u.pix) ** 2 * wf.n / (2200 * u.nm)).to(u.m)\n\n wf.propagate_direct(z)\n start = wf.wavefront.copy()\n wf.propagate_direct(z)\n wf.propagate_direct(-z)\n xp.testing.assert_array_almost_equal(wf.wavefront, start)", "def propagate(self, ray, index_0, index_1):\n if self._reflective:\n return self.reflect(ray)\n else:\n return self.refract(ray, index_1/index_0)", "def far_clipping_face(self):\n pln = self.tripod.plane\n l, r, b, t, n, f = self.body.dim\n if self.body.fshape == 'p':\n d = f - n\n # far face dimensions\n l, r, b, t = [(i * d) / n + i for i in (l, r, b, t)]\n face = gt.Plin((l, b, -f), (r, b, 
-f), (r, t, -f), (l, t, -f))\n return pln.TM * face", "def _normalize_raybundle(self, ray_bundle: RayBundle):\n ray_bundle = ray_bundle._replace(directions=torch.nn.functional.normalize(ray_bundle.directions, dim=-1))\n return ray_bundle", "def get_reflection_specular(self, theta_rad):\n cos_factor = self._cos_factor(theta_rad)\n index = np.argwhere( (self.N1==0) & (self.N2==0)).flatten()[0]\n rt = np.square(np.abs(self.E_strength[index,:]))\n return np.sum(rt*cos_factor[index])" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Tests the API endpoint to get hashrate resale details with missing field
def test_mining_hashrate_resale_details_with_missing_field(params):
    client = Client(key, secret)
    client.mining_hashrate_resale_details.when.called_with(**params).should.throw(
        ParameterRequiredError
    )
[ "def test_mining_hashrate_resale_details():\n\n client = Client(key, secret)\n response = client.mining_hashrate_resale_details(123, \"user_name\")\n response.should.equal(mock_item)", "def test_retire_rate_plan(self):\n pass", "def test_api_projects_id_rates_get(self):\n pass", "def test_validation_get_valid_resampling(self):\n self.assertIsInstance(api.validation.fetch_resampling(), dict)", "def test_get_rate_article_not_found(self):\n self.client.credentials(HTTP_AUTHORIZATION='Bearer ' + self.token)\n response = self.client.get(\n self.view_rates_url + str(2) + \"/\",\n format='json')\n self.assertEqual(\n 0,\n response.data[\"rates\"])\n self.assertEqual(204, status.HTTP_204_NO_CONTENT)", "def test_get_details7(self):\n pass", "def test_api_erx_get_new_get(self):\n pass", "def test_rap_get(self):\n\n # the function to be tested:\n rap1 = self.urihandler.get(self.hmc,\n '/api/cpcs/1/reset-activation-profiles/r1',\n True)\n\n exp_rap1 = {\n 'name': 'r1',\n 'class': 'reset-activation-profile',\n 'parent': '/api/cpcs/1',\n 'element-uri': '/api/cpcs/1/reset-activation-profiles/r1',\n 'description': 'Reset profile #1 in CPC #1',\n }\n assert rap1 == exp_rap1", "def test_get_rate_article(self):\n self.client.credentials(HTTP_AUTHORIZATION='Bearer ' + self.token)\n self.client.post(\n self.rate_url,\n self.rate_details,\n format='json')\n self.client.credentials(HTTP_AUTHORIZATION='Bearer ' + self.token_2)\n self.rate_details[\"user\"]['rate'] = 4\n self.client.post(\n self.rate_url,\n self.rate_details,\n format='json')\n response = self.client.get(\n self.view_rates_url + str(1) + \"/\",\n format='json')\n\n self.assertEqual(response.status_code, status.HTTP_200_OK)", "def test_get_business_exchange_rates(self):\n pass", "def test_get_exchange_rates(self):\n pass", "def test_api_v1_defenders_get(self):\n pass", "def test_mining_hashrate_resale_cancellation():\n\n client = Client(key, secret)\n response = client.mining_hashrate_resale_cancellation(123, \"user_name\")\n response.should.equal(mock_item)", "def test_get_without_price(self):\n #Delete the view cost permission from the user\n self.user.user_permissions.remove(Permission.objects.get(codename='view_cost', content_type=self.ct))\n \n #tests the response\n resp = self.client.get('/api/v1/fabric/1/')\n self.assertEqual(resp.status_code, 200)\n \n #Tests the data returned\n obj = resp.data\n self.assertNotIn(\"cost\", obj)", "def test_recharge_transactions_get(self):\n pass", "def test_get_exchange_rates(self):\n test_service.get_exchange_rates(self)\n\n headers = { \n 'Accept': 'application/json',\n }\n response = self.client.open(\n '/{currency}/rates/{height}'.format(currency='btc', height=1),\n method='GET',\n headers=headers)\n self.assert200(response,\n 'Response body is : ' + response.data.decode('utf-8'))", "def test_api_v1_defenders_summary_get(self):\n pass", "def test_get_without_price(self):\n #Delete the view cost permission from the user\n self.user.user_permissions.remove(Permission.objects.get(codename='view_cost', content_type=self.ct))\n \n #tests the response\n resp = self.client.get('/api/v1/supply/1/')\n self.assertEqual(resp.status_code, 200)\n \n #Tests the data returned\n obj = resp.data\n self.assertNotIn(\"cost\", obj)", "def test_retrieve_list_resgate_to_user_authenticated(self):\n sample_resgate(user=self.user, value=500)\n sample_resgate(user=self.user, value=200)\n\n response = self.client.get(RESGATE_URL)\n\n resgates = Resgate.objects.all().order_by('quantity')\n serializer = 
ResgateSerializer(resgates, many=True)\n self.assertEqual(response.status_code, status.HTTP_200_OK)\n self.assertEqual(response.data, serializer.data)" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Tests the API endpoint to get hashrate resale details
def test_mining_hashrate_resale_details():
    client = Client(key, secret)
    response = client.mining_hashrate_resale_details(123, "user_name")
    response.should.equal(mock_item)
[ "def test_get_exchange_rates(self):\n test_service.get_exchange_rates(self)\n\n headers = { \n 'Accept': 'application/json',\n }\n response = self.client.open(\n '/{currency}/rates/{height}'.format(currency='btc', height=1),\n method='GET',\n headers=headers)\n self.assert200(response,\n 'Response body is : ' + response.data.decode('utf-8'))", "def test_retire_rate_plan(self):\n pass", "def test_retrieve_list_resgate_to_user_authenticated(self):\n sample_resgate(user=self.user, value=500)\n sample_resgate(user=self.user, value=200)\n\n response = self.client.get(RESGATE_URL)\n\n resgates = Resgate.objects.all().order_by('quantity')\n serializer = ResgateSerializer(resgates, many=True)\n self.assertEqual(response.status_code, status.HTTP_200_OK)\n self.assertEqual(response.data, serializer.data)", "def test_rap_get(self):\n\n # the function to be tested:\n rap1 = self.urihandler.get(self.hmc,\n '/api/cpcs/1/reset-activation-profiles/r1',\n True)\n\n exp_rap1 = {\n 'name': 'r1',\n 'class': 'reset-activation-profile',\n 'parent': '/api/cpcs/1',\n 'element-uri': '/api/cpcs/1/reset-activation-profiles/r1',\n 'description': 'Reset profile #1 in CPC #1',\n }\n assert rap1 == exp_rap1", "def test_api_projects_id_rates_get(self):\n pass", "async def test_get_rates_get(client):\n params = [('exchangeType', 'exchange_type_example')]\n headers = { \n 'Accept': 'application/json',\n 'Authorization': 'Bearer special-key',\n }\n response = await client.request(\n method='GET',\n path='/public/exchange/1/getRates',\n headers=headers,\n params=params,\n )\n assert response.status == 200, 'Response body is : ' + (await response.read()).decode('utf-8')", "def test_get_rate_article(self):\n self.client.credentials(HTTP_AUTHORIZATION='Bearer ' + self.token)\n self.client.post(\n self.rate_url,\n self.rate_details,\n format='json')\n self.client.credentials(HTTP_AUTHORIZATION='Bearer ' + self.token_2)\n self.rate_details[\"user\"]['rate'] = 4\n self.client.post(\n self.rate_url,\n self.rate_details,\n format='json')\n response = self.client.get(\n self.view_rates_url + str(1) + \"/\",\n format='json')\n\n self.assertEqual(response.status_code, status.HTTP_200_OK)", "def test_mining_hashrate_resale_cancellation():\n\n client = Client(key, secret)\n response = client.mining_hashrate_resale_cancellation(123, \"user_name\")\n response.should.equal(mock_item)", "def test_get_exchange_rates(self):\n pass", "def test_run_exchange_rates_api(self):\r\n API_URL = \"https://api.exchangeratesapi.io/latest\"\r\n BASE_CURRENCY = \"USD\"\r\n data=run_exchange_rates_api(API_URL, BASE_CURRENCY)\r\n self.assertTrue(len(data['rates']) > 1)\r\n self.assertEqual(data['base'], 'USD')", "def test_get_business_exchange_rates(self):\n pass", "def test_mining_hashrate_resale_details_with_missing_field(params):\n client = Client(key, secret)\n client.mining_hashrate_resale_details.when.called_with(**params).should.throw(\n ParameterRequiredError\n )", "def test_spores_info_get(self):\n\n response = requests.get('{0}/spores_info'.format(URL))\n\n rest_response = response.json()\n func_response = spores_version_info()\n\n self.assertDictEqual(rest_response, func_response)", "def test_api_erx_get_new_get(self):\n pass", "def test_openstack_rest_test_get(self):\n pass", "def test_validation_get_valid_resampling(self):\n self.assertIsInstance(api.validation.fetch_resampling(), dict)", "def test_get_result_by_uuid(self):\n headers = { \n 'Accept': 'application/json',\n }\n response = self.client.open(\n 
'/RadonCTT/result/{result_uuid}'.format(result_uuid='result_uuid_example'),\n method='GET',\n headers=headers)\n self.assert200(response,\n 'Response body is : ' + response.data.decode('utf-8'))", "def test_get_business_exchange_rates_key(self):\n pass", "def test_recharge_transactions_get(self):\n pass" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Takes a url and email, sends POST request and display body
def main():
    post_url = argv[1]
    params = {
        'email': argv[2]
    }
    query_string = parse.urlencode(params)
    post_data = query_string.encode("ascii")
    with request.urlopen(post_url, post_data) as post_response:
        response_text = post_response.read()
        print(response_text.decode("UTF-8"))
[ "def post(self):\n self.receive(mail.InboundEmailMessage(self.request.body))", "def email_post(request):\n if request.user.is_authenticated:\n messages.error(request, _(\"You are already logged in.\"))\n return redirect(ta_settings.LOGIN_REDIRECT)\n\n form = EmailForm(request.POST)\n if not form.is_valid():\n messages.error(request, _(\"The email address was invalid. Please check the address and try again.\"))\n return redirect(ta_settings.LOGIN_URL)\n\n email = ta_settings.NORMALIZE_EMAIL(form.cleaned_data[\"email\"])\n if not email:\n # The user's normalization function has returned something falsy.\n messages.error(\n request, _(\"That email address is not allowed to authenticate. Please use an alternate address.\")\n )\n return redirect(ta_settings.LOGIN_URL)\n\n email_login_link(request, email, next_url=request.GET.get(\"next\", \"\"))\n\n messages.success(request, _(\"Login email sent! Please check your inbox and click on the link to be logged in.\"))\n return redirect(ta_settings.LOGIN_URL)", "def send_email(html: str) -> None:\n # Set API params\n config = configparser.ConfigParser()\n config.read(os.path.join(\n os.path.abspath(os.path.dirname(__file__)), 'settings.cfg')\n )\n key = config.get('Mailgun', 'api')\n domain = config.get('Mailgun', 'domain')\n\n # Set requests params\n request_url = 'https://api.mailgun.net/v3/sandbox5e559bf297f8421bace259e0f7021069.mailgun.org/messages'.format(domain)\n payload = {\n 'from': 'Jokes Digest <digest@jokes.com>',\n 'to': 'pquadro@gmail.com',\n 'subject': 'Top posts this week',\n 'html': html,\n }\n\n try:\n r = requests.post(request_url, auth=('api', key), data=payload)\n r.raise_for_status()\n print('Success!')\n except HTTPError as e:\n print(f'Error {e.response.status_code}')", "def send_mail(subject, content, mail_to):\r\n D = download.Download(num_retries=2, read_cache=False, write_cache=False)\r\n url = 'http://kpapi.sinaapp.com/send-email/?subject=%(subject)s&content=%(content)s&mailto=%(mailto)s&sc=%(sc)s'\r\n post_data = {}\r\n post_data['subject'] = subject\r\n post_data['content'] = content\r\n post_data['mailto'] = mail_to\r\n post_data['sc'] = config.MAIL_API_SECURITY_CODE\r\n D.get(url='http://kpapi.sinaapp.com/send-email/', data=post_data)", "def email_body_beta_email(url):\n\tmsg = '<table cellspacing=\"0\" cellpadding=\"0\" width=\"100%\" bgcolor=\"#ebebeb\"><tbody><tr><td align=\"center\" valign=\"top\"></td></tr></tbody></table>'\n\tmsg = msg + '<table cellspacing=\"0\" cellpadding=\"0\" width=\"100%\" bgcolor=\"#ebebeb\"><tbody><tr>'\n\tmsg = msg + '<table style=\"border-left: 2px solid #e6e6e6; border-right: 2px solid #e6e6e6; border-top: 2px solid #e6e6e6\" cellspacing=\"0\" cellpadding=\"10\" width=\"600\">'\n\tmsg = msg + '<tbody>'\n\n\tmsg = msg + '\\t<tr><td style=\"background-color: #ffffff; border-top: 0px solid #333333; border-bottom: 10px solid #FFFFFF; padding-top:35px\" align=\"center\" valign=\"middle\">'\n\tmsg = msg + '\\t\\t<a href=\"https://insprite.co\"><img src=\"http://ryanfbaker.com/insprite/inspriteLogoB.png\" border=\"0\" alt=\"Insprite\" align=\"center\" width=\"200px\" height=\"55px\" /></a>'\n\tmsg = msg + '\\t</td></tr>'\n\tmsg = msg + '</tbody>'\n\tmsg = msg + '</table>'\n\n\tmsg = msg + '<table style=\"border-left: 2px solid #e6e6e6; border-right: 2px solid #e6e6e6;\" cellspacing=\"0\" cellpadding=\"0\" width=\"600\">'\n\tmsg = msg + '\\t<tr><td style=\"background-color: #ffffff; border-top: 0px solid #333333; border-bottom: 10px solid #FFFFFF;\" align=\"center\" 
valign=\"middle\">'\n\tmsg = msg + '\\t\\t<img src=\"http://ryanfbaker.com/insprite/spacer-1.png\">'\n\tmsg = msg + '\\t</td></tr>'\n\tmsg = msg + '</table>'\n\n\tmsg = msg + '<td style=\"background-color: #ffffff; border-top: 0px solid #333333; border-bottom: 10px solid #FFFFFF;padding-top:50px;\" align=\"left\" valign=\"top\">'\n\tmsg = msg + '<font style=\"font-family:Helvetica Neue;color:#555555;font-size:14px;\">Thanks for signing up for Insprite! We are excited that you\\'re interested in what we are doing over here. We are creating Insprite to be a vibrant, friendly community where you can both learn from creative people in your area, and teach your passions to others. We sincerely hope that you will be a part of it!'\n\tmsg = msg + '<br><br>We\\'re currently in the process of finishing up Insprite... and we\\'re nearly there. We\\'re just adding some bells and whistles so it\\'ll be the best possible experience.<br><br>'\n\tmsg = msg + 'We will be in touch when we\\'re ready to launch&mdash;tentatively in late 2014. We can\\'t wait to show you what we\\'ve been working on. You\\'re going to love it.<br><br>'\n\tmsg = msg + 'In the meantime, feel free to drop us a line, or follow us on our <a href=\"#\" style=\"color:#1488CC\">Blog</a>, where we will post lots of cool bloggy things (no, really, we\\'re gonna try and keep it interesting).<br><br>'\n\tmsg = msg + '<br>Spritely yours,<br>'\n\tmsg = msg + 'The Insprite Gang </font>'\n\n\tmsg = msg + '<table style=\"border-left: 2px solid #e6e6e6; border-right: 2px solid #e6e6e6;\" cellspacing=\"0\" cellpadding=\"0\" width=\"600\">'\n\tmsg = msg + '\\t<tr><td style=\"background-color: #ffffff; border-top: 0px solid #333333; border-bottom: 5px solid #FFFFFF;\" align=\"center\" valign=\"middle\">'\n\tmsg = msg + '\\t\\t<img src=\"http://ryanfbaker.com/insprite/facebookIcon.png\">'\n\tmsg = msg + '\\t\\t<img src=\"http://ryanfbaker.com/insprite/twitterIcon.png\">'\n\tmsg = msg + '\\t\\t<img src=\"http://ryanfbaker.com/insprite/instagramIcon.png\">'\n\tmsg = msg + '\\t</td></tr>'\n\tmsg = msg + '</table>'\n\n\tmsg = msg + '<table style=\"border-left: 2px solid #e6e6e6; border-right: 2px solid #e6e6e6;\" cellspacing=\"0\" cellpadding=\"0\" width=\"600\">'\n\tmsg = msg + '\\t<tr><td style=\"background-color: #ffffff; border-top: 0px solid #333333; border-bottom: 5px solid #FFFFFF;\" align=\"center\" valign=\"middle\">'\n\tmsg = msg + '\\t\\t<img src=\"http://ryanfbaker.com/insprite/spacer-2.png\">'\n\tmsg = msg + '\\t</td></tr>'\n\tmsg = msg + '</table>'", "def openemail(event):\n import webbrowser\n webbrowser.open(emailurl)\n close(event)", "def send_email(self, context, email_payload):\n email_entity = PostmarkSendEmail(**email_payload)\n data ={\n \"From\":email_entity.from_email_address,\n \"To\":email_entity.to_email_address,\n \"Subject\":email_entity.email_subject,\n \"CC\":email_entity.cc_email_address,\n \"BCC\":email_entity.bcc_email_address\n }\n if email_entity.text_email_body:\n data[\"TextBody\"] = email_entity.text_email_body\n if email_entity.html_email_body:\n data[\"HtmlBody\"] = email_entity.html_email_body\n response = util.rest(\"POST\",\"email\",data,context[\"headers\"][\"server_token\"])\n return json.loads(response)", "def send_submission(url, payload, pre_submission, row_id, survey_dict):\n response = post(url, data=payload, cookies=pre_submission.cookies)\n filename = '%s/submissions/%s--%s.html' % (\n survey_dict['name'], survey_dict['filename_prefix'], row_id)\n database.save_file(response.content, 
name=filename)\n tree = html.fromstring(response.content)\n success_response = get_success_response(tree)\n return success_response", "def test_email_template_post(self):\n pass", "def submitRequest(url):\n\n h = httplib2.Http()\n return h.request(url, 'GET')[1]", "def post(self):\n global main_email_handler\n global validator\n try:\n response = AjaxResponse()\n user_id = self.get_current_user()\n\n data = tornado.escape.json_decode(\n self.request.body)\n\n to_addr = data.get('to', None)\n cc_addr = data.get('cc', None)\n bcc_addr = data.get('bcc', None)\n topic = data.get('subject', None)\n text = data.get('text', None)\n\n valid, message = validator.is_email_request_valid(\n to_addr, cc_addr, bcc_addr, topic, text)\n\n if not valid:\n response.add_code(config.RESPONSE_ERROR)\n response.add_msg(message)\n return\n yield\n\n # valid, conversion safe, make sure no duplicates\n if cc_addr and len(cc_addr):\n cc_addr = [str(x) for x in set(cc_addr)]\n if bcc_addr and len(bcc_addr):\n bcc_addr = [str(x) for x in set(cc_addr)]\n\n cb_result = yield tornado.gen.Task(\n main_email_handler.send_email,\n to_addr, cc_addr, bcc_addr,\n topic, text, user_id\n )\n # cb_result[0] - args, cb_result[1] - kwargs\n # http://www.tornadoweb.org/en/stable/gen.html#tornado.gen.Arguments\n status, handler_id, external_id = cb_result[0][0], cb_result[0][1], cb_result[0][2]\n\n main_logger.debug(\n \"Email %s to %s by user %s sent with status %s.\"\n % (topic, to_addr, user_id, status))\n\n if status == config.SEND_STATUS.FAILED:\n response.add_code(config.RESPONSE_ERROR)\n else:\n response.add_code(config.RESPONSE_OK)\n response.add_field('id', \"%s:%s\" % (handler_id, external_id))\n response.add_field('send_status', status.name)\n\n except Exception, e:\n main_logger.exception(e)\n response.add_code(config.RESPONSE_ERROR)\n finally:\n json_ = tornado.escape.json_encode(response.get())\n self.write(json_)\n self.finish()", "def sendTheDamnEmail(f):\n \n subject = f[\"subject\"].value\n toEmails = f[\"toEmail\"].value\n msg = f[\"msg\"].value\n \n #try:\n #mimeMsg = MIMEText(msg, \"plain\", \"utf-8\")\n #mimeMsg['Subject'] = subject\n #mimeMsg['From'] = fromEmail\n #mimeMsg['To'] = toEmails\n \n mimeMsg = MIMEMultipart('alternative')\n mimeMsg['Subject'] = Header(subject, 'UTF-8').encode()\n mimeMsg['To'] = Header(toEmails, 'UTF-8').encode()\n mimeMsg['From'] = Header(fromEmail, 'UTF-8').encode()\n\t\n part1 = MIMEText(msg, 'plain', \"utf-8\")\n #part2 = MIMEText(msg, 'html') # If you want to send a fancy HTML email, use this one also\n\t\n mimeMsg.attach(part1)\n\n sendEmail.sendEmail(fromEmail, password, toEmails,\\\n smtp, port=port, msg=mimeMsg)\n\n if logPath!=\"null\":\n logger = logEmail.EmailLogger(logPath)\n stored = logger.storePost(ip, msg, toEmails)\n\tprint \"stored\"\n print \"success\"", "def email_body_recover_your_password(url):\n\tmsg = '<table cellspacing=\"0\" cellpadding=\"0\" width=\"100%\" bgcolor=\"#ebebeb\"><tbody><tr><td align=\"center\" valign=\"top\"></td></tr></tbody></table>'\n\tmsg = msg + '<table cellspacing=\"0\" cellpadding=\"0\" width=\"100%\" bgcolor=\"#ebebeb\"><tbody><tr>'\n\tmsg = msg + '<table style=\"border-left: 2px solid #e6e6e6; border-right: 2px solid #e6e6e6; border-top: 2px solid #e6e6e6\" cellspacing=\"0\" cellpadding=\"10\" width=\"600\">'\n\tmsg = msg + '<tbody>'\n\n\tmsg = msg + '<tr><td style=\"background-color: #ffffff; border-top: 0px solid #333333; border-bottom: 10px solid #FFFFFF; padding-top:35px\" align=\"center\" valign=\"middle\">'\n\tmsg = msg + '<a 
href=\"https://insprite.co\"><img src=\"http://ryanfbaker.com/insprite/inspriteLogoB.png\" border=\"0\" alt=\"Insprite\" align=\"center\" width=\"200px\" height=\"55px\" /></a>'\n\tmsg = msg + '</td></tr>'\n\tmsg = msg + '</tbody>'\n\tmsg = msg + '</table>'\n\n\tmsg = msg + '<table style=\"border-left: 2px solid #e6e6e6; border-right: 2px solid #e6e6e6;\" cellspacing=\"0\" cellpadding=\"0\" width=\"600\">'\n\tmsg = msg + '<tr><td style=\"background-color: #ffffff; border-top: 0px solid #333333; border-bottom: 10px solid #FFFFFF;\" align=\"center\" valign=\"middle\">'\n\tmsg = msg + '<img src=\"http://ryanfbaker.com/insprite/spacer-1.png\">'\n\tmsg = msg + '</td></tr>'\n\tmsg = msg + '</table>'\n\n\tmsg = msg + '<table style=\"border-left: 2px solid #e6e6e6; border-right: 2px solid #e6e6e6;\" cellspacing=\"0\" cellpadding=\"110\" width=\"600\" height=\"350\">'\n\tmsg = msg + '<tr><td style=\"background-color: #ffffff; border-top: 0px solid #333333; border-bottom: 10px solid #FFFFFF;padding-top:50px;\" align=\"left\" valign=\"top\">'\n\tmsg = msg + '<font style=\"font-family:Helvetica Neue;color:#555555;font-size:16px;\">We get it&mdash;strong passwords can be tough to remember.<br><br>'\n\tmsg = msg + 'No biggie, simply <a href=\\\"' + url + '\\\" style=\"color:#1488CC\">follow the instructions to change it.</a> and you\\'ll be good to go.<br><br>'\n\tmsg = msg + 'Didn\\'t request for a password reset? <a href=\"mailto:thegang@insprite.co\" style=\"color:#1488CC\">Give us a holler ASAP</a>.</font>'\n\tmsg = msg + '</td></tr>'\n\tmsg = msg + '</table>'\n\n\tmsg = msg + '<table style=\"border-left: 2px solid #e6e6e6; border-right: 2px solid #e6e6e6;\" cellspacing=\"0\" cellpadding=\"0\" width=\"600\">'\n\tmsg = msg + '<tr><td style=\"background-color: #ffffff; border-top: 0px solid #333333; border-bottom: 5px solid #FFFFFF;\" align=\"center\" valign=\"middle\">'\n\tmsg = msg + '<img src=\"http://ryanfbaker.com/insprite/facebookIcon.png\">'\n\tmsg = msg + '<img src=\"http://ryanfbaker.com/insprite/twitterIcon.png\">'\n\tmsg = msg + '<img src=\"http://ryanfbaker.com/insprite/instagramIcon.png\">'\n\tmsg = msg + '</td></tr>'\n\tmsg = msg + '</table>'\n\n\tmsg = msg + '<table style=\"border-left: 2px solid #e6e6e6; border-right: 2px solid #e6e6e6;\" cellspacing=\"0\" cellpadding=\"0\" width=\"600\">'\n\tmsg = msg + '<tr><td style=\"background-color: #ffffff; border-top: 0px solid #333333; border-bottom: 5px solid #FFFFFF;\" align=\"center\" valign=\"middle\">'\n\tmsg = msg + '<img src=\"http://ryanfbaker.com/insprite/spacer-2.png\">'\n\tmsg = msg + '</td></tr>'\n\tmsg = msg + '</table>'\n\n\n\tmsg = msg + '<table style=\"border-left: 2px solid #e6e6e6; border-right: 2px solid #e6e6e6;\" cellspacing=\"0\" cellpadding=\"0\" width=\"600\">'\n\tmsg = msg + '<tr><td style=\"background-color: #ffffff; border-top: 0px solid #333333; border-bottom: 10px solid #FFFFFF;\" align=\"center\" valign=\"middle\">'\n\tmsg = msg + '<font style=\"font-family:Helvetica Neue;color:#555555;font-size:10px;\"> <a href=\"mailto:thegang@insprite.co\" style=\"color:#1488CC\">Contact Us</a>'\n\tmsg = msg + '| Sent by <a href=\\\"https://insprite.co\\\">Insprite</a>, California, USA. 
| <a href=\"#\" style=\"color:#1488CC\">Unsubscribe</a></font>'\n\tmsg = msg + '</td></tr>'\n\tmsg = msg + '</table>'\n\n\tmsg = msg + '<table style=\"border-left: 2px solid #e6e6e6; border-right: 2px solid #e6e6e6;\" cellspacing=\"0\" cellpadding=\"0\" width=\"600\">'\n\tmsg = msg + '<tr> <td style=\"border-top: 0px solid #333333; border-bottom: 0px solid #FFFFFF;\">'\n\tmsg = msg + '<img width=\"596px\" src=\"http://ryanfbaker.com/insprite/footerImage.png\">'\n\tmsg = msg + '</td></tr>'\n\tmsg = msg + '</table>'\n\treturn msg", "def test_receive_url_post(self):\n body = Body()\n response = self.client.open(\n '/apis/puzderd1/MattermostChatbot/1.0.0/receive_url',\n method='POST',\n data=json.dumps(body),\n content_type='application/json')\n self.assert200(response,\n 'Response body is : ' + response.data.decode('utf-8'))", "def post_requests():", "def send_mailshot(mailshot_data):\n\n url = settings.mailer_endpoint\n headers = {'Content-Type': 'application/json'}\n response = requests.post(url, headers=headers, data=mailshot_data)", "def post_form(url, headers, payload):\n\n headers['Content-Type'] = 'application/x-www-form-urlencoded'\n\n return RestClient.make_post_request(url, headers=headers, data=payload)", "def submit(self):\n try:\n post = PostUrl(self.submit_url, self.getParams())\n except:\n print 'The form post failed.'\n return None\n return post", "def send_reminder(self, url):\n variables = {\"url\": url, \"username\": self.contact.user.alias}\n send_template_email(recipients=[self.identifier],\n subject=\"Reminder from Rmnd.in!\",\n from_address=\"reminders@rmnd.in\",\n variables=variables,\n template=\"email/reminder_email\")" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Return stock move name by type.
def next_move(ttype):
    count = db.session.query(StockMove.id).count() + 1
    return str('SO/' if ttype =='sale' else 'PO/') + str(count)
[ "def typeToName(type: int) -> unicode:\n ...", "def _get_type_name(ot):\n if hasattr(ot, \"_name\") and ot._name:\n return ot._name\n elif hasattr(ot, \"__name__\") and ot.__name__:\n return ot.__name__\n else:\n return str(ot)", "def getEventTypeNameFromEnum(self, type_):\r\n fn = self.function_table.getEventTypeNameFromEnum\r\n result = fn(type_)\r\n return result.decode('utf-8')", "def onchange_move_type(self):\n if not self.location_id and not self.location_dest_id:\n location_source_id = \"stock_location_stock\"\n location_dest_id = \"stock_location_stock\"\n if self.picking_type_id and self.picking_type_id.code == \"incoming\":\n location_source_id = \"stock_location_suppliers\"\n location_dest_id = \"stock_location_stock\"\n elif self.picking_type_id and self.picking_type_id.code == \"outgoing\":\n location_source_id = \"stock_location_stock\"\n location_dest_id = \"stock_location_customers\"\n source_location = self.env.ref(\"stock.%s\" % location_source_id)\n dest_location = self.env.ref(\"stock.%s\" % location_dest_id)\n self.location_id = source_location and source_location[1] or False\n self.location_dest_id = dest_location and dest_location[1] or False", "def move_names(self) -> list:\n return [move.name if isinstance(move, PokemonMove)\n else move[0]\n for move in self.moves]", "def onchange_move_type(self, cr, uid, ids, type, context=None):\n if context is None:\n context = {}\n location_id = False\n location_dest_id = False\n if context.get('location_id') or context.get('location_dest_id'):\n location_id = context.get('location_id')\n location_dest_id = context.get('location_dest_id')\n return {\n 'value': {\n 'location_id': location_id or self._get_default_location(cr, uid, field='location_id', context=context),\n 'location_dest_id': location_dest_id or self._get_default_location(cr, uid, field='location_dest_id', context=context)}\n }\n elif context.get('picking_id'):\n return {\n 'value': {\n 'location_id': self._get_default_location(cr, uid, field='location_id', context=context),\n 'location_dest_id': self._get_default_location(cr, uid, field='location_dest_id', context=context)}\n }\n else:\n return super(stock_move, self).onchange_move_type(cr, uid, ids, type, context=context)\n return {'value':{'location_id': source_location and source_location[1] or False, 'location_dest_id': dest_location and dest_location[1] or False}}", "def filterToName(type: int) -> unicode:\n ...", "def get_move(self) -> str:\n\n return self.moves.pop(0)", "def _get_type_name(self, st_type):\n if st_type <= 2045: return 'str' + str(st_type)\n return self._type_names[st_type]", "def get_units_title(unit_type):\n units_title = \"m\"\n if unit_type == 'english':\n units_title = \"ft\"\n return units_title", "def _ros2_type_to_type_name(ros2_type):\n try:\n first_dot = ros2_type.__module__.find(\".\")\n return ros2_type[0:first_dot] + \"/\" + ros2_type.__name__\n except:\n # this shouldn't happen but try harder, don't crash the robot for something silly like this\n return str(ros2_type).replace(\"<class '\", \"\").replace(\"'>\", \"\")", "def getNameFromType(self, *args):\n return _libsbml.ASTBasePlugin_getNameFromType(self, *args)", "def get_by_move_type(character: dict, move_type: str) -> list:\n\n move_json = get_character_movelist(character)\n moves = list(filter(lambda x: (move_type in x[\"Tags\"]), move_json))\n\n if moves:\n move_list = []\n for move in moves:\n move_list.append(move['Command'])\n return list(set(move_list))\n else:\n return []", "def str_to_move(self, move: str) -> 
str:\n return move", "def get_ctor(piece_type_str: str):\n if piece_type_str == \"PAWN\":\n return Pawn\n if piece_type_str == \"ROOK\":\n return Rook\n if piece_type_str == \"HORSE\":\n return Horse\n if piece_type_str == \"BISHOP\":\n return Bishop\n if piece_type_str == \"KING\":\n return King\n if piece_type_str == \"QUEEN\":\n return Queen", "def _get_type_name(self):\n return self._type_name", "def event_type_name(self, event_type):\n return irfman.IrfManager.event_type_names[event_type]", "def getLoadCommentTypeName(type: int) -> unicode:\n ...", "def get_unit_name(class_name, unit_type='JMU'):\r\n if unit_type == 'JMU':\r\n return class_name.replace('.','_')+'.jmu' \r\n elif unit_type == 'FMU':\r\n return class_name.replace('.','_')+'.fmu' \r\n elif unit_type == 'FMUX':\r\n return class_name.replace('.','_')+'.fmux'\r\n else:\r\n raise Exception(\"The unit type %s is unknown\" %unit_type)" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Load and return the vowel training dataset. Returns (X_train, X_test, y_train, y_test) Tuple A tuple of data and target
def load_vowel():
    train = _load_vowel_train()
    test = _load_vowel_test()
    return (train[0], train[1].reshape(-1, 1), test[0], test[1].reshape(-1, 1))
[ "def _load_vowel_test():\n vowel_data = np.loadtxt(_VOWEL_TEST_PATH, delimiter=',', skiprows=1)\n X = vowel_data[:, -10:]\n y = vowel_data[:, 1].astype(int)\n return (X, y)", "def load_data():\n\n contents = []\n with open('train.csv') as csv_file:\n csv_reader = csv.reader(csv_file, delimiter=',',)\n next(csv_reader)\n for row in csv_reader:\n contents += [row]\n\n cont_np = np.asarray(contents, dtype=np.float64)\n train_x = cont_np[:, :-1]\n train_y = cont_np[:, -1]\n\n contents = []\n with open('test.csv') as csv_file:\n csv_reader = csv.reader(csv_file, delimiter=',',)\n next(csv_reader)\n for row in csv_reader:\n contents += [row]\n\n test_x = np.asarray(contents, dtype=np.float64)\n\n return train_x, train_y, test_x", "def load_data(): \n\tdf = pandas.read_csv('data/iris.data', header=None)\n\ty = df.iloc[0:df.shape[0], 4].values\n\ty = np.where(y == 'Iris-setosa', 0, y)\n\ty = np.where(y == 'Iris-versicolor', 1, y)\n\ty = np.where(y == 'Iris-virginica', 2, y)\n\tx = df.iloc[0:df.shape[0], 0:4].values\n\tx = tuple(x)\n\ty = tuple(y)\n\ttraining_inputs = x[0:40] + x[50:90] + x[100:140]\n\ttraining_results = y[0:40] + y[50:90] + y[100:140]\n\ttraining_data = (training_inputs, training_results)\n\ttest_inputs = x[40:50] + x[90:100] + x[140:150]\n\ttest_results = y[40:50] + y[90:1000] + y[140:150]\n\ttest_data = (test_inputs, test_results)\n\treturn (training_data, test_data)", "def load(self):\n with open('test.csv') as csvfile:\n readCSV = csv.reader(csvfile, delimiter='\\t')\n all_rows = list(readCSV)\n for i in all_rows:\n if i == 28823:\n print(all_rows[i])\n X_test = [a[0] for a in all_rows]\n y_test = [a[1] for a in all_rows]\n\n with open('train.csv', encoding=\"utf8\") as csvfile:\n readCSV = csv.reader(csvfile, delimiter='\\t')\n all_rows = list(readCSV)\n X_train = [a[0] for a in all_rows]\n y_train = [a[1] for a in all_rows]\n return X_test, y_test, X_train, y_train", "def generate_train_test(self):\n x, y = self.read_data()\n x_train, y_train, x_test, y_test = self.sample_data(x, y)\n self.train = (x_train, y_train)\n self.test = (x_test, y_test)", "def convert_data_to_examples(train, test, data_column, label_column):\r\n train_InputExamples = train.apply(lambda x: InputExample(guid=None,\r\n text_a = x[data_column],\r\n text_b = None,\r\n label = x[label_column]), axis = 1)\r\n\r\n\r\n validation_InputExamples = test.apply(lambda x: InputExample(guid=None,\r\n text_a = x[data_column],\r\n text_b = None,\r\n label = x[label_column]), axis = 1)\r\n\r\n return train_InputExamples, validation_InputExamples", "def load_data():\n # Load and preprocess data\n sentences, labels = load_data_and_labels()\n sentences_padded = pad_sentences(sentences)\n vocabulary, vocabulary_inv = build_vocab(sentences_padded)\n x, y = build_input_data(sentences_padded, labels, vocabulary)\n return [x, y, vocabulary, vocabulary_inv]", "def load_dataset():\n\n train_dataset = h5py.File('datasets/train_catvnoncat.h5', \"r\")\n train_set_x_orig = np.array(train_dataset[\"train_set_x\"][:]) # your train set features\n train_set_y_orig = np.array(train_dataset[\"train_set_y\"][:]) # your train set labels\n\n test_dataset = h5py.File('datasets/test_catvnoncat.h5', \"r\")\n test_set_x_orig = np.array(test_dataset[\"test_set_x\"][:]) # your test set features\n test_set_y_orig = np.array(test_dataset[\"test_set_y\"][:]) # your test set labels\n\n classes = np.array(test_dataset[\"list_classes\"][:]) # the list of classes\n \n train_set_y_orig = train_set_y_orig.reshape((1, train_set_y_orig.shape[0]))\n 
test_set_y_orig = test_set_y_orig.reshape((1, test_set_y_orig.shape[0]))\n \n return train_set_x_orig, train_set_y_orig, test_set_x_orig, test_set_y_orig, classes", "def readUNetData(save_path='../img/unet/'):\n # Read Training data\n train_x, train_y = readDataSingleFolder(save_path + 'train/')\n\n # Read Testing data\n test_x, test_y = readDataSingleFolder(save_path + 'test/')\n\n return (train_x, train_y), (test_x, test_y)", "def load_data():\n df_raw_train_data = pd.read_csv(\"../data/train_users_2.csv\")\n df_test = pd.read_csv(\"../data/test_users.csv\")\n target = df_raw_train_data['country_destination']\n df_train = df_raw_train_data.drop(['country_destination'], axis=1)\n return (df_train, target, df_test)", "def load_data():\n global TEST_SET, DATA_DIM, CLASSES\n\n train_x_ori, train_y, test_x_ori, test_y, classes = \\\n load_data_sets()\n m_test = test_x_ori.shape[0]\n num_px = train_x_ori.shape[1]\n\n # 定义纬度\n DATA_DIM = num_px * num_px * 3\n\n # 展开数据\n test_x_flatten = test_x_ori.reshape(m_test, -1)\n\n # 归一化数据\n test_x = test_x_flatten / 255.\n\n TEST_SET = np.hstack((test_x, test_y.T))\n\n CLASSES = classes", "def load_train_data():\r\n X_train = np.load('data/train/X_train.npy')\r\n scaling_train = np.load('data/train/scaling_train.npy')\r\n ids_train = np.load('data/train/ids_train.npy')\r\n y_train = np.load('data/train/y_train.npy')\r\n\r\n seed = np.random.randint(1, 10e6)\r\n np.random.seed(seed)\r\n np.random.shuffle(X_train)\r\n np.random.seed(seed)\r\n np.random.shuffle(scaling_train)\r\n np.random.seed(seed)\r\n np.random.shuffle(ids_train)\r\n np.random.seed(seed)\r\n np.random.shuffle(y_train)\r\n\r\n return X_train, scaling_train, ids_train, y_train", "def load_test_data():\n\n images, cls = _load_data(filename=\"test_batch\")\n\n return images, cls, one_hot_encoded(class_numbers=cls, num_classes=num_classes)", "def loadTrainingData(self, fname, ename, delim):\n\n try:\n self.trainingData = np.loadtxt(fname, delimiter=delim)\n trainingDataExpected = np.loadtxt(ename, delimiter=delim)\n \n # Combine the data together into 1 array.\n self.combineData(trainingDataExpected)\n except:\n sys.stderr.write(\"ERROR: Issue loading training data, please double check file name and delimiter\")", "def learn_vowels(self, data=None):\n #pdb.set_trace()\n if not data:\n data = self.memory\n # find acoustic prototypes by clustering over stored acoustic reps\n raw_data = data.reshape(4 * len(self.stems), 2)\n ac_vowels, ac_spread = vq.kmeans(raw_data, 4)\n # find articulatory reps by comparing synthesized output vowels to\n # acoustic prototypes\n # start with candidate list of \"all possible\" articulations\n tmp_ar = N.empty((1, 3))\n rd = 0.0\n for hi in [0.0, 1.0]:\n for bk in [0.0, 1.0]:\n tmp_ar = N.vstack((tmp_ar, N.array([hi, bk, rd])))\n tmp_ar = tmp_ar[1:]\n while len(self.vowel_map) < 4:\n # no noise (since this shouldn't be running through the \"mouth\")\n tmp_ac = self.perceive(self.acoustify(tmp_ar))\n for v in ac_vowels:\n dists = N.sqrt(N.sum((v - tmp_ac)**2, axis=1))\n d = 0\n while True:\n if dists[d] < (2 * ac_spread):\n # found an articulatory prototype\n self.vowel_map[tuple(v)] = tmp_ar[d]\n # remove it from the candidate list\n tmp_ar = N.vstack((tmp_ar[:d], tmp_ar[d + 1:]))\n tmp_ac = N.vstack((tmp_ac[:d], tmp_ac[d + 1:]))\n break\n d += 1\n if d == len(dists):\n # take the best of the bad ones\n index = N.argmin(dists)\n self.vowel_map[tuple(v)] = tmp_ar[index]\n break\n self.vowel_spread = ac_spread\n return self.vowel_map", "def load_data():\n\n 
print('Loading and Visualizing Data ...')\n\n file_name = path.join(getcwd(), 'ex3', 'src', 'data', 'ex3data1')\n data = scipy.io.loadmat(file_name)\n\n # training data stored in arrays X, y\n # y should be a row vector of labels\n return data['X'], data['y'].T[0]", "def letter_recognition_data():\n # Fetch the data from the file\n with open(os.path.join(os.path.dirname(__file__),\n './data/letter_recognition.data.txt'),\n newline='') as data_file:\n data_reader = csv.reader(data_file, delimiter=',', quotechar='|')\n data = [row for row in data_reader]\n data_file.close()\n\n # Pull the inputs and target outputs out of the data\n examples = []\n for row in data:\n # first column contains the target output\n input_vector = [int(x) for x in row[1:]]\n character = row[0].lower() # this is a letter of alphabet\n number = ord(character) - 96 # ascii - 96 = letter number\n target_vector = [0] * 26 # 26 letters\n target_vector[number-1] = 1 # set to 1\n examples.append([input_vector,target_vector])\n\n # # train on first 16000\n # training_inputs = inputs[:16000]\n # training_target_outputs = target_outputs[:16000]\n # testing_inputs = inputs[16000:]\n # testing_target_outputs = target_outputs[16000:]\n\n # train on first 500\n training_examples = examples[:16000]\n testing_examples = examples[16000:18000]\n\n return training_examples, testing_examples", "def load_data(self):\n\n train_data, eval_data = DataHelper(self.config.train_data_path,\n self.config.eval_data_path,\n self.config.output_path,\n self.config.max_sequence_length).gen_data()\n return train_data, eval_data", "def test_dataset_from_file(train_dataset):\n dummy = \"justo. Praesent luctus. Curabitur egestas nunc sed libero. Proin sed\"\n assert train_dataset[0][0] == dummy\n assert train_dataset[0][1] == '6'" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Load and return the vowel testing dataset. Returns (X, y) Tuple A tuple of data and target
def _load_vowel_test():
    vowel_data = np.loadtxt(_VOWEL_TEST_PATH, delimiter=',', skiprows=1)
    X = vowel_data[:, -10:]
    y = vowel_data[:, 1].astype(int)
    return (X, y)
[ "def load_vowel():\n train = _load_vowel_train()\n test = _load_vowel_test()\n return (train[0], train[1].reshape(-1, 1), test[0], test[1].reshape(-1, 1))", "def learn_vowels(self, data=None):\n #pdb.set_trace()\n if not data:\n data = self.memory\n # find acoustic prototypes by clustering over stored acoustic reps\n raw_data = data.reshape(4 * len(self.stems), 2)\n ac_vowels, ac_spread = vq.kmeans(raw_data, 4)\n # find articulatory reps by comparing synthesized output vowels to\n # acoustic prototypes\n # start with candidate list of \"all possible\" articulations\n tmp_ar = N.empty((1, 3))\n rd = 0.0\n for hi in [0.0, 1.0]:\n for bk in [0.0, 1.0]:\n tmp_ar = N.vstack((tmp_ar, N.array([hi, bk, rd])))\n tmp_ar = tmp_ar[1:]\n while len(self.vowel_map) < 4:\n # no noise (since this shouldn't be running through the \"mouth\")\n tmp_ac = self.perceive(self.acoustify(tmp_ar))\n for v in ac_vowels:\n dists = N.sqrt(N.sum((v - tmp_ac)**2, axis=1))\n d = 0\n while True:\n if dists[d] < (2 * ac_spread):\n # found an articulatory prototype\n self.vowel_map[tuple(v)] = tmp_ar[d]\n # remove it from the candidate list\n tmp_ar = N.vstack((tmp_ar[:d], tmp_ar[d + 1:]))\n tmp_ac = N.vstack((tmp_ac[:d], tmp_ac[d + 1:]))\n break\n d += 1\n if d == len(dists):\n # take the best of the bad ones\n index = N.argmin(dists)\n self.vowel_map[tuple(v)] = tmp_ar[index]\n break\n self.vowel_spread = ac_spread\n return self.vowel_map", "def test_load_UCR_UEA_dataset():\n X, y = load_UCR_UEA_dataset(name=\"UnitTest\")\n assert isinstance(X, pd.DataFrame) and isinstance(y, np.ndarray)\n assert X.shape == (42, 1) and y.shape == (42,)", "def generate_vowel():\n return random.sample(['a', 'e', 'i', 'o', 'u', 'y'], 1)", "def test_load_data_ORL():\n x, y = load_data('data/ORL')\n assert x.shape[0] == 400\n assert y.shape == (400,)", "def load_japanese_vowels(split=None, return_X_y=True):\n name = \"JapaneseVowels\"\n return _load_provided_dataset(name, split, return_X_y)", "def load_data_test(self):\n data_set = list(open(self.DATA_DIR + 'TREC_10.label', encoding='utf-8', errors='replace').readlines())\n data_set_cleaned = [self.clean_str(sent) for sent in data_set]\n Y_Test = [s.split(' ')[0].split(':')[0] for s in data_set_cleaned]\n X_Test = [s.split(\" \")[1:] for s in data_set_cleaned]\n return X_Test, Y_Test", "def letter_recognition_data():\n # Fetch the data from the file\n with open(os.path.join(os.path.dirname(__file__),\n './data/letter_recognition.data.txt'),\n newline='') as data_file:\n data_reader = csv.reader(data_file, delimiter=',', quotechar='|')\n data = [row for row in data_reader]\n data_file.close()\n\n # Pull the inputs and target outputs out of the data\n examples = []\n for row in data:\n # first column contains the target output\n input_vector = [int(x) for x in row[1:]]\n character = row[0].lower() # this is a letter of alphabet\n number = ord(character) - 96 # ascii - 96 = letter number\n target_vector = [0] * 26 # 26 letters\n target_vector[number-1] = 1 # set to 1\n examples.append([input_vector,target_vector])\n\n # # train on first 16000\n # training_inputs = inputs[:16000]\n # training_target_outputs = target_outputs[:16000]\n # testing_inputs = inputs[16000:]\n # testing_target_outputs = target_outputs[16000:]\n\n # train on first 500\n training_examples = examples[:16000]\n testing_examples = examples[16000:18000]\n\n return training_examples, testing_examples", "def getVowels():\n return ['a', 'e', 'i', 'o', 'u']", "def load_test_data():\r\n X_test = np.load('data/test/X_test.npy')\r\n 
scaling_test = np.load('data/test/scaling_test.npy')\r\n ids_test = np.load('data/test/ids_test.npy')\r\n y_test = np.load('data/test/y_test.npy')\r\n\r\n seed = np.random.randint(1, 10e6)\r\n np.random.seed(seed)\r\n np.random.shuffle(X_test)\r\n np.random.seed(seed)\r\n np.random.shuffle(scaling_test)\r\n np.random.seed(seed)\r\n np.random.shuffle(ids_test)\r\n np.random.seed(seed)\r\n np.random.shuffle(y_test)\r\n\r\n return X_test, scaling_test, ids_test, y_test", "def load_test_data():\n print(\"Loading test data...\")\n parser = MyHTMLParser()\n parser.feed(codecs.open(test_data_with_label, \"r\", \"utf-8\").read())\n test_list = data_list[1:]\n data_list.clear()\n label = np.array(label_list)\n test_examples = [[item for item in jieba.cut(s, cut_all=False)] for s in test_list]\n data_analysis(test_examples, 'test')\n return test_examples, label", "def get_vowel_names():", "def convert_data_to_examples(train, test, data_column, label_column):\r\n train_InputExamples = train.apply(lambda x: InputExample(guid=None,\r\n text_a = x[data_column],\r\n text_b = None,\r\n label = x[label_column]), axis = 1)\r\n\r\n\r\n validation_InputExamples = test.apply(lambda x: InputExample(guid=None,\r\n text_a = x[data_column],\r\n text_b = None,\r\n label = x[label_column]), axis = 1)\r\n\r\n return train_InputExamples, validation_InputExamples", "def load_energy():\n data = load_data('energy')\n data._target_set = {'Y2': data.target}\n data._target_set['Y1'] = data.data[:, -1]\n data.data = data.data[:, 0:-1]\n\n data.target = lambda k: data._target_set[k]\n\n return data", "def test_dataset_from_file(train_dataset):\n dummy = \"justo. Praesent luctus. Curabitur egestas nunc sed libero. Proin sed\"\n assert train_dataset[0][0] == dummy\n assert train_dataset[0][1] == '6'", "def load_data(): \n\tdf = pandas.read_csv('data/iris.data', header=None)\n\ty = df.iloc[0:df.shape[0], 4].values\n\ty = np.where(y == 'Iris-setosa', 0, y)\n\ty = np.where(y == 'Iris-versicolor', 1, y)\n\ty = np.where(y == 'Iris-virginica', 2, y)\n\tx = df.iloc[0:df.shape[0], 0:4].values\n\tx = tuple(x)\n\ty = tuple(y)\n\ttraining_inputs = x[0:40] + x[50:90] + x[100:140]\n\ttraining_results = y[0:40] + y[50:90] + y[100:140]\n\ttraining_data = (training_inputs, training_results)\n\ttest_inputs = x[40:50] + x[90:100] + x[140:150]\n\ttest_results = y[40:50] + y[90:1000] + y[140:150]\n\ttest_data = (test_inputs, test_results)\n\treturn (training_data, test_data)", "def load_data_test(self, size, a_low, a_high=None):\n\n if a_high is None:\n a_high = self.a;\n\n data, label = self._generate_test_set(size, a_low, a_high, flip_structure=False);\n\n return data, label;", "def load_test_data():\n\n images, cls = _load_data(filename=\"test_batch\")\n\n return images, cls, one_hot_encoded(class_numbers=cls, num_classes=num_classes)", "def test_load_data():\n\t[x_train, y_train, x_test, y_test], _ = main_aqi.load_data(ROOT)\n\tassert x_train.shape == (821, 1, 38)\n\tassert y_train.shape == (821,)\n\tassert x_test.shape == (205, 1, 38)\n\tassert y_test.shape == (205,)" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Load and return the breast cancer wisconsin dataset (classification). The breast cancer dataset is a classic and very easy binary classification dataset. Returns (X_train, X_test, y_train, y_test) Tuple A tuple of data and target The copy of UCI ML Breast Cancer Wisconsin (Original) dataset is
def load_breast_cancer():
    bc_data_train = np.load(_BREAST_CANCER_FOLDER+'bc_data.train')
    bc_data_test = np.load(_BREAST_CANCER_FOLDER+'bc_data.test')
    bc_target_train = np.load(_BREAST_CANCER_FOLDER+'bc_target.train')
    bc_target_test = np.load(_BREAST_CANCER_FOLDER+'bc_target.test')
    for i in range(len(bc_target_test)):
        if bc_target_test[i] == 2:
            bc_target_test[i] = 0
        elif bc_target_test[i] == 4:
            bc_target_test[i] = 1
    for i in range(len(bc_target_train)):
        if bc_target_train[i] == 2:
            bc_target_train[i] = 0
        elif bc_target_train[i] == 4:
            bc_target_train[i] = 1
    return (bc_data_train, bc_target_train.reshape(-1, 1), bc_data_test, bc_target_test.reshape(-1, 1))
[ "def wisconsin_breast_cancer_data():\n\n # uncomment these lines to read directly from original file\n # data_test = pandas.read_csv('../data/breast-cancer-wisconsin.csv', header=None)\n # # replacing Y values with -1 and 1\n # data_test.loc[data_test[10] == 2, 10] = -1\n # data_test.loc[data_test[10] == 4, 10] = 1\n # data_test = data_test.convert_objects(convert_numeric=True).dropna()\n # X = data_test.ix[:, 1:9]\n # Y = data_test.ix[:, 10]\n # return np.array(X), Y[:, np.newaxis]\n data = []\n for i in range(1, 6):\n train = pandas.read_csv('data/wisconsin_cancer/train_' + str(i) + '.csv', header=None)\n test = pandas.read_csv('data/wisconsin_cancer/test_' + str(i) + '.csv', header=None)\n data.append({\n 'train_Y': train.ix[:, 0].values[:, np.newaxis],\n 'train_X': train.ix[:, 1:].values,\n 'test_Y': test.ix[:, 0].values[:, np.newaxis],\n 'test_X': test.ix[:, 1:].values,\n 'id': i\n })\n\n return data", "def load_data ():\n \n data_set = datasets.load_breast_cancer()\n return data_set", "def transform_breast_cancer(self):\n # Remove missing data points\n self.data = self.data.loc[self.data['Bare_Nuclei'] != '?']\n self.data.reset_index(inplace=True, drop=True)\n\n # We'll make a deep copy of our data set\n temp_df = pd.DataFrame.copy(self.data, deep=True)\n\n # We don't need ID so let's drop that\n temp_df.drop(columns='ID', inplace=True)\n\n # Save class column for Naive Bayes\n self.class_column = temp_df['Class']\n\n # Get dummies of the binned data\n temp_df = pd.get_dummies(temp_df, columns=['Clump_Thickness', 'Uniformity_Cell_Size', 'Uniformity_Cell_Shape',\n 'Marginal_Adhesion', 'Single_Epithelial_Cell_Size', 'Bare_Nuclei',\n 'Bland_Chromatin', 'Normal_Nucleoli', 'Mitoses', 'Class'])\n\n # Reset the index since dropping some of the data points will mess with the index\n temp_df.reset_index(inplace=True, drop=True)\n self.class_column.reset_index(inplace=True, drop=True)\n\n # We only need one target variable, the other can be dropped\n temp_df.drop(columns='Class_2', inplace=True)\n\n # Set attributes for ETL object, there are two total classes so this is a singular classifier\n self.classes = 2\n self.transformed_data = temp_df", "def load_benzene_concentration_sample():\n file = Path(__file__).parent.parent / \"data/benzene_concentration_sample.csv\"\n df = pd.read_csv(file)\n y = df[\"target\"].to_numpy()\n X = df.drop(columns=\"target\").to_numpy()\n X = np.expand_dims(X, axis=1)\n return X, y", "def get_breast_cancer_data(target=\"diagnosis\"):\n data = load_breast_cancer()\n df = pd.DataFrame(data=data.data, columns=[_.replace(\" \", \"_\") for _ in data.feature_names])\n df[target] = data.target\n return df", "def get_data():\n no_features = 30\n redundant_features = int(0.1*no_features)\n informative_features = int(0.6*no_features)\n repeated_features = int(0.1*no_features)\n x,y = make_classification(n_samples=500,n_features=no_features,flip_y=0.03,\\\n n_informative = informative_features, n_redundant = redundant_features \\\n ,n_repeated = repeated_features,random_state=7)\n return x,y", "def load_data():\n\n boston = datasets.load_boston()\n return boston", "def load_dataset():\n\n train_dataset = h5py.File('datasets/train_catvnoncat.h5', \"r\")\n train_set_x_orig = np.array(train_dataset[\"train_set_x\"][:]) # your train set features\n train_set_y_orig = np.array(train_dataset[\"train_set_y\"][:]) # your train set labels\n\n test_dataset = h5py.File('datasets/test_catvnoncat.h5', \"r\")\n test_set_x_orig = np.array(test_dataset[\"test_set_x\"][:]) # your test set 
features\n test_set_y_orig = np.array(test_dataset[\"test_set_y\"][:]) # your test set labels\n\n classes = np.array(test_dataset[\"list_classes\"][:]) # the list of classes\n \n train_set_y_orig = train_set_y_orig.reshape((1, train_set_y_orig.shape[0]))\n test_set_y_orig = test_set_y_orig.reshape((1, test_set_y_orig.shape[0]))\n \n return train_set_x_orig, train_set_y_orig, test_set_x_orig, test_set_y_orig, classes", "def data_set_maker():\n\n # crate a folder in your code directory and name it: \"files\". put the .npy files iside that folder\n\n x_all = np.load(path + '/files/tinyX.npy', 'r') # reads the input file\n y_all = np.load(path + '/files/tinyY.npy', 'r') # reads the input file\n\n # split the data into 10% validation-set and 90% training set\n raw_train, raw_valid, y_train, y_valid = train_test_split(x_all, y_all, test_size=0.2, random_state=43)\n return raw_train, raw_valid, y_train, y_valid", "def classifyData():\n summaries, data = loadTrainingData()\n tags = tagSentences(summaries, data)\n tuples = matchTagsWithFeatures(tags, data)\n classifier = nltk.NaiveBayesClassifier.train(tuples)\n return classifier", "def main():\n\n print('Breast Cancer')\n cancer = Org('Data/breast-cancer-wisconsin.data', [-1], -1, [-1])\n df = cancer.open()\n # ##NN(file, number hidden layers, number hidden nodes per layer)\n NeuralNet(df, 2, 13, 'classification')\n\n #print('glass')\n #glass = Org('Data/glass.data', [-1], -1, [-1])\n #df = glass.open()\n #NeuralNet(df, 2, 6, 'classification')\n\n #print('soybean')\n #soybean = Org('Data/soybean-small.data', [-1], -1, [-1])\n #df = soybean.open()\n #NeuralNet(df, 2, 11, 'classification')\n\n #print('abalone')\n #abalone = Org('Data/abalone.data', [-1], -1, [0])\n #df = abalone.open()\n #NeuralNet(df, 2, 2, 'regression')\n\n #print('machine')\n #machine = Org('Data/machine.data', [-1], -1, [-1])\n #df = machine.open()\n #NeuralNet(df, 2, 3, 'regression')\n #print(df)\n\n #print('forest')\n #forest = Org('Data/forestfires.data', [0], -1, [2,3])\n #df = forest.open()\n #NeuralNet(df, 0, 3, 'regression')", "def load_data(train_file, test_file):\n\n # load train and test data\n data_train = pd.read_csv(train_file)\n data_test = pd.read_csv(test_file)\n\n # concat and label\n data_out = pd.concat([data_train, data_test], keys=['train', 'test'])\n\n return data_out", "def get_train(self, preprocess=False):\n return self._dataset(\n 'train',\n self._directory,\n 'smallnorb-5x46789x9x18x6x2x96x96-training-dat.mat',\n 'smallnorb-5x46789x9x18x6x2x96x96-training-cat.mat',\n preprocess)", "def load_binary_imbalanced(classes=(1,7), ratio=0.1):\r\n train_set, train_set_target = load_data()\r\n \r\n # binarize\r\n mask_train_set_imb = np.logical_or(train_set_target == classes[0],train_set_target == classes[1])\r\n (data_set_imb,data_set_imb_target)= (train_set[mask_train_set_imb], train_set_target[mask_train_set_imb])\r\n\r\n # imbalance\r\n data_minority = data_set_imb[data_set_imb_target == classes[1]]\r\n data_minority_target = data_set_imb_target[data_set_imb_target == classes[1]]\r\n data_majority = data_set_imb[data_set_imb_target == classes[0]]\r\n data_majority_target = data_set_imb_target[data_set_imb_target == classes[0]]\r\n original_size = data_minority_target.shape[0]\r\n majority_size = data_majority_target.shape[0]\r\n target_size = int(np.floor(majority_size * ratio))\r\n indices = np.random.choice(original_size, size=target_size)\r\n data_minority = data_minority[indices]\r\n data_minority_target = data_minority_target[indices]\r\n\r\n # 
merge\r\n train_set = np.concatenate([data_minority, data_majority])\r\n train_set_target = np.concatenate([data_minority_target, data_majority_target])\r\n\r\n #shuffle\r\n train_set, train_set_target = np.hsplit(\r\n np.random.permutation(\r\n np.hstack((train_set, train_set_target.reshape((train_set_target.shape[0], 1))))\r\n ), [-1]\r\n )\r\n train_set_target = np.asarray(train_set_target, dtype='int').reshape((train_set_target.shape[0],))\r\n return (train_set[:],train_set_target[:])", "def boston():\n return BostonDataset()", "def train_model(data, target):\n\n # Using cross-validation\n # TO TRY: stratification for dividing preclassified tweets into homogenous subgroups before\n # sampling in order to improve the representativeness of the sampling\n\n train_tweets, validation_tweets, train_sentiment, validation_sentiment = cross_validation.train_test_split(data, \n target,\n test_size=0.4)\n\n \n # Fitting the Naive Bayes classifier wtih the training tweets and corresponding sentiment\n classifier = BernoulliNB().fit(train_tweets, train_sentiment)\n\n\n predicted = classifier.predict(validation_tweets)\n\n # Using the cross-validation split, evaluate the accuracy of the predicted tweets\n evaluate_model(validation_sentiment, predicted)\n\n # Pickling the classifier\n pickle_file = open('nb_classifier.pickle', 'wb')\n pickle.dump(classifier, pickle_file)\n pickle_file.close()\n\n return classifier", "def get_naive_Bayes_classificator(self):\n try:\n with open(TWEET_BAYES_FILENAME, 'rb') as f:\n self.classifier, self.bayes_accuracy = pickle.load(f)\n print('It was read sucessfully!')\n except IOError:\n self.train_naive_Bayes_classificator()", "def prepare_dataset():\n with open('gold-posts.txt', encoding='utf-8') as f:\n posts = f.readlines()\n with open('gold-labels.txt', encoding='utf-8') as f:\n labels = f.readlines()\n\n def to_cat(x: str) -> int:\n if x == 'p':\n return 1\n elif x == 'n':\n return 2\n else:\n return 0\n X = np.array([x.strip() for x in posts])\n y = np.array([to_cat(x.strip()) for x in labels])\n\n # DOES NOT WORK - too imbalanced\n #skf = StratifiedKFold(n_splits=5, random_state=None, shuffle=False)\n #for train_index, test_index in skf.split(X, y):\n # X_train, X_test = X[train_index], X[test_index]\n # y_train, y_test = y[train_index], y[test_index]\n # break\n\n # WORKS better\n trI, teI = balanced_split(y)\n\n train_texts = X[trI].tolist()\n train_labels = y[trI].tolist()\n valid_texts = X[teI].tolist()\n valid_labels = y[teI].tolist()\n return train_texts, train_labels, valid_texts, valid_labels", "def get_train_data():\n # train set\n train = pd.read_csv(\"train.csv\")\n\n return train" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
sub(Vector,Vector) subtracts second vector from first one
def sub(first, other):
    if isinstance(first,FreeCAD.Vector) and isinstance(other,FreeCAD.Vector):
        return FreeCAD.Vector(first.x-other.x, first.y-other.y, first.z-other.z)
[ "def vector_sub(v1,v2):\n return Vector(v1.x - v2.x, v1.y - v2.y, v1.z - v2.z)", "def subtract_vect(a, b):\n return (a[0] - b[0],\n a[1] - b[1],\n a[2] - b[2])", "def vector_subtract(v1, v2):\n return v1[0] - v2[0], v1[1] - v2[1]", "def __sub__(vec1, vec2):\n if len(vec1) == len(vec2):\n return Vector(vec1[i] - vec2[i] for i in range(len(vec1)))\n raise DimentionMissmatchException()", "def sub(self, a, b):\n return a - b", "def subtract(v1: Vector, v2: Vector) -> Vector:\n if v1.dim != v2.dim:\n msg = \"Vectors must have the same dimension, got {} and {}\"\n raise ValueError(msg.format(v1.dim, v2.dim))\n ## homework:start\n output_vector = Vector(i-j for i,j in zip(v1,v2))\n ## homework:end\n return output_vector", "def subtract_vector(*args):\n return _vector.subtract_vector(*args)", "def sub(self, V1, V2):\n \n x = V1[0] - V2[0]\n y = V1[1] - V2[1]\n \n return x, y", "def sub(a, b):\n return a - b", "def _subVectors(X1,X2):\n _checkSize(X1,X2)\n return [ X1[i] - X2[i] for i in range(len(X1))]", "def test__vector_subtraction__given_two_vectors__return_correct_vector():\n assert Vector((0, 1, 2)) - Vector((3, 4, 5)) == Vector((-3, -3, -3))", "def vector(A, B):\n return (B[0] - A[0], B[1] - A[1])", "def pairwise_sub(a, b):\n return [a[i]-b[i] for i in xrange(0, min(len(a), len(b)))]", "def subtract(a, b):\n return a - b", "def subtract_vectors(vector_1, vector_2):\n new_coordinates = []\n index = 0\n while index < vector_1.dimension:\n new_value = vector_1.coordinates[index] - vector_2.coordinates[index]\n new_coordinates.append(new_value)\n index += 1\n new_vector = Vector(new_coordinates)\n return new_vector", "def __sub__(self, other):\n if isinstance(other, Vec2Array):\n if len(self) == len(other):\n return self.from_points(\n a - b for a, b in zip(self, other))\n else:\n raise ValueError(\n \"cannot subtract arrays with different lengths\")\n else:\n try:\n b = Vec2(*other)\n except Exception:\n return NotImplemented\n return self.from_points(a - b for a in self)", "def sub(self,a,b):\n\t\tc = int(a) - int(b)\n\t\treturn c", "def subVector(self, *args):\n return _yarp.Vector_subVector(self, *args)", "def subtract(a, b):\n return a - b" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Test that a valid message is sent to a valid webhook.
def test_valid_webhook(self, mock_send):
    send_notification("valid_webhook", self.message)
    mock_send.assert_called()
[ "def test_slackWH_send_good(get_slackwebhook, capsys):\n s = get_slackwebhook\n s.send()\n out, err = capsys.readouterr()\n assert \"Message sent\" in out", "def test_create_webhook(self):\n pass", "def test_webhook():\n data = {\n \"username\": CONFIG['USERNAME'],\n \"avatar_url\": CONFIG['AVATAR_URL'],\n \"embeds\": [{\n \"title\": \"Testing Webhook\",\n \"description\": \"This is just a quick test to ensure the webhook works. Thanks again for using these monitors!\",\n \"color\": int(CONFIG['COLOUR']),\n \"footer\": {'text': 'Made by Yasser'},\n \"timestamp\": str(datetime.utcnow())\n }]\n }\n\n result = requests.post(CONFIG['WEBHOOK'], data=json.dumps(data), headers={\"Content-Type\": \"application/json\"})\n\n try:\n result.raise_for_status()\n except requests.exceptions.HTTPError as err:\n logging.error(err)\n else:\n print(\"Payload delivered successfully, code {}.\".format(result.status_code))\n logging.info(msg=\"Payload delivered successfully, code {}.\".format(result.status_code))", "def test_valid_post_message(self):\r\n self.client.post(reverse('vumi-backend'), json.dumps(self.valid_data),\r\n content_type='text/json')\r\n message = self.inbound[0]\r\n self.assertEqual(self.valid_data['content'], message.text)\r\n self.assertEqual(self.valid_data['from_addr'],\r\n message.connection.identity)\r\n self.assertEqual('vumi-backend',\r\n message.connection.backend.name)", "def is_valid_webhook_event(event):\n token = event['token']\n expected_token = htk_setting('HTK_SLACK_WEBHOOK_TOKEN')\n is_valid = token == expected_token\n webhook_settings = get_webhook_settings(token)\n event['webhook_settings'] = webhook_settings\n if not is_valid:\n is_valid = webhook_settings is not None\n else:\n # it's really invalid\n pass\n return is_valid", "def test_get_webhook(self):\n pass", "def test_webhook_empty_event(self):\n event = {\n 'body': json.dumps({})\n }\n context = {}\n resp = webhook(event, context)\n self.assertEqual(resp[\"statusCode\"], 500)\n self.assertEqual(resp[\"body\"], json.dumps({}))", "def test_api_onfido_webhook_post(self):\n pass", "def test_update_webhook(self):\n pass", "def test_good_signature(post_data, expected_error_message, settings, rf):\n app_key = '123appkey'\n request_signature = compute_request_signature(app_key, post_data)\n setattr(settings, APP_KEY_SETTING, app_key)\n setattr(settings, FAIL_ON_MISMATCH_SETTING, True)\n view = OurVeryOwnReceiverView.as_view()\n request = rf.post(\n WEBHOOK_URL,\n post_data,\n content_type='application/json',\n HTTP_X_GAPI_SIGNATURE=request_signature)\n\n response = view(request)\n if expected_error_message is None:\n assert response.status_code == 200\n else:\n assert response.status_code == 400\n assert response.content == expected_error_message", "def test_bot_message():\n send_json_message_to_bot(request.get_json())\n return \"ok\"", "def test_empty_string_json_as_response_to_outgoing_webhook_request(self) -> None:\n bot_owner = self.example_user(\"othello\")\n bot = self.create_outgoing_bot(bot_owner)\n\n responses.add(\n responses.POST,\n \"https://bot.example.com/\",\n json=\"\",\n )\n\n with self.assertLogs(level=\"INFO\") as logs:\n stream_message_id = self.send_stream_message(\n bot_owner, \"Denmark\", content=f\"@**{bot.full_name}** foo\", topic_name=\"bar\"\n )\n\n self.assert_length(responses.calls, 1)\n\n self.assert_length(logs.output, 1)\n self.assertIn(f\"Outgoing webhook request from {bot.id}@zulip took \", logs.output[0])\n\n # We verify that no new message was sent, since that's the behavior implied\n 
# by the response_not_required option.\n last_message = self.get_last_message()\n self.assertEqual(last_message.id, stream_message_id)", "def test_webhook(self, webhook_id):\n response = self._get(self.uri_for(\"webhooks/%s/test\" % webhook_id))\n return True # An exception will be raised if any error occurs", "def test_honeypot(self):\n response = self.client.post(self.url, {self.honeypot: 'some value'})\n self.assertEqual(response.status_code, 400)\n response = self.client.post(self.url, {self.honeypot: ''})\n self.assertEqual(response.status_code, 200)", "def test_command_trigger_webhook_post(self):\n pass", "def test_validate_payload(self):\n # Arrange\n url = 'https://0bcb332ca10a.eu.ngrok.io/api/git_to_dbfs'\n encoded_body = 'somebody'.encode()\n secret = 'somesecret'\n encoded_secret = secret.encode()\n signature = HMAC(key=encoded_secret, msg=encoded_body, digestmod=sha1).hexdigest()\n headers = {'X-Hub-Signature': f'sha1={signature}'}\n req = func.HttpRequest(method='POST', url=url, headers=headers, body=encoded_body)\n\n # Act\n result = validate_payload(req, secret)\n\n # Assert\n self.assertTrue(result)", "def test_uptimerobot_invalid_payload_with_missing_data(self) -> None:\n self.url = self.build_webhook_url()\n payload = self.get_body(\"uptimerobot_invalid_payload_with_missing_data\")\n result = self.client_post(self.url, payload, content_type=\"application/json\")\n self.assert_json_error(result, \"Invalid payload\")\n\n expected_message = MISCONFIGURED_PAYLOAD_ERROR_MESSAGE.format(\n bot_name=self.test_user.full_name,\n support_email=FromAddress.SUPPORT,\n ).strip()\n\n msg = self.get_last_message()\n self.assertEqual(msg.content, expected_message)\n self.assertEqual(msg.recipient.type, Recipient.PERSONAL)", "def test_webhooks_create(self):\n pass", "async def test_mailgun_webhook_with_missing_signature_without_api_key(\n http_client, webhook_id_without_api_key, mailgun_events\n) -> None:\n event_count = len(mailgun_events)\n\n await http_client.post(\n f\"/api/webhook/{webhook_id_without_api_key}\",\n json={\"hello\": \"mailgun\", \"signature\": {}},\n )\n\n assert len(mailgun_events) == event_count + 1\n assert mailgun_events[-1].data[\"webhook_id\"] == webhook_id_without_api_key\n assert mailgun_events[-1].data[\"hello\"] == \"mailgun\"\n\n await http_client.post(\n f\"/api/webhook/{webhook_id_without_api_key}\", json={\"hello\": \"mailgun\"}\n )\n\n assert len(mailgun_events) == event_count + 1\n assert mailgun_events[-1].data[\"webhook_id\"] == webhook_id_without_api_key\n assert mailgun_events[-1].data[\"hello\"] == \"mailgun\"" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Sets up biosafe and stores it as an object variable.
def setup_biosafe(self):
    # Generate dummy data in the right format
    species_presence = pd.DataFrame(
        np.random.randint(2, size=len(self.links_law)),
        columns=['speciesPresence'],
        index=self.links_law.index)
    ecotope_area = pd.DataFrame(
        np.ones(len(self.links_eco2.columns)-1) * 1e5,
        columns = ['area_m2'],
        index = self.links_eco2.columns.values[0:-1])

    # Simplify ecotope tables to VR ecotopes
    unique_eco = np.unique(
        np.hstack((self.vr_eco.ecotope1.values, self.vr_eco.ecotope2.values)))
    links_eco3 = self.links_eco2.reindex(columns=unique_eco)
    ecotope_area = ecotope_area.reindex(index=unique_eco)

    # Run a first version of Biosafe
    self.bsf_model = bsf.biosafe(
        self.legal_weights, self.links_law, links_eco3,
        species_presence, ecotope_area)
    #PotTax = self.bsf_model.TFI()
    #PotAll = self.bsf_model.FI()
    return
[ "def __init__(self):\n if os.path.isfile('./jamf.pickle') is False:\n self.save_keys()\n else:\n pickle_in = open(\"jamf.pickle\", \"rb\")\n keys = pickle.load(pickle_in)\n self.casper_system_url = keys[\"Casper URL\"]\n self.casper_username = keys[\"Casper User\"]\n self.casper_password = keys[\"Casper Pass\"]", "def __init__(self, barcamp, handler):\n self.barcamp = barcamp\n self.handler = handler\n self.app = self.handler.app\n self.config = self.handler.app.config\n self.user = self.handler.user", "def init_batfish(self):\n network_name = config.SETTINGS.batfish.network_name\n snapshot_name = config.SETTINGS.batfish.snapshot_name\n snapshot_path = config.SETTINGS.main.configs_directory\n\n bf_params = dict(\n host=config.SETTINGS.batfish.address,\n port_v1=config.SETTINGS.batfish.port_v1,\n port_v2=config.SETTINGS.batfish.port_v2,\n ssl=config.SETTINGS.batfish.use_ssl,\n )\n if config.SETTINGS.batfish.api_key:\n bf_params[\"api_key\"] = config.SETTINGS.batfish.api_key\n\n try:\n self.bfi = Session.get(\"bf\", **bf_params)\n self.bfi.verify = False\n self.bfi.set_network(network_name)\n self.bfi.init_snapshot(snapshot_path, name=snapshot_name, overwrite=True)\n except BatfishException as exc:\n error = json.loads(str(exc).splitlines()[-1])\n error = re.sub(r\"[^:]*:.\", \"\", error[\"answerElements\"][0][\"answer\"][0])\n raise AdapterLoadFatalError(error) from exc", "def __init__(self):\n # Get a weboob instance\n self.weboob = Weboob()\n self.backend = None", "def _setup(app_obj):", "def init_fastapi(self):\n from fastmsa.api import app\n\n app.title = self.title", "def setUp(self):\n self.my_book = Book(\"Leyla Ali\",9999999999999,9,\"The Greatest\",1, 14.99)", "def __init__(self):\n self.cbsa_dict = {}", "def boot(self):\n pass", "def setup(self):\n self.machine = Machine(['a', 'b', 'c', '_'])", "def boot(self):\n\n pass", "def __init__(self, controller):\n super(CanBusInterface, self).__init__()\n self.controller = controller\n self.baudrate = self.get_setting('baudrate', int)\n self.handle = None\n self.port = self.get_setting('port')\n self.timeout = self.get_setting('timeout', int)\n self.thread = None", "def __init_euca(self):\n if self.euca:\n return\n self.euca = Euca2ool()", "def setUp(self):\n\t\tself.deck = Deck()", "def _configure(self):\n if self._app is not None:\n return\n\n # create the flask app instance\n app = self._create_app()\n\n # initialize the flask-sa object\n self._init_db(app)\n\n # create the api entry points\n self._create_api(app)\n\n self._app = app", "def init():\n try:\n compile_contract(\n \"fishcake\", f\"Fishcake(sp.address('{pub_key_hash}'),{default_supply})\")\n fishcake_addr = deploy(\"fishcake\")\n print(\"\\n\")\n compile_contract(\n \"fishcakeBox\", f\"FishcakeBox({default_redeem_amt}, sp.address('{fishcake_addr}'))\")\n fishcake_box_addr = deploy(\"fishcakeBox\")\n setup(fishcake_addr, fishcake_box_addr)\n print(\"\\n\\n[!] 
Details :\\n\")\n print(f\"-- Fishcake Token Address : {fishcake_addr}\")\n print(f\"-- Fishcake Box Address : {fishcake_box_addr}\")\n except Exception as e:\n print(\"Failed to originate Contracts : \", e)", "def init_bounce(self, bounce):\n self.can_bounce = bounce", "def startup(): \n # Initialize buzzer IO\n writePin(BUZZER, True)\n setPinDir(BUZZER, True)\n \n # Send the commands to initialize the display\n lcd_init()\n \n # Initialize hardware timers\n initHwTmr()\n \n # Write game instrcutions to screen\n clear_screen()\n write_line1('Reaction time')\n write_line2('press any button')", "def setup_method(self):\n # pylint: disable=attribute-defined-outside-init\n\n self.session = FakedSession('fake-host', 'fake-hmc', '2.13.1', '1.8')\n self.client = Client(self.session)\n\n self.faked_console = self.session.hmc.consoles.add({\n 'object-id': None,\n # object-uri will be automatically set\n 'parent': None,\n 'class': 'console',\n 'name': 'fake-console1',\n 'description': 'Console #1',\n })\n self.console = self.client.consoles.find(name=self.faked_console.name)" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Calculate the total area of all ecotopes on the playing board.
def ecotope_area_sums(self, board):
    # clean up the input and merge into a single dataframe
    cols = ['geometry', 'z_reference', 'landuse', 'biosafe']
    board_clean = board.loc[board.biosafe, cols]
    board_eco = pd.merge(board_clean, self.vr_eco, on=['z_reference', 'landuse'])

    # optional: output gdf to shp
    # gdf = board_eco.copy()
    # gdf['biosafe'] = gdf.biosafe.values.astype('int')
    # gdf.to_file('board_eco.shp')

    # calculate the total area of all columns
    # note: landuse-z_reference combinations not in vr_ecotopes are
    # excluded
    area_eco1 = board_eco.groupby('ecotope1').sum()
    area_eco2 = board_eco.groupby('ecotope2').sum()
    area_fractions = pd.concat([area_eco1.fraction1, area_eco2.fraction2],
                               axis=1, sort=True)
    area_total = area_fractions.fillna(0).sum(axis=1).reset_index()
    area_total.columns = ['ecotope', 'area_m2']

    # assert that the total area of the ecotopes matches the biosafe
    # hexagons
    try:
        assert int(area_total.sum().area_m2) == int(board_clean.shape[0]),\
            ("ERROR: There appears to be one or more polygons that is not " +
             "detected correctly, resulting in a missmatch of the VR ecotopes")
    except AssertionError as error:
        print(error)
        pass

    area_out = area_total.set_index('ecotope')
    area_out.index.name=None
    return area_out
[ "def calculate_total_area(self) -> float:\n sum_of_areas = 0\n for value in self.list_of_shapes:\n area = value.get_area()\n sum_of_areas += area\n return sum_of_areas", "def area(self):\n area = 0\n\n for room in self.rooms:\n area += room.polygon.area()\n\n for wall in self.walls:\n area += wall.polygon.area()\n\n return area", "def area(self):\n return sum(i.area for i in self.items)", "def total_area(self):\n return sum(self.alpha.values())", "def _area(self):\n self.area = 0.0\n for sail in self.sails:\n self.area += sail.area", "def Area(self):\r\n Vert = self.Vertici + self.Vertici # Creazione lista ripetuta\r\n A = 0\r\n for punto in range(len(self.Vertici)):\r\n v_1 = self.Centro - Vert[punto]\r\n v_2 = self.Centro - Vert[punto + 1]\r\n area_parz = 0.5 * Grandezza_vettore(np.cross(v_1, v_2)) # area Parziale\r\n A += area_parz # Somma delle aree parziali\r\n A = round(A, precisione)\r\n return (A)", "def total_area(self):\n return numpy.prod([r[1] - r[0] for r in self.range_])", "def calculate_area(building, pixel_size=1):\n return len(building.points) * (pixel_size**2)", "def compute_surface_area(self):\n return np.sum(self._find_triangle_areas())", "def total_area(envelope):\n return sum(p.area for p in envelope)", "def total_area(self):\n return self._total_area", "def area_of_my_square(self):\n return self.width * self.width", "def area_of_my_square(self):\n return self.width * self.height", "def area(self) -> float:\n return sum(child.area() for child in self._children)", "def calculate(self):\n\n return self._calculate_area(self.ground_truth, self.slice_number)", "def _calculate_square_feet(self, present: Present) -> int:\n return present.total_area + min(\n present.length_wise_area, present.width_wise_area, present.height_wise_area\n )", "def calculate_area(self):\r\n if self.length == self.width:\r\n area = super().calculate_area()\r\n return area\r\n else:\r\n print(\"Invalid Square\")", "def get_artif_area(self):\n result = self.cities.all().aggregate(total=Sum(\"surface_artif\"))\n return result[\"total\"] or 0", "def calculate_triangle_areas(self):\n # get three corner points (each with x and y coord) of one triangle (with index 100)\n a, b, c = [np.array([self.triobject.x[k], self.triobject.y[k]]) for k in self.triobject.triangles[100]]\n # Area of the triangle = 1/2 * |AC x AB|_z (x = cross product)\n self.triangle_area_m2 = 1e6 * abs(0.5 * ((c - a)[0] * (b - a)[1] - (c - a)[1] * (b - a)[0])) # in m^2\n # Area of Easter Island in the discretised state\n self.area_map_m2 = self.triangle_area_m2 * self.n_triangles_map\n # Number of gardens per cell (rounded down)\n self.n_gardens_percell = int(self.triangle_area_m2 / self.m.garden_area_m2)\n print(\"Area of triangles in m^2: {}; Area of discretised EI: {}; Nr of gardens per cell: {}\".format(\n self.triangle_area_m2, self.area_map_m2, self.n_gardens_percell))\n return" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Function that calculates the biodiversity score based on the Biosafe output. The numbers 29.33 and 1.4349 follow from running MC simulations to determine the lowest and highest possible scores. The biodiversity score reflects the 0-100% range between the two.
def set_score(self):
    if self.PotTax_intervention is None:
        if self.PotTax_reference is not None:
            self.score = (((self.PotTax_reference.sum().TFI - 29.33) /
                           1.4349) / 100)
        else:
            print("There is no Biosafe output to score")
            return
    else:
        self.score = (((self.PotTax_intervention.sum().TFI - 29.33) /
                       1.4349) / 100)
    return
[ "def compute_secondary_score(self, predictions):\n print(\"compute secondary score...\")\n\n return self.compute_bleu(predictions)", "def scoreChari_2015(svmInputFile, svmOutputFile, PAM, genome):\n\n model = f_p + '/models/293T_HiSeq_SP_Nuclease_100_SVM_Model.txt'\n dist = f_p + '/models/Hg19_RefFlat_Genes_75bp_NoUTRs_SPSites_SVMOutput.txt'\n\n if PAM == 'NGG' and genome == 'mm10':\n model = f_p + '/models/293T_HiSeq_SP_Nuclease_100_SVM_Model.txt'\n dist = f_p + '/models/Mm10_RefFlat_Genes_75bp_NoUTRs_SPSites_SVMOutput.txt'\n elif PAM == 'NNAGAAW' and genome == 'hg19':\n model = f_p + '/models/293T_HiSeq_ST1_Nuclease_100_V2_SVM_Model.txt'\n dist = f_p + '/models/Hg19_RefFlat_Genes_75bp_NoUTRs_ST1Sites_SVMOutput.txt'\n elif PAM == 'NNAGAAW' and genome == 'mm10':\n model = f_p + '/models/293T_HiSeq_ST1_Nuclease_100_V2_SVM_Model.txt'\n dist = f_p + '/models/Mm10_RefFlat_Genes_75bp_NoUTRs_ST1Sites_SVMOutput.txt'\n\n prog = Popen(\"%s/svm_light/svm_classify -v 0 %s %s %s\" % (f_p, svmInputFile, model, svmOutputFile), shell=True)\n prog.communicate()\n\n svmAll = open(dist,'r')\n svmThis = open(svmOutputFile, 'r')\n\n # first through go all scores and get the max and min\n allData = []\n for line in svmAll:\n line = line.rstrip('\\r\\n')\n allData.append(float(line))\n svmAll.close()\n\n scoreArray = []\n for line in svmThis:\n line = line.rstrip('\\r\\n')\n scoreArray.append(float(line))\n\n return [ss.percentileofscore(allData, i) for i in scoreArray]", "def get_score(snack_data, percentage_data, snack, snack_query, protein_query, carb_query, fat_query):\n\tstart_time = time.time()\n\n\t#Load necessary data\n\t\"\"\"\twith open ('../../../Data/percentagesDict.pickle', 'rb') as f:\n\t\tpercentage_data = pickle.load(f)\n\n\twith open ('../../../Data/FINAL_snacks_data.pickle', 'rb') as f:\n\t\tsnack_data = pickle.load(f)\"\"\"\n\n\t#Set constants\n\tLOW_FAT = .3\n\tHIGH_FAT = .6\n\tLOW_CARB = .1\n\tHIGH_CARB = .2\n\tLOW_PRO = .2\n\tHIGH_PRO = .4\n\n\t#Convert macro percentages to 'high', 'med', 'low' categories\n\tfat = percentage_data[snack]['fat']\n\tprotein = percentage_data[snack]['protein']\n\tcarb = percentage_data[snack]['carb']\n\n\tif fat > HIGH_FAT:\n\t\tfat_content = 'high'\n\telif fat < LOW_FAT:\n\t\tfat_content = 'low'\n\telse:\n\t\tfat_content = 'med'\n\n\tif protein > HIGH_PRO:\n\t\tprotein_content = 'high'\n\telif protein < LOW_PRO:\n\t\tprotein_content = 'low'\n\telse:\n\t\tprotein_content = 'med'\n\n\tif carb > HIGH_CARB:\n\t\tcarb_content = 'high'\n\telif carb < LOW_CARB:\n\t\tcarb_content = 'low'\n\telse:\n\t\tcarb_content = 'med'\n\n\t#Set x values\n\tx1 = fat_query == fat_content\n\tx2 = carb_query == carb_content\n\tx3 = protein_query == protein_content\n\tx4 = cooccur(snack_data, snack, snack_query) \n\tx5 = snack_data[snack]['rating']\n\n\tw1 = 1\n\tw2 = 1\n\tw3 = 1\n\tw4 = 1\n\tw5 = 1\n\t\n\t#print('x1: {}, x2: {}, x3: {}, x4: {}, x5: {}'.format(x1, x2, x3, x4, x5))\n\t#print(\"get_score() time: --- %s seconds ---\" % (time.time() - start_time))\n\n\n\treturn w1*x1 + w2*x2 + w3*x3 + w4*x4 + w5*x5", "def score(self):\n s = 100*self.coverage-10*self.overlapRatio-0.01*self.traveledDist\n return s", "def quality_score(predicted, actual):\n if predicted <0 or actual<0:\n print(\"Negative case counts are not allowed\")\n return\n numerator = abs(predicted-actual)\n denominator = max(predicted, actual, 4)\n qs = 1 - 1.*numerator/denominator\n return qs", "def biome_score(biome, npcs):\n result = 100.0\n if len(npc_list) < 2:\n return result\n\n for npc in npcs:\n if npc in 
ignore_set: #ignore non-vendor npcs\n continue\n npc_score = 1.0\n \n #check town size\n if len(npcs) > 3:\n npc_score *= pow(crowded_modifier, len(npcs) - 3)\n elif len(npcs) < 3:\n npc_score *= sparce_modifier\n\n #check biome\n if biome == npc_dict[npc].liked_biome:\n npc_score *= biome_like_modifier\n elif biome == npc_dict[npc].disliked_biome:\n npc_score *= biome_hate_modifier\n\n #check neighbors\n for neighbor in npcs:\n if neighbor != npc:\n if neighbor in npc_dict[npc].loved_npcs:\n npc_score *= npc_love_modifier\n elif neighbor in npc_dict[npc].liked_npcs:\n npc_score *= npc_like_modifier\n elif neighbor in npc_dict[npc].disliked_npcs:\n npc_score *= npc_dislike_modifier\n elif neighbor in npc_dict[npc].hated_npcs:\n npc_score *= npc_hate_modifier\n\n npc_score = npc_score\n if npc_score < result:\n result = npc_score\n\n #print(f\"{town_biome}: {result}\")\n return result", "def task5_b_39():\n ir_1 = float(input('Enter the annual interest rate for Bank 1: '))\n nper_1 = int(input('Enter the number of compounded period for Bank 1: '))\n\n ir_2 = float(input('Enter the annual interest rate for Bank 2: '))\n nper_2 = int(input('Enter the number of compounded period for Bank 2: '))\n\n apy_1 = (1 + ir_1 / nper_1) ** nper_1 - 1\n apy_2 = (1 + ir_2 / nper_2) ** nper_2 - 1\n\n which_better = max(apy_1, apy_2)\n\n print(f'APY for Bank 1 is: {apy_1:.3%}')\n print(f'APY for Bank 2 is: {apy_2:.3%}')\n\n if which_better == apy_1:\n print('Bank 1 is the better bank')\n else:\n print('Bank 2 is the better bank')", "def calculate_dbot_score(application_data: dict) -> int:\n if not application_data: # no response from Zimperium\n return 0\n\n classification = application_data.get('classification')\n if not classification:\n return 0\n if classification == 'Legitimate':\n return 1\n return 3 # classification == Malicious", "def stateQualtityScore(roomba):\n return 0", "def prob5(file = 'crime_data.csv'):\n #Question one\n data = pd.read_csv(file)\n my_list = data.columns[(data.mean()>1500000)][2:]\n \n #Get the correlation between the three crimes\n corr = data[my_list].corr()\n prop_max = data['Property'].max()\n larc_max = data[\"Larceny\"].max()\n ans1 = 'Property'\n \n #Question 2 get the ammount of aggravated assaults\n new_data = data[data['Year']>=2000]\n new_data = new_data.sort_values('Murder',ascending=True)\n agg_as = new_data['Aggravated Assault']\n agg_as = agg_as[agg_as>850000]\n ans2 = agg_as.values\n \n #Question 3 get the highest year of crime and get the percentage of that\n S = 10\n N = int(len(data)/S)\n \n #Split the decades\n frames = [ data.iloc[i*S:(i+1)*S].copy() for i in range(N+1) ]\n dec_crime = []\n for dec in frames:\n dec_crime.append(dec['Total'].mean())\n \n #Get the highest crime and its percentage of the total\n my_dec = frames[np.argmax(dec_crime)]\n my_crimes = ['Violent','Property','Murder','Forcible Rape','Robbery','Aggravated Assault','Burglary','Larceny','Vehicle Theft']\n high_crime = my_dec[my_crimes].mean().idxmax()\n ans3 = float(my_dec[high_crime].mean()/my_dec['Total'].mean())\n return(ans1,ans2,ans3)", "def __calculate_ethnic_diversity_score(project: dict, student: dict) -> int:\n # project_name = project[\"fields\"][PROJECT_NAME_FIELD]\n # student_name = student[\"fields\"][SURVEY_STUDENT_NAME_FIELD][0]\n\n # print(\"Calculating ethnic pairing score for: Project({}) - Student({})\".format(project_name, student_name))\n\n # Get the ethnicities specified by the student\n student_ethnicities = student[\"fields\"].get(SURVEY_ETHNICITIES_FIELD, 
None)\n if not student_ethnicities:\n # The student didn't specify ethnicities, so we can't calculate a score\n return 0\n\n # Get the list of current assignments for the project team\n team_assignments = __get_team_assignments(project)\n\n # This list will hold the list of ethnicities on the team\n team_ethnicities = []\n for assignment in team_assignments:\n assigned_student_ethnicities = assignment.student[\"fields\"].get(SURVEY_ETHNICITIES_FIELD, None)\n\n if assigned_student_ethnicities:\n team_ethnicities.append(assigned_student_ethnicities)\n\n # Team ethnicities is going to be a list of lists, so let's flatten it\n team_ethnicities = [item for sublist in team_ethnicities for item in sublist]\n\n # ================================================================================================================\n # Get the count ethnicities for the already assigned students\n ethnicity_counter = __get_ethnicity_counter()\n ethnicity_counter.update(team_ethnicities)\n\n # Check each of the student's listed ethnicities and take the highest score\n best_ethnicity_score = 0\n for student_ethnicity in student_ethnicities:\n matching_ethnicity_count = ethnicity_counter.get(student_ethnicity)\n\n current_ethnicity_score = 0\n\n if matching_ethnicity_count == 0:\n # This is good, as it will make the team more diverse\n current_ethnicity_score = SURVEY_BASE_ETHNICITY_WEIGHT\n elif matching_ethnicity_count == 1:\n # This is better, as it will pair students with like ethnicities\n current_ethnicity_score = SURVEY_BASE_ETHNICITY_WEIGHT * 2\n\n # Check to see if this is a better match\n if current_ethnicity_score > best_ethnicity_score:\n best_ethnicity_score = current_ethnicity_score\n\n return best_ethnicity_score", "def main():\n class_n = input('Which class? ')\n class_name = class_n.upper()\n\n total_score001 = 0\n count001 = 0\n avg_score001 = 0\n total_score101 = 0\n count101 = 0\n avg_score101 = 0\n\n if class_name == '-1':\n print('No class scores were entered')\n else:\n high_score001 = 0\n low_score001 = 100\n high_score101 = 0\n low_score101 = 100\n while True:\n if class_name == '-1':\n break\n elif class_name == 'SC001':\n score001 = int(input('Score: '))\n if score001 > high_score001:\n high_score001 = score001\n if score001 < low_score001:\n low_score001 = score001\n total_score001 += score001\n count001 += 1\n avg_score001 = float(total_score001/count001)\n elif class_name == 'SC101':\n score101 = int(input('Score: '))\n if score101 > high_score101:\n high_score101 = score101\n if score101 < low_score101:\n low_score101 = score101\n total_score101 += score101\n count101 += 1\n avg_score101 = float(total_score101/count101)\n\n class_n = input('Which class? 
')\n class_name = class_n.upper()\n print('=============SC001=============')\n if count001 == 0:\n print('No score for SC001')\n else:\n print('Max (001): ' + str(high_score001))\n print('Min (001): ' + str(low_score001))\n print('Avg (001): ' + str(avg_score001))\n print('=============SC101=============')\n if count101 == 0:\n print('No score for SC101')\n else:\n print('Max (101): ' + str(high_score101))\n print('Min (101): ' + str(low_score101))\n print('Avg (101): ' + str(avg_score101))", "def bridge_score(bridge):\n return (bridge_strength(bridge), len(bridge))", "def compute_scores():\n\n prediction_table = load_predictions(\"all\")\n\n # ROC AUC scores\n roc_aucs = compute_score(prediction_table, roc_auc_score)\n roc_aucs = roc_aucs.round(4)\n save_evaluation(roc_aucs, \"roc_auc\")\n\n # Brier loss scores\n brier_losses = compute_score(prediction_table, brier_score_loss)\n brier_losses = brier_losses.round(4)\n save_evaluation(brier_losses, \"brier_loss\")", "def score(target01FilePath):\n\tlist_all, appnames = loadDataSet_real(target01FilePath)\n\tnB,sita,aB = get_sita(benign01_result, BENIGNNUMBER)\t\n\t\n\tscore_all = {}\n\ti = 0\n\t\n\tlist_test1 = [143,139,7,130,45,44]\n\tlist_test2 = [142,138,6,129,44]\n\tscore_temp1 =1\n\tscore_temp2 =1\n\tfor item1 in list_test1:\n\t if item1 != 45 and item1 != 7 and item1 != 139 and item1 != 130 and item1 != 9:\n\t\tscore_temp1 = score_temp1 * sita[item1-1]\t\n\tscore_temp1 = math.log(score_temp1) # score = -ln (score_temp)\n\tprint \"score_temp1: \"+str(score_temp1)\n\t\n\tfor item2 in list_test2:\n\t if item2 != 45 and item2 != 7 and item2 != 139 and item2 != 130 and item2 != 9:\n\t\tscore_temp2 = score_temp2 * sita[item2-1]\n\tscore_temp2 = math.log(score_temp2) # score = -ln (score_temp)\t\n\tprint \"score_temp2: \"+str(score_temp2)\n\t \n\t#for item in list_all:\t\t\n\t\t#score_temp =1\n\t\t#for subitem in item:\n\t\t #if subitem != 45 and subitem != 7 and subitem != 139 and subitem != 130 and subitem != 9:\n\t\t\t##delete the item whose sita>50% \n\t\t\t#score_temp = score_temp * sita[subitem-1]\t\t\n\t\t#score_temp = math.log(score_temp) # score = -ln (score_temp)\n\t\t#score_all[appnames[i]] = -score_temp\n\t\t#i = i+1\n\treturn score_all", "def score_candidates(user, candidates):\n if is_debug: # debug message\n print(os.linesep, f'[Debug] Score_candidates has started')\n u_age = user.get_age()\n u_city_id = user.get_city_id()\n u_gr = set(user.get_groups()) if user.get_groups() else None\n u_fr = set(user.get_friends()) if user.get_friends() else None\n u_books = user.get_books()\n u_interests = user.get_interests()\n # (важнее) друзья, возраст, группы, интересы, книги, город (менее важно)\n cand_score = []\n for c in candidates:\n score = 0\n if u_city_id and c.get_city_id() and c.get_city_id() == u_city_id:\n score += 1.1\n if u_books and find_common(c.get_books(), u_books) > 0:\n score += 1.2\n if u_interests and find_common(c.get_interests(), u_interests) > 0:\n score += 1.3\n if u_gr and find_common(c.get_groups(), u_gr) > 0:\n score += 1.4\n if u_age and c.get_age() and c.get_age() == u_age:\n score += 1.5\n if u_fr and find_common(c.get_friends(), u_fr) > 0:\n score += 1.6\n cand_score.append([c, score])\n cand_score.sort(key=lambda cand: cand[1], reverse=True)\n if is_debug: # debug message\n print(os.linesep, f'[Debug] Score_candidates has finished')\n return cand_score", "def score( self ):\r\n result = 0.0\r\n for rr in self.ee.getRsrcs( ):\r\n value = self.scoreRsrc( rr )\r\n result += value\r\n print( \"INFO: Value 
for the schedule is %s \" % ( rr, result ) )\r\n return( result )", "def stateQualtityScore(roomba):\n return 0", "def get_score(time):\n return 1000.0 / time" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Like `Flask.app.route` but takes only a function that returns HtmlSanitizedStr
def safe_route(app: Flask, rule, **options) -> Callable[[RouteFunction], None]:
    original_decorator = app.route(rule, **options)

    def decorator(fn: RouteFunction):
        return original_decorator(compose(str, fn))  # type: ignore

    return decorator
[ "def strip_html(func):\n\n cleaner = re.compile(\"<.*?>\")\n def new_func(*args, strip_html=False, **kwargs):\n name = func(*args, **kwargs)\n if strip_html:\n if isinstance(name, str):\n return html.unescape(re.sub(cleaner, \"\", name))\n elif isinstance(name, list) or isinstance(name, tuple):\n return type(name)([html.unescape(re.sub(cleaner, \"\", n)) for n in name])\n else:\n return name\n new_func.__name__ = func.__name__\n new_func.__doc__ = func.__doc__\n return new_func", "def html(input):\n output=atpic.cleaner_alex.clean(input)\n return output", "def websafe(val):\r\n if val is None:\r\n return ''\r\n if isinstance(val, unicode):\r\n val = val.encode('utf-8')\r\n val = str(val)\r\n return htmlquote(val)", "def make_url_safe(self, url):\n return url.replace(' ', '%20')\\\n .replace('(', '%28')\\\n .replace(')', '%29')\\\n .replace('\"', '%22')", "def HTML(text, parser=None, base_url=None): # real signature unknown; restored from __doc__\n pass", "def pytest_funcarg__mock_html_nolinks(request):\n mock_html = \"\"\"\n <html>\n <head></head>\n <body>\n </body></html>\n \"\"\"\n return mock_html", "def escape_html(value, allowed_tags=[], allowed_attributes=[],\n allowed_styles=[]):\n if isinstance(value, basestring):\n if is_bleach_version_5():\n css_sanitizer = CSSSanitizer(allowed_css_properties=allowed_styles)\n value = bleach.clean(value, tags=allowed_tags,\n attributes=allowed_attributes, \n css_sanitizer=css_sanitizer, strip=False)\n else:\n value = bleach.clean(value, tags=allowed_tags,\n attributes=allowed_attributes, \n styles=allowed_styles, strip=False)\n return value", "def embed_nodes(flask_app, title, route, methods=None, css=[], js=[], removals=[], **kwargs):\n\n def magic(f):\n @flask_app.route(route, methods=methods, **kwargs)\n @wraps(f)\n def call(*args, **kwargs):\n html = copy(HTML)\n content = html.xpath(\"//html/body/*[@id='content']\")[0]\n res = f(*args, **kwargs)\n\n if type(res) is BaseResponse:\n return res\n\n for item in res:\n content.append(item)\n\n if title is not None:\n html.xpath(\"//title\")[0].text += (' - ' + title)\n html.xpath(\"//*[@id='page-title']\")[0].text = '> %s' % title\n\n HEAD = html.xpath(\"/html/head\")[0]\n BODY = html.xpath(\"/html/body\")[0]\n\n for url in css:\n HEAD.append(stylesheet(url))\n\n for url in js:\n BODY.append(javascript(url))\n\n for remove in removals:\n try:\n node = html.xpath(remove)[0]\n node.getparent().remove(node)\n except IndexError:\n print \"Could not find %s to remove!\" % remove\n\n # make sure all fields take UTF-8\n for element in html.xpath(\"//input | //textarea\"):\n element.attrib['accept-charset'] = 'UTF-8'\n\n return '<!DOCTYPE HTML>\\n%s' % ET.tostring(html, pretty_print=True)\n return call\n return magic", "def render_html(self) -> str:", "def wsgiapp(self):\n def wrapped(environ, start_response):\n \"\"\"wsgi application function\"\"\"\n start_time = time.clock()\n req = Request(environ)\n res = Responder(start_response, environ, self.mylookup, start_time)\n \n \n found_matches = None\n route = {}\n for reg, route in self.routes:\n found_matches = re.match(route['regex'], req.path)\n if found_matches and meetsreqs(req, route['reqs']):\n break\n else:\n return ''\n bindings = route['kwargs']\n for part in route['parts']:\n if len(part) == 2:\n bindings[part[0]] = part[1]\n for part in xrange(len(found_matches.groups())):\n if found_matches.group(part+1):\n partname = route['parts'][part][0]\n bindings[partname] = found_matches.group(part+1)\n\n return str(route['function'](res, 
**dict(bindings)))\n\n return wrapped", "def verbatim(self, rq, *args):\n return \"\"\"<html><body style=\"background:url('%s/_files/logo.gif') top right no-repeat;\">\n<h1>welcome to verbatim html from python</h1>\nYou requested [<strong>%s</strong>]<br/>\nYou query is [<strong>%s</strong>]<br/>\n</body></html>\"\"\" % (rq.script, escape('/'.join(args)), escape(rq.q.__repr__()))", "def safeHTML(s):\n parser = StrippingParser()\n parser.feed(s)\n parser.close()\n parser.cleanup()\n return parser.result", "def latex_safe(s, url_check=True, wrapper=\"url\"):\n if url_check:\n # If it looks like a URL make it a latex URL\n url_search = HTTP_RE.search(s)\n if url_search:\n url = r\"{start}\\{wrapper}{{{s}}}{end}\".format(\n start=(latex_safe(s[: url_search.start()])),\n end=(latex_safe(s[url_search.end() :])),\n wrapper=wrapper,\n s=s[url_search.start() : url_search.end()],\n )\n return url\n return (\n s.replace(\"&\", \"\\&\")\n .replace(\"$\", \"\\$\")\n .replace(\"#\", \"\\#\")\n .replace(\"_\", \"\\_\")\n )", "def my_form_post():\n all_output = feedline(request.form['text'],True)\n all_output = re.sub('\\n','<br>',all_output) # Change \\n to <br>\n html = \"\"\"\n <html>\n <head>\n <title>MyPython WebApp</title>\n </head>\n <body>\n <div id=\"container\">\n <div class=\"title\">\n <h1>MyPython WebApp</h1>\n </div>\n </div>\n <p>%s\n </p>\n <div id=\"content\">\n <form action=\".\" method=\"POST\">\n <input type=\"text\" name=\"text\">\n <input type=\"submit\" name=\"my-form\" value=\"Send\">\n </form>\n </div>\n </body>\n </html>\"\"\" %(all_output)\n return html", "def pytest_funcarg__mock_html_twolinks(request):\n mock_html = \"\"\"\n <html>\n <head></head>\n <body>\n <a href=\"link1.html\">Link 1</a>\n <a href=\"link2.html\">Link 2</a>\n </body></html>\n \"\"\"\n return mock_html", "def strong_decorate(func):\n def func_wrapper(*args, **kwargs):\n return \"<strong>{0}</strong>\".format(func(*args, **kwargs))\n\n return func_wrapper", "def _sanitize_function(self, func_msg):\n if func_msg is not None:\n func = str(func_msg)\n else:\n func = None\n return func", "def doHTML(bunch, text, env):\n return \"<html>%s</html>\" % text", "def hello_flask():\n return 'Hello HBNB!'" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Program entry point. Loads a CSV file of observations. Determines how items were exchanged during various rendezvous. Prints the exchanges as they happen, if desired. Prints the latest owner of a specific item, if desired. Otherwise neatly prints a dictionary mapping suspects to the item they currently own. This program will return an exit code of `1` in one of two
def main(args):
    # Tuple of carried items and timeline
    time_tuple = load_timeline(args.observations)

    # For each Observation in list, calculated final held item
    for suspectPair in time_tuple[1].rendezvous():
        # If user wanted exchanges, print each exchange
        if args.exchanges:
            print(suspectPair[0].name + " meets with " + suspectPair[1].name +
                  " to exchange " + time_tuple[0][suspectPair[0].name] +
                  " for " + time_tuple[0][suspectPair[1].name] + ".")
        # Trades items
        temp_item = time_tuple[0][suspectPair[0].name]
        time_tuple[0][suspectPair[0].name] = time_tuple[0][suspectPair[1].name]
        time_tuple[0][suspectPair[1].name] = temp_item

    # If no items specified or exchanges is true,
    # print list of final help items
    if (args.item == '') or (args.exchanges):
        pprint.pprint(time_tuple[0], indent=4)

    # If user specified an item, print who has said item
    if not args.item == '':
        for name, i in time_tuple[0].items():
            if i == args.item:
                print(name + " had the " + i)
[ "def check_my_books(login):\n\n # rented.csv = [ID, rental_date, return_date, login]\n with open('rented.csv', 'r') as rented_base:\n rented_reader = csv.reader(rented_base)\n next(rented_reader)\n\n books_table = []\n\n for line in rented_reader:\n if line[-1] == login:\n books_table.append([line[0],line[1],line[2]])\n\n print(\"Your rented books are:\")\n\n # books.csv = [title, author, year, ID, book_type]\n with open('books.csv', 'r') as book_base:\n book_reader = csv.reader(book_base)\n next(book_reader)\n for line in book_reader:\n for box in books_table:\n if line[3] == box[0]:\n print(line)\n print(\"\\tRented on\",box[1],\"\\nTo be returned on\",box[2])\n\n\n input('> ')", "def main():\n\n # Refer to Problem Set 07 README.md for instructions and tips.\n\n # 6.1: Read in < sh_basic_info.csv >\n\n basic_info = read_csv_file('sh_basic_info.csv')\n\n # 6.2: Create instances of < SuperHeroine >\n\n heroines = {}\n for hero in basic_info:\n heroines[hero['name']] = SuperHeroine(hero['name'], hero['full_name'], hero['team'],\n hero['eye_color'], hero['hair_color'], hero['base'])\n print(heroines)\n\n # 6.3: Read in < sh_additional_info.csv >\n\n additional_info = read_csv_file('sh_additional_info.csv')\n\n # 6.4: Add powers and nemesis\n\n for row in additional_info:\n name = row[\"Heroine Name\"]\n instance_affected = heroines[name]\n how_affected = row[\"Category\"]\n value = row['Value']\n if how_affected == 'power':\n instance_affected.add_power(value)\n else:\n instance_affected.add_nemesis(value)\n\n # 6.5: Write to file\n\n write_to_file('storm.txt',heroines['Storm'])\n write_to_file('scarlet_witch.txt',heroines['Scarlet Witch'])\n write_to_file('jessica_jones.txt',heroines['Jessica Jones'])", "def main():\n\n # Ask for games to compare.\n games = {}\n more_games = True\n\n while more_games:\n search = input(\"Enter board game to search (leave empty if finished):\")\n\n if search:\n matches = bgg_compare.find_game(search)\n\n print(\"Games found:\")\n for game_id, name in matches.items():\n print(game_id + \"\\t\" + name)\n id = input(\"Enter the number before the intended game:\")\n games[id] = matches[id]\n\n else:\n more_games = False\n\n # If no games entered, compare all downloaded ratings.\n if not games:\n ids = []\n for f in glob.glob(\"[0-9]*.csv\"):\n id = os.path.splitext(f)[0]\n ids.append(id)\n game_info = bgg_compare.get_game_info(ids)\n for i, info in enumerate(game_info):\n name = info.find(\"name\", attrs={\"type\": \"primary\"})[\"value\"]\n games[ids[i]] = name\n\n print(\"Comparing games:\")\n\n all_ratings = []\n\n for game_id, name in games.items():\n\n print(name)\n\n ratings = {}\n filename = \"%s.csv\" % game_id\n\n try:\n ratings = bgg_compare.read_ratings(filename)\n except:\n ratings = bgg_compare.get_ratings(game_id)\n bgg_compare.write_ratings(ratings, filename)\n\n all_ratings.append(ratings)\n\n rankings = bgg_compare.condorcet_irv(all_ratings, list(games.keys()))\n\n print(\"Games ranked by Condorcet-IRV:\")\n\n header = [\"Rank\", \"ID\", \"Game\", \"Tiebreak\"]\n print(\"\\t\".join(header))\n\n for i, (game_id, tiebreak) in enumerate(rankings, 1):\n print(\"\\t\".join([str(i), game_id, games[game_id], str(tiebreak)]))\n\n outfile = input(\"Enter filename to save results (leave empty to not save)\")\n\n if outfile:\n with open(outfile, \"w\") as csvfile:\n writer = csv.writer(csvfile)\n writer.writerow(header)\n for i, (game_id, tiebreak) in enumerate(rankings, 1):\n writer.writerow([str(i), game_id, games[game_id], str(tiebreak)])", "def 
main():\n \n if len(sys.argv) < 5:\n print(usage())\n exit(1)\n\n token = sys.argv[1]\n csv_file = sys.argv[2]\n repo_owner = sys.argv[3] \n repo_name = sys.argv[4]\n\n table = []\n headers = []\n with open(csv_file, newline='') as f:\n data = csv.DictReader(f)\n table = [row for row in data]\n headers = data.fieldnames\n\n for row in table:\n issue = define_issue(headers, row)\n print(issue)\n if issue_exists(repo_owner, repo_name, row['id'], token):\n print(\"issue {} exists. Skipping...\".format(row['id']))\n continue\n id = create_issue(issue, token, repo_owner, repo_name)\n row['id'] = id\n \n with open(csv_file, 'w', newline='') as f:\n writer = csv.DictWriter(f, fieldnames=headers)\n writer.writeheader()\n for row in table:\n writer.writerow(row)", "def main():\n parser = argparse.ArgumentParser(description='A parser for Nosh.com')\n parser.add_argument('url', help='Valid nosh.com URL from a restaurant')\n parser.add_argument('-o', dest='output', help='Filename for the output',\n default=None)\n\n args = parser.parse_args()\n\n nosh = Nosh()\n terminal_print(\"\\rGetting restaurant... \")\n try:\n menu_items = nosh.get_menu_items_from_url(args.url)\n terminal_print(\"DONE!\\n\")\n count = len(menu_items)\n i = 0\n for mi in menu_items:\n terminal_print(\"\\rGetting items: %s / %s (%s%%)\" %\n (i, count, (100 * i / count)))\n description = nosh.get_item_description_from_url(menu_items[mi])\n menu_items[mi] = description\n i += 1\n\n if menu_items:\n terminal_print(\"\\rGetting items... DONE!%s\\n\" % (' ' * 20))\n if not args.output:\n regex = re.compile('(\\d+)')\n restaurant_id = regex.findall(args.url)[0]\n filename = '%s.csv' % restaurant_id\n else:\n filename = args.output\n terminal_print(\"\\rItems saved in: %s\\n\" % filename)\n write_csv_file(filename, menu_items)\n else:\n terminal_print(\"\\rNo items found!\\n\")\n except Exception, e:\n if hasattr(e, 'msg'):\n print e.msg\n else:\n print e", "def main():\n movies = read_movies('bond.csv')\n\n print('Original list (first 10):')\n print_movies(movies[:10])\n\n sorted_movies = movie_sort(movies)\n print('\\nSorted list (by year, first 10):')\n print_movies(sorted_movies[:10])\n\n bonus(movies)", "def load_data(filename):\n #Admittedly copy-pasted from Heredity project cuz I'm resourceful like that\n #Makes 2 lists, one for evidence and one for labels\n evidence = []\n labels = []\n #Open csv file\n with open(\"shopping.csv\") as f:\n reader = csv.reader(f)\n next(reader)\n #Iterate through user rows of file\n for row in reader:\n i = 0\n tmp_list = []\n for column in row:\n if i in [0,2,4,11,12,13,14]:\n column = int(column)\n if i in [1,3,5,6,7,8,9]:\n column = float(column)\n if i == 10:\n if column == \"Jan\":\n column = 0\n if column == \"Feb\":\n column = 1\n if column == \"Mar\":\n column = 2\n if column == \"Apr\":\n column = 3\n if column == \"May\":\n column = 4\n if column == \"June\":\n column = 5\n if column == \"Jul\":\n column = 6\n if column == \"Aug\":\n column = 7\n if column == \"Sep\":\n column = 8\n if column == \"Oct\":\n column = 9\n if column == \"Nov\":\n column = 10\n if column == \"Dec\":\n column = 11\n if i in [15,16]:\n if column == \"Returning_Visitor\" or column == \"TRUE\":\n column = 1\n else:\n column = 0\n if i == 17:\n if column == \"TRUE\":\n column = 1\n else:\n column = 0\n labels.append(column)\n else:\n tmp_list.append(column)\n i+=1\n evidence.append(tmp_list)\n \n return (evidence,labels)", "def main():\n filename = \"data/exercise.csv\"\n analyze(filename)", "def 
print_records(results_file, player_1, player_2):\n # keep track of the results in the file\n results_lines = []\n\n # read all of the lines from the file into a list\n with open(results_file) as f:\n results_lines = f.readlines()\n\n # parse the results (results will be a dictionary of string and tuple)\n # { string->name: tuple->(int->wins, int->losses) }\n # { 'reed': (2, 5), 'britney': (5, 2) }\n results = parse_results(results_lines)\n\n player_1_wins = results[player_1][0]\n player_1_losses = results[player_1][1]\n player_2_wins = results[player_2][0]\n player_2_losses = results[player_2][1]\n\n print \"\\n%s's record is %d wins and %d losses\" % (player_1, player_1_wins, player_1_losses)\n print \"\\n%s's record is %d wins and %d losses\" % (player_2, player_2_wins, player_2_losses)", "def main():\n\n #get the csv file into a data-frame\n universities_df = pd.read_csv('universities_data.csv', encoding = 'utf-8-sig')\n universities_names_list = universities_df['name'].tolist()\n\n #get list of university objects\n url = 'http://universities.hipolabs.com/search?country=Israel'\n api_universities = Get_universities(url)\n list_of_universities = api_universities.get_universities_info()\n\n #to see if we got new entities or not for exporting to csv later..\n is_new_entities = False\n\n for university in list_of_universities:\n if university.name not in universities_names_list:\n is_new_entities = True\n universities_df= universities_df.append(pd.DataFrame({\n 'alpha_two_code': [university.alpha_two_code], \n 'country': [university.country],\n 'web_pages': [str(university.web_pages)],\n 'domains': [str(university.domains)],\n 'name': [university.name],\n 'state_province':[str(university.state_province)]}) , ignore_index = True)\n\n #export back to csv if true\n if is_new_entities: \n print('we got new entities!') \n universities_df.to_csv('universities_data.csv', encoding = 'utf-8-sig', index = False)\n else:print('no new universities for now!')", "def main():\n\n print(\"--------------------\")\n print(\"| codedrome.com |\")\n print(\"| Percentile Ranks |\")\n print(\"--------------------\\n\")\n\n try:\n\n f_in = open(\"grades.csv\")\n r = csv.DictReader(f_in, fieldnames=['grade'])\n\n grades = []\n\n for item in r:\n grades.append(item['grade'])\n\n f_in.close()\n\n percentile_ranks = percentileranks.calculate_percentile_ranks(grades)\n\n percentileranks.print_percentile_ranks(percentile_ranks)\n\n except Exception as e:\n\n print(e)", "def check_my_books(main_page):\n\n login = main_page.login\n\n # rented.csv = [ID, rental_date, return_date, login]\n with open('rented.csv', 'r') as rented_base:\n rented_reader = csv.reader(rented_base)\n next(rented_reader)\n\n books_table = []\n\n for line in rented_reader:\n if line[-1] == login:\n books_table.append([line[0],line[1],line[2]])\n\n print(\"Your rented books are:\")\n\n books_table_reader(books_table)", "def run_csv_tally(csv_file,seats,runs):\n csv_data = file2table(csv_file)\n cands = get_candidates(csv_data) \n data = {}\n data['BALLOTS'] = swap(csv_data) \n data['CANDIDATES'] = cands \n was_elected = {}\n for i in range(runs):\n result = tally_csv(copy.deepcopy(data),seats)\n last = result.pop()\n for cand in last['committee']:\n if cand.eid in was_elected:\n was_elected[cand.eid] += 1\n else:\n was_elected[cand.eid] = 1\n sorted_elected = sorted(list(was_elected.items()),key=itemgetter(1),reverse=True)\n out = ''\n for tup in sorted_elected:\n out += tup[0]+' ('+ str(tup[1])+')\\n'\n return out", "def main():\n parser = 
argparse.ArgumentParser(description=__doc__)\n parser.add_argument(\"json_file\", help=\"JSON file containing abstracts data\")\n args = parser.parse_args()\n\n in_file = args.json_file\n with open(in_file, encoding=\"utf-8\") as jfp:\n data = json.load(jfp)\n\n for item in data:\n cit_list, doi_item = citation_info(item)\n\n year = date.today().year\n copy_item = f\"Copyright: © ({year}) {cit_list}\" if cit_list else \"\"\n cit_item = f\"Citation: {cit_list} ({year}) {item['title']}. \" \\\n f\"Bernstein Conference {year}.{doi_item}\"\n\n print(f\"{copy_item}\\n{cit_item}\")", "def book_printer(book_type):\n\n # books.csv = [title,author,year,ID,book_type]\n with open('books.csv', 'r') as book_base:\n book_list = csv.reader(book_base)\n next(book_list)\n\n for book_data in book_list:\n if book_data[-1] == book_type:\n print(book_data)\n ID = book_data[-2]\n if_rented(ID)\n print('\\n')\n return", "def main(args):\n input_file = args[1]\n output_occupations = args[2]\n output_states = args[3]\n\n print(\"Analyzing input file:\")\n summary = process_data.Summary(input_file)\n print(\"Reading input data\")\n summary.read_file()\n\n print(\"Computing summaries\")\n occupations = summary.get_results(input_format.Concept.SOC_NAME)\n states = summary.get_results(input_format.Concept.WORK_STATE)\n\n print(\"Writing results\")\n occupations.to_file(output_occupations)\n states.to_file(output_states)", "def import_observations(self):\n\n fn = QFileDialog(self).getOpenFileName(self, \"Choose a eMOC project file\", \"\",\n \"Project files (*.eMOC);;All files (*)\")\n fileName = fn[0] if type(fn) is tuple else fn\n\n if self.projectFileName and fileName == self.projectFileName:\n QMessageBox.critical(None, programName,\n \"This project is already open\", QMessageBox.Ok | QMessageBox.Default,\n QMessageBox.NoButton)\n return\n\n if fileName:\n try:\n fromProject = json.loads(open(fileName, \"r\").read())\n except:\n QMessageBox.critical(self, programName, \"This project file seems corrupted\")\n return\n\n # transform time to decimal\n fromProject = convert_time_to_decimal(fromProject) # function in utilities.py\n\n dbc = dialog.ChooseObservationsToImport(\"Choose the observations to import:\",\n sorted(list(fromProject[OBSERVATIONS].keys())))\n\n if dbc.exec_():\n\n selected_observations = dbc.get_selected_observations()\n if selected_observations:\n flagImported = False\n\n # set of behaviors in current projet ethogram\n behav_set = set([self.pj[ETHOGRAM][idx][\"code\"] for idx in self.pj[ETHOGRAM]])\n\n # set of subjects in current projet\n subjects_set = set([self.pj[SUBJECTS][idx][\"name\"] for idx in self.pj[SUBJECTS]])\n\n for obsId in selected_observations:\n\n # check if behaviors are in current project ethogram\n new_behav_set = set(\n [event[EVENT_BEHAVIOR_FIELD_IDX] for event in fromProject[OBSERVATIONS][obsId][EVENTS]\n if event[EVENT_BEHAVIOR_FIELD_IDX] not in behav_set])\n if new_behav_set:\n diag_result = dialog.MessageDialog(programName,\n (\"Some coded behaviors in <b>{}</b> are\"\n \"not in the ethogram:<br><b>{}</b>\").format(obsId,\n \", \".join(\n new_behav_set)),\n [\"Interrupt import\", \"Skip observation\",\n \"Import observation\"])\n if diag_result == \"Interrupt import\":\n return\n if diag_result == \"Skip observation\":\n continue\n\n # check if subjects are in current project\n new_subject_set = set(\n [event[EVENT_SUBJECT_FIELD_IDX] for event in fromProject[OBSERVATIONS][obsId][EVENTS]\n if event[EVENT_SUBJECT_FIELD_IDX] not in subjects_set])\n if new_subject_set and 
new_subject_set != {\"\"}:\n diag_result = dialog.MessageDialog(programName,\n (\n \"Some coded subjects in <b>{}</b> are not defined in the project:<br>\"\n \"<b>{}</b>\").format(obsId,\n \", \".join(new_subject_set)),\n [\"Interrupt import\", \"Skip observation\",\n \"Import observation\"])\n\n if diag_result == \"Interrupt import\":\n return\n\n if diag_result == \"Skip observation\":\n continue\n\n if obsId in self.pj[OBSERVATIONS].keys():\n diag_result = dialog.MessageDialog(programName,\n (\"The observation <b>{}</b>\"\n \"already exists in the current project.<br>\").format(\n obsId),\n [\"Interrupt import\", \"Skip observation\",\n \"Rename observation\"])\n if diag_result == \"Interrupt import\":\n return\n\n if diag_result == \"Rename observation\":\n self.pj[OBSERVATIONS][\"{} (imported at {})\".format(obsId,\n datetime_iso8601()\n )] = dict(\n fromProject[OBSERVATIONS][obsId])\n flagImported = True\n else:\n self.pj[OBSERVATIONS][obsId] = dict(fromProject[OBSERVATIONS][obsId])\n flagImported = True\n\n if flagImported:\n QMessageBox.information(self, programName, \"Observations imported successfully\")", "def main():\n check_input(sys.argv[0])\n infile = sys.argv[1]\n seqs = []\n for _, seq in read_fasta(infile):\n seqs.append(seq)\n profile = generate_profile(seqs)\n print(get_consensus(profile))\n print_profile(profile)", "def testing_centers(self):\n userstate = input(\"What state are you located in (Abbreviation): \").upper()\n usercity = input(\"What city are you located in? (No Abbreviation): \").upper()\n\n df = pd.read_csv(\"HHS_Provider_Relief_Fund.csv\")\n df3 = df[df[\"State\"] == userstate]\n df5 = df3[df3[\"City\"] == usercity]\n listvals = df5.values\n for item in listvals:\n print(f\"\\n----------\\nName: {item[0]}\\nState: {item[1]}\\nCity: {item[2]}\")\n\n with open(\"states.json\", \"r\", encoding=\"utf-8\") as file:states = json.load(file)\n userstate = states[userstate.upper()]\n\n with open(\"covid_data.csv\", \"r\", encoding=\"utf-8\") as file:\n csv = file.readlines()[3:]\n columns = csv[0].split(\",\")\n csv = csv[1:]\n for row in csv:\n if row.split(\",\")[0].lower() == userstate.lower():\n for item in range(len(row.split(\",\"))):\n print(columns[item].replace(\"\\n\", \"\") + \": \" + row.split(\",\")[item])" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Keep only not-NaN column positions in all arrays.
def drop_nan_columns(arrays): # Keep all column indices not_nan_filter = ones(len(arrays[0]), dtype=bool) # Keep column indices without missing value in all arrays for a in arrays: not_nan_filter &= ~isnan(a) return [a[not_nan_filter] for a in arrays]
[ "def columns_with_na_values(data):\n aux = data.isna().sum() > 0\n return aux.index[aux.values].values", "def remove_nans(coords):\n s = np.apply_along_axis(sum,1,np.isnan(coords[1])) == 0\n coords[0] = (np.asarray(coords[0])[s]).tolist()\n coords[1] = coords[1][s,:]", "def remove_nans(arr):\n not_nan = [i for i in range(len(arr)) if not np.isnan(arr[i])]\n\n return not_nan, arr[not_nan]", "def __where_not_nan(arr: np.ndarray):\n return np.where(np.isfinite(arr))", "def rm_nan(arr):\n return arr[np.isfinite(arr)].flatten()", "def locate_nan_rows(arr):\n # Count the number of NaNs in each row\n nan_counts = np.sum(~np.isfinite(arr), axis=1)\n # Trigger on a NaN appearing anywhere in a line/row\n nans, = np.where(nan_counts > 1)\n return frozenset(nans)", "def drop_nan(*xs):\n if len(xs) == 1 and type(xs[0]) == np.ndarray and len(xs[0].shape) > 1:\n xs = xs[0]\n f = lambda x: ~np.isnan(x)\n idx = np.bitwise_and.reduce(np.apply_along_axis(f, 1, xs), 0)\n return np.apply_along_axis(lambda x: x[idx], 1, xs)", "def np_dropnan(x):\n return x[np.logical_not(np.isnan(x))]", "def remove_nan(ps):\n\n mask = np.equal(ps[\"cmagpsf\"], ps[\"cmagpsf\"])\n\n return [np.array(_col)[mask].astype(type(_col[0])) for _col in ps]", "def nonans(array):\n return array[~np.isnan(array)]", "def purgeNanEveryWhere(df):\n #Row-wise dropping\n toDrop = np.array([])\n for i in range(df.shape[0]):\n if( np.sum ( pd.isnull(df.iloc[i]) ) == df.shape[1]-1 ):\n toDrop= np.append(toDrop,i)\n df.drop(df.index[toDrop.astype(int)],inplace=True) \n #Column-wise dropping\n for col in df.columns:\n arr = pd.notnull(df[col])\n nnan = np.sum(arr) \n if (nnan == df.shape[1]):\n df.drop(col,inplace=True,axis=1)\n return df", "def filter_not_nans(f):\n h = f.shape[0]\n w = f.shape[1]\n no_nans = []\n for r in range(h):\n for c in range(w):\n pixel = f[r][c]\n if np.isnan(pixel[0]) or np.isnan(pixel[1]):\n no_nans.append((r,c))\n return no_nans", "def _non_zero_columns_search(array):\n col_num = array.shape[1]\n non_zero_col = CArray([], dtype=int)\n for c in range(col_num):\n col = array[:, c]\n if col.any() == True:\n non_zero_col = non_zero_col.append(c)\n\n return non_zero_col", "def remove_null_cols(df, thresh=0.08):\n \n # look at this\n # df.dropna(thresh=int(df.shape[0] * .9), axis=1)\n pct_null = df.isnull().sum() / len(df)\n missing_features = pct_null[pct_null > thresh].index\n return df.drop(missing_features, axis=1)", "def clean(Z):\n columncounts = np.sum(Z, axis=0)\n nzc = np.where(columncounts>0)[0]\n retZ = Z[:, nzc]\n\n return retZ", "def pruneNansFromTable(table):\n nan_in_row = np.zeros(len(table), dtype=np.bool)\n for col in table.colnames:\n nan_in_row |= np.isnan(table[col])\n \n return table[~nan_in_row]", "def _nan_cells(traces):\n # Find all cells with NaNs\n nancells = []\n ncells = -1\n for cs in traces:\n if len(traces[cs]) > 0:\n ncells = np.shape(traces[cs])[1]\n ns = np.sum(np.sum(np.invert(np.isfinite(\n traces[cs])), axis=2), axis=0)\n vals = np.arange(ncells)\n nancells.extend(vals[ns > 0])\n\n # Set _mask_cells if it hasn't been set\n out = np.zeros(ncells, dtype=bool)\n\n # Convert nancells to a list of good cells\n nancells = np.array(list(set(nancells)))\n if len(nancells) > 0:\n print('Warning: %i cells have NaNs'%len(nancells))\n out[nancells] = True\n\n return out", "def get_full_column(X: np.ndarray):\n if len(X.shape) == 1:\n X = X.reshape((1, *X.shape))\n inds = np.arange(X.shape[1])\n wherenonnan = np.isfinite(X).all(axis=0)\n ind = inds[wherenonnan][0]\n return ind", "def 
get_nan_columns(dataframe):\n\n names = dataframe.columns.tolist()\n nans = []\n\n for column in dataframe:\n nan_count = len(dataframe[column]) - dataframe[column].count()\n if nan_count > 0:\n nans.append(nan_count)\n else:\n names.remove(column)\n return (names, nans)" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Drop any slice that contains only value from df.
def drop_uniform_slice_from_dataframe(df, value, axis=0): if axis == 0: dropped = (df == value).all(axis=0) if any(dropped): print('Removed {} column index(ices) whose values are all {}.'. format(dropped.sum(), value)) return df.ix[:, ~dropped] elif axis == 1: dropped = (df == value).all(axis=1) if any(dropped): print('Removed {} row index(ices) whose values are all {}.'.format( dropped.sum(), value)) return df.ix[~dropped, :]
[ "def keep_only_values(data, col, value):\n \n total = list(data[col].unique())\n invalid = set(total) - set(value)\n for val in invalid:\n data = data[data[col] != val]\n\n return data", "def _filter_depth_df(self, df):\n df = df.loc[self.depth_from:self.depth_to]\n if len(df) == 0:\n return df\n if (self.depth_from == df.index[0]) and (self.depth_to == df.index[-1]):\n # See __getitem__ docstring for an explanation of this behaviour\n df.drop(df.index[-1], inplace=True)\n return df", "def remove(df, pattern):\n return df[~df.index.isin(df.query(pattern).index)]", "def drop_transafers(df):\n return df.filter(~(df.valor == 0))", "def filter(self):\n self.data = self.data.loc[~self.data.isnull().any(1),:]", "def remove_rows_without_feature(df, feature):\n return df[np.isfinite(df[feature])]", "def drop_excluded(self):\n for value in self.exclude_values:\n self.data.drop(self.data.index[self.data == value], inplace=True)", "def df_cleaner(df):\n return df.dropna()", "def _rm_empty_cols(df: pd.DataFrame) -> pd.DataFrame:\n n_distinct = df.nunique()\n return df.drop( n_distinct[ n_distinct <= 1].index, axis=1)", "def null_removal_drop(dataframe, colname):\n\n dataframe = dataframe.dropna(subset=[colname])\n\n return dataframe", "def remove_not_available(df):\n drop_indices = df.index[df['genre'] == 'Not Available'].tolist()\n df = df.drop(drop_indices)\n return df", "def clean(df):\n return list(filter(lambda r: None not in r.values(), df))", "def remove_nan(self, dataframe):\n return dataframe.dropna()", "def drop_zero_pay(df):\n df = df.loc[df.payann > 0]\n return df", "def get_subtable(df, col, val) -> pd.DataFrame:\r\n return df[df[col] == val].drop(columns=col)", "def drop_illogical(df,var1,var2):\r\n #Mask the illogical entries\r\n mask = df[var1]>df[var2]\r\n #Record the number of entries\r\n NumRecords = df.shape[0]\r\n #drop the illogical entries\r\n df = df[df.keys()][~mask]\r\n #Notify the user how many records were dropped\r\n print('{} records dropped because {} is greater than {}'.format(NumRecords-df.shape[0],var1,var2))\r\n \r\n return df", "def trim (df, threshold):\n x = df.copy()\n x[np.abs(x)<threshold] = 0\n return x", "def delete_entries(df, column, values):\n for val in values:\n dropindex = df[df[column] == val].index\n df.drop(index = dropindex, inplace = True)", "def filter_rows(col, value):\n\n def filterer(data):\n return data.loc[data[col] != value]\n\n return filterer" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Split df into n_split blocks (by row).
def split_dataframe(df, n_split, axis=0): # TODO: implement axis logic if df.shape[0] < n_split: raise ValueError( 'n_split ({}) can\'t be greater than the number of rows ({}).'. format(n_split, df.shape[0])) elif n_split <= 0: raise ValueError('n_split ({}) can\'t be less than 0.'.format(n_split)) n = df.shape[0] // n_split splits = [] for i in range(n_split): start_i = i * n end_i = (i + 1) * n splits.append(df.iloc[start_i:end_i, :]) i = n * n_split if i < df.shape[0]: splits.append(df.ix[i:]) return splits
[ "def split_df(df, n_chunks):\n chunk_size = int(np.ceil(df.shape[0] / n_chunks))\n assert n_chunks * chunk_size >= df.shape[0]\n chunks = []\n for i in range(0, df.shape[0], chunk_size):\n chunks.append(df[i:i + chunk_size])\n assert len(chunks) == n_chunks\n return chunks", "def chunk_df(df, n):\n for i in range(0, df.shape[0], n):\n yield df[i:i + n]", "def split_df(df, n_split, axis=0):\n\n if df.shape[axis] < n_split:\n raise ValueError('Number of slices ({}) < n_split ({})'.format(\n df.shape[axis], n_split))\n elif n_split <= 0:\n raise ValueError('n_split ({}) <= 0'.format(n_split))\n\n n = df.shape[axis] // n_split\n\n list_ = []\n\n for i in range(n_split):\n start_i = i * n\n end_i = (i + 1) * n\n\n if axis:\n list_.append(df.iloc[:, start_i:end_i])\n else:\n list_.append(df.iloc[start_i:end_i, :])\n\n # Get leftovers (if any)\n i = n * n_split\n if i < df.shape[axis]:\n\n if axis:\n list_.append(df.iloc[:, i:])\n else:\n list_.append(df.iloc[i:, :])\n\n return list_", "def split_dataframe(df, size=10*1024*1024):\n \n # size of each row\n row_size = df.memory_usage().sum() / len(df)\n # maximum number of rows in each segment\n row_limit = int(size // row_size)\n # number of segments\n seg_num = (len(df)+row_limit-1)//row_limit\n # split df into segments\n segments = [df.iloc[i*row_limit : (i+1)*row_limit] for i in range(seg_num)]\n\n return segments", "def split_fold(df, n_fold=5):\n kf = KFold(n_splits=n_fold, shuffle=True, random_state=11)\n df_folds = list()\n\n for i, result in enumerate(kf.split(df)):\n sub_df = df.loc[result[1]]\n sub_df['fold'] = i\n df_folds.append(sub_df)\n\n df = pd.concat(df_folds).sort_index()\n \n return df", "def split_df(dfm, chunk_size):\n indices = index_marks(dfm.shape[0], chunk_size)\n return np.array_split(dfm, indices)", "def _fast_split_df(g_df):\n\n # TODO (#184): speed up when user doesn't need an index\n # right now, this is essentially a copy of\n # pandas.core.groupby.ops.DataSplitter.__iter__\n from pandas._libs import lib\n splitter = g_df.grouper._get_splitter(g_df.obj)\n\n starts, ends = lib.generate_slices(splitter.slabels, splitter.ngroups)\n\n # TODO: reset index\n sdata = splitter._get_sorted_data()\n\n # TODO: avoid costly make_block call, and hard-coded BlockManager init actions.\n # neither of these things is necessary when subsetting rows.\n for start, end in zip(starts, ends):\n yield splitter._chop(sdata, slice(start, end))", "def getKSplits(df, n_splits, seed = None):\n\n result = []\n\n # None random seed is same as not setting it\n df_shuffled = df.sample(len(df), random_state = seed)\n\n fold_size = int(len(df) / n_splits)\n\n for i in range(n_splits):\n if i == n_splits - 1: # last iteration\n df_fold = df_shuffled[fold_size * (i): len(df)] # gets remainder\n else:\n df_fold = df_shuffled[fold_size * (i):fold_size * (i + 1) ] # python starts indexing at 0\n result.append(df_fold)\n\n return result", "def dataFrameSplit(df, norec=1000000, outfile= None):\n # calculation of the no. 
of rows of the dataframe\n df_rsz = len(df.index)\n if df_rsz>norec:\n no_splits = np.ceil(df_rsz/norec)\n dfarr = np.array_split(df,no_splits)\n return dfarr\n else:\n print(\"The dataframe doesn't have sufficient records\")\n \n # printing to disk when \n if outfile!=None:\n i=0\n for arr in dfarr:\n arr.to_csv(\"D:\\\\ddf\"+str(i+1)+\".csv\",encoding='utf-8', index=False,\n header=False)\n i = i+1", "def split_dataset(dataset_df, a=0, b=30, step_size=5):\n\t\n\tend_b = dataset_df.shape[0] \n\t\n\tdataset_splits = []\n\t\n\t# Take a 30 period window of dataset_df, with a step size of 5\n\twhile b < end_b:\n\n\t\twindow = dataset_df.iloc[a:b, :]\n\t\tdataset_splits.append(window)\n\t\t\n\t\ta += step_size\n\t\tb += step_size\n\t# dataset_splits = dataset_splits[:len(dataset_splits)-5] # remove last 5 element since we predict price t+5\n\treturn dataset_splits", "def _partition_pandas_dataframe(df, num_partitions=None, row_chunksize=None):\n if num_partitions is not None:\n row_chunksize = len(df) // num_partitions \\\n if len(df) % num_partitions == 0 \\\n else len(df) // num_partitions + 1\n else:\n assert row_chunksize is not None\n\n temp_df = df\n\n row_partitions = []\n while len(temp_df) > row_chunksize:\n t_df = temp_df[:row_chunksize]\n # reset_index here because we want a pandas.RangeIndex\n # within the partitions. It is smaller and sometimes faster.\n t_df.reset_index(drop=True, inplace=True)\n t_df.columns = pandas.RangeIndex(0, len(t_df.columns))\n top = ray.put(t_df)\n row_partitions.append(top)\n temp_df = temp_df[row_chunksize:]\n else:\n # Handle the last chunk correctly.\n # This call is necessary to prevent modifying original df\n temp_df = temp_df[:]\n temp_df.reset_index(drop=True, inplace=True)\n temp_df.columns = pandas.RangeIndex(0, len(temp_df.columns))\n row_partitions.append(ray.put(temp_df))\n\n return row_partitions", "def greedy_split(arr, n, axis=0):\n length = arr.shape[axis]\n # compute the size of each of the first n-1 blocks\n block_size = int(np.ceil(length / float(n)))\n # the indices at which the splits will occur\n ix = np.arange(block_size, length, block_size)\n return np.array(np.split(arr, ix, axis))", "def split_in_batches(self, data: DataFrame, number_of_batches):\n LOGGER.info(\"Splitting the data in batches\")\n\n number_of_rows = data.shape[0]\n list_of_dfs = []\n\n for i in range(number_of_batches):\n start_index = (i * number_of_rows) // number_of_batches\n end_index = ((i + 1) * number_of_rows) // number_of_batches\n batch = data.iloc[start_index:end_index]\n list_of_dfs.append(batch)\n\n LOGGER.info(\"Done splitting the data in batches\")\n\n return list_of_dfs", "def train_test_split(df, frac):\n\n train_split = df[: (int(len(df) * frac))]\n test_split = df[(int(len(df) * frac)):]\n return train_split, test_split", "def row_split(items, n):\n for i in range(0, len(items), n):\n yield items[i : i + n]", "def splitInBlocks (l, n):\n k = len(l) / n\n r = len(l) % n\n\n i = 0\n blocks = []\n while i < len(l):\n if len(blocks)<r:\n blocks.append(l[i:i+k+1])\n i += k+1\n else:\n blocks.append(l[i:i+k])\n i += k\n\n return blocks", "def iter_chunks(df):\n df_chunk = []\n label_chunk = []\n index_chunk =[]\n for idx,row in df.iterrows():\n for chunk in row['text_split']: ##goes over every chunk in a the list for each transcript\n df_chunk.append(chunk) ## append chunk to list\n label_chunk.append(row[1]) ##append label of chunk/transcript\n index_chunk.append(idx) ## append index of transcript\n return df_chunk, label_chunk, index_chunk", "def 
split(f, num_blocks):\r\n blocks = []\r\n copy_f = list(f) # copy f so we don't ruin it!!!!!!!!\r\n while len(copy_f) % num_blocks != 0:\r\n copy_f.append(0)\r\n block_length = len(copy_f) // num_blocks\r\n index = 0\r\n while index + block_length < len(copy_f):\r\n blocks.append(copy_f[index:index+block_length])\r\n index += block_length\r\n blocks.append(copy_f[index:])\r\n return blocks", "def split(self):\n\n ratio_c = 1 - self.ratio\n self.train, self.test = self.df.randomSplit([self.ratio, ratio_c], seed=12345)" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
decorator to register a babel cli handler.
def babel_cli_handler(**options): def decorator(cls): """ decorates the given class and registers an instance of it into available babel cli handlers. :param BabelCLIHandlerBase cls: babel cli handler class. :returns: babel cli handler class. :rtype: BabelCLIHandlerBase """ instance = cls() babel_services.register_cli_handler(instance, **options) return cls return decorator
[ "def decorator(cls):\n\n instance = cls()\n babel_services.register_cli_handler(instance, **options)\n\n return cls", "def __init__(self):\n\n super().__init__(BabelCLIHandlersEnum.INIT)", "def register_command(*parse_args, **options):\n def wrapper(function):\n function._is_command = True\n return function\n return wrapper", "def __init__(self):\n\n super().__init__(BabelCLIHandlersEnum.COMPILE)", "def register(cli):\n cli_node = PASSW_HARDENING\n if cli_node.name in cli.commands:\n raise Exception(f\"{cli_node.name} already exists in CLI\")\n cli.add_command(PASSW_HARDENING)", "def register_cli_commands(app):\n app.cli.add_command(init_events_command)", "def shell_command(cmd_name):\n def inner(func):\n @wraps(func)\n def wrapper(*args, **kwargs):\n func.trac_method = cmd_name\n return func(*args, **kwargs)\n return wrapper\n return inner", "def _register_cli(self) -> None:\n api_cli = APICli()\n api_cli.coresys = self.coresys\n\n self.webapp.add_routes(\n [\n web.get(\"/cli/info\", api_cli.info),\n web.get(\"/cli/stats\", api_cli.stats),\n web.post(\"/cli/update\", api_cli.update),\n ]\n )", "def decorator(fn: Callable) -> Callable:\n\n def wrapped(space: FungeSpace) -> None:\n \"\"\"Run instruction, if return value push value to stack\"\"\"\n # print(fn.__name__)\n if (res := fn(space)) is not None:\n space.stack.push(res)\n\n _command_routes.update(dict.fromkeys(chars, wrapped))\n return wrapped", "def register_command(func):\n supported_commands.append(func.__name__)\n return func", "def register_babel(app: Flask):\n BABEL.init_app(app)", "def add_run_hook(h):\n add_hook(run, h)", "def loadhook(h):\r\n def processor(handler):\r\n h()\r\n return handler()\r\n \r\n return processor", "def register(command, command_dict):\n def decorator(func):\n command_dict[command] = func\n return func\n return decorator", "def register_commands(app: Flask):\n\n app.cli.add_command(db_cli)\n\n @app.cli.command(\n \"pip-compile\",\n context_settings=dict(\n ignore_unknown_options=True,\n allow_extra_args=True,\n help_option_names=[],\n ),\n )\n @click.pass_context\n @dev_only\n def pip_compile(ctx: click.Context):\n \"\"\"Compile the .in files in /requirements.\n\n This command is for development purposes only.\n \"\"\"\n import subprocess\n\n if len(ctx.args) == 1 and ctx.args[0] == \"--help\":\n subprocess.call([\"pip-compile\", \"--help\"])\n else:\n req_files = [\n \"requirements/dev_unix.in\",\n \"requirements/dev_windows.in\",\n \"requirements/prod.in\",\n \"requirements/docs.in\",\n ]\n for filename in req_files:\n subprocess.call([\"pip-compile\", filename, *ctx.args])\n\n @app.cli.command(\"scrape\")\n def scrape_command():\n \"\"\"Scrape from public data into the database.\n\n This is a handy way to populate the database to start with publicly\n available data.\n \"\"\"\n pass", "def subcommand(f):\n SUBCOMMANDS[f.__name__] = f\n return f", "def _activate_helper(smuggle_func, parser_func):\n ipy_shell = config._ipython_shell\n smuggle_transformer = StatelessInputTransformer.wrap(parser_func)\n # noinspection PyDeprecation\n splitter_xforms = ipy_shell.input_splitter.python_line_transforms\n manager_xforms = ipy_shell.input_transformer_manager.python_line_transforms\n\n if not any(t.func is parser_func for t in splitter_xforms):\n splitter_xforms.append(smuggle_transformer())\n\n if not any(t.func is parser_func for t in manager_xforms):\n manager_xforms.append(smuggle_transformer())\n\n # insert \"smuggle\" into notebook namespace\n ipy_shell.user_ns['smuggle'] = smuggle_func", "def 
add_handler(self, handler):\n pass", "def mod_command_handler(self, cmd, args):\n self.command_handler_params = (cmd, args) # for inspection\n return bundy.config.create_answer(0)" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
decorates the given class and registers an instance of it into available babel cli handlers.
def decorator(cls): instance = cls() babel_services.register_cli_handler(instance, **options) return cls
[ "def babel_cli_handler(**options):\n\n def decorator(cls):\n \"\"\"\n decorates the given class and registers an instance\n of it into available babel cli handlers.\n\n :param BabelCLIHandlerBase cls: babel cli handler class.\n\n :returns: babel cli handler class.\n :rtype: BabelCLIHandlerBase\n \"\"\"\n\n instance = cls()\n babel_services.register_cli_handler(instance, **options)\n\n return cls\n\n return decorator", "def _class_wrapper(command_class):\n WebBot().register_command(command_class)\n return command_class", "def register(cls):\n register(cls, cls.provided_class)", "def register(cls, class_):\n cls._registered[class_.tag()] = class_", "def register(cls, class_to_register):\n cls.registered_loaders.append(class_to_register)\n return class_to_register", "def plugin_class(cls):\r\n if isinstance(cls, basestring):\r\n context = cls\r\n\r\n def wrapper(cls):\r\n setattr(cls, CLASS_MARKER, context)\r\n return cls\r\n return wrapper\r\n\r\n elif inspect.isclass(cls):\r\n setattr(cls, CLASS_MARKER, True)\r\n return cls", "def _class(self, _class):\n\n self.__class = _class", "def implementation(cls):\n\n _register_as(cls, cls.__name__)", "def component_class(cls):\r\n if isinstance(cls, basestring):\r\n context = cls\r\n\r\n def wrapper(cls):\r\n setattr(cls, CLASS_MARKER, context)\r\n return cls\r\n return wrapper\r\n\r\n elif inspect.isclass(cls):\r\n setattr(cls, CLASS_MARKER, True)\r\n return cls", "def patch_class(cls):\n def _wrap_fun(wrapper, original):\n def wrapped(self, *args, **kwargs):\n return wrapper(self, original, *args, **kwargs)\n return wrapped\n def _wrap_cls(patch):\n for func in dir(patch):\n if not func.startswith('__'):\n wrapper = getattr(patch, func)\n original = getattr(cls, func, None)\n if original:\n setattr(cls, func, _wrap_fun(wrapper, original))\n else:\n setattr(cls, func, wrapper)\n return patch\n return _wrap_cls", "def register(cls):\n activations[cls.__name__] = cls\n\n return cls", "def class_message_handler(\n self,\n *custom_filters,\n commands=None,\n regexp=None,\n content_types=None,\n state=None,\n run_task=None,\n **kwargs\n ):\n\n def decorator(class_):\n handler = class_()\n\n self.register_message_handler(\n handler.callback,\n *custom_filters,\n commands=commands,\n regexp=regexp,\n content_types=content_types,\n state=state,\n run_task=run_task,\n **kwargs\n )\n return class_\n\n return decorator", "def set_handler (self, handler_class):\n h = self.handler\n self.handler = handler_class(h.parent, h.prefix, h.args)", "def __call__(self, cls: object, *args: Any, **kwargs: dict) -> Callable:\n self.__class__.__name__ = cls.__name__\n\n class Wrapped(cls, *args, **kwargs):\n \"\"\"\n Wrap Decorated Callable, Get Configurations and Create required\n attributes in the Decorated Callable. Finally Call the Callable\n :param cls: The Class Object to being decorated\n :param args: Additional arguments\n :param kwargs: Additional Keyword arguments\n :return: The Wrapped Callable\n \"\"\"\n config = self.config\n\n def __init__(self, *args: Any, **kwargs: dict):\n \"\"\"\n Override Classes __init__ method. Create the Configuration\n attributes. 
Call the Decorated class __init__ method.\n :param cls: The Class Object to being decorated\n :param args: Additional arguments\n :param kwargs: Additional Keyword arguments\n \"\"\"\n for key in self.config.keys():\n if isinstance(self.config[key], dict):\n setattr(self, key, Map(self.config[key]))\n else:\n setattr(self, key, self.config[key])\n cls.__init__(self, *args, **kwargs)\n\n return Wrapped", "def decorate_class(cls, klass: type, decorate_subclasses=False, **setting_kwds) -> None:\n assert isinstance(klass, type) # in \"debug\" mode only\n if not isinstance(klass, type): # in either mode, have the same awareness at the same time\n return\n\n # Filter out builtins.\n if not get_file_of_object(klass):\n return\n\n def _deco_class(kls: type):\n t = cls(**setting_kwds)\n _ = t(kls)\n # assert _ == kls\n\n def _deco_class_rec(kls: type):\n _deco_class(kls)\n for subclass in kls.__subclasses__():\n _deco_class_rec(subclass)\n\n if decorate_subclasses:\n _deco_class_rec(klass)\n else:\n _deco_class(klass)\n # (_deco_class_rec if decorate_subclasses else _deco_class)(klass)", "def with_function_style(class_):\n obj = class_()\n\n def wrapper(*args, **kwargs):\n return obj(*args, **kwargs)\n return wrapper", "def register_command(self, command_class):\n cmd_parser = self.subparsers.add_parser(\n command_class.get_name(),\n help=command_class.get_help(),\n )\n\n self.register_options(cmd_parser)\n command_class.register_options(cmd_parser)\n cmd_parser.set_defaults(command=command_class)", "def _register(cls):\r\n command_name = cls.__dict__.get('__command__', None)\r\n if command_name:\r\n Command._commands[command_name] = cls", "def classdispatch(func):\n dispatcher = singledispatch(func)\n\n def wrapper(*args, **kw): # pylint: disable=missing-docstring\n return dispatcher.dispatch(args[0])(*args, **kw)\n\n wrapper.register = dispatcher.register\n update_wrapper(wrapper, func)\n return wrapper" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Return a new (unsaved) SharedItem object. Does not set any of the fields that would come from the Request object (i.e. ``user``).
def get_shared_object(self): if not self.is_valid(): raise ValueError("get_shared_object may only be called on valid forms") new = SharedItem( object_id = force_unicode(self.target_object._get_pk_val()), content_type = ContentType.objects.get_for_model(self.target_object), share_date = datetime.datetime.now(), ) return new
[ "def get_shared_object(self):\n if not self.is_valid():\n raise ValueError(\"get_shared_object may only be called on valid forms\")\n\n new = SharedItem(\n content_type = ContentType.objects.get_for_model(self.target_object),\n object_id = force_unicode(self.target_object._get_pk_val()),\n share_date = datetime.datetime.now(),\n )\n \n return new", "def fromSharedItem(cls, sharedItem):\n localpart = None\n for (localpart, domain) in userbase.getAccountNames(sharedItem.store):\n break\n if localpart is None:\n raise NoSuchShare()\n for share in sharedItem.store.query(Share,\n Share.sharedItem == sharedItem):\n break\n else:\n raise NoSuchShare()\n return cls(\n shareID=share.shareID,\n localpart=localpart, domain=domain)", "def create_item(self, item):\n item_dict = {\n 'project_id': item.project_id,\n 'from_user_id': item.from_user_id,\n 'to_user_id': item.to_user_id,\n 'role': item.auth_role,\n 'user_message': item.user_message\n }\n if item.share_user_ids:\n item_dict['share_user_ids'] = item.share_user_ids\n data = json.dumps(item_dict)\n resp = requests.post(self.make_url(item.destination), headers=self.json_headers, data=data)\n self.check_response(resp)\n return resp", "def shareItem(self, sharedItem, shareID=None, interfaces=ALL_IMPLEMENTED):\n if shareID is None:\n shareID = genShareID(sharedItem.store)\n return Share(store=self.store,\n shareID=shareID,\n sharedItem=sharedItem,\n sharedTo=self,\n sharedInterfaces=interfaces)", "def create_item(self, user: User, **kwargs) -> None:", "def item_shared(self, item):\n self.update_item(item)", "def cloneItemOnly( self, parent ):\n o_item = self.__class__( parent, self.o_data )\n\n return o_item", "def _add_item(request, item_type, new_item):\n item_set = '%s_set' % item_type\n\n if request.user.is_authenticated():\n item_set = getattr(request.user, item_set)\n items = item_set.filter(**new_item)\n\n if items:\n return False\n else:\n item_set.create(**new_item)\n return True\n else:\n items = request.session.get(item_set, [])\n new_item = new_item.values()[0]\n\n if new_item in items:\n return False\n else:\n items.append(new_item)\n request.session[item_set] = items\n return True", "def share(self, request):\n try:\n article = self.get_object()\n except PermissionDenied as pd:\n return Response({'error': str(pd)})\n\n article.shared_by.add(request.user)\n return Response({'message': '\"{}\" is shared'.format(article.title)})", "def get_or_create_item(item_name):\n try:\n item = Item.objects.get(name=item_name)\n except Item.DoesNotExist:\n item = Item()\n item.name = item_name\n item.save()\n return item", "def copy(self):\n new = object.__new__(type(self))\n new.banner_id = self.banner_id\n new.cover_sticker_id = self.cover_sticker_id\n new.description = self.description\n new.id = 0\n new.name = self.name\n new.sku_id = self.sku_id\n stickers = self.stickers\n if (stickers is not None):\n stickers = frozenset(iter(stickers))\n new.stickers = stickers\n return new", "def new(cls, user_id, **kwargs):\n return Dropbox.get_or_insert(user_id, **kwargs)", "def InitializeMasterItem(self):\n data = copy.deepcopy(self.game.data['game']['master_item'])\n \n return data", "def create_rehashed_item_object(wow_api_item):\n return {\n \"id\": wow_api_item.get(\"id\"),\n \"name\": wow_api_item.get(\"name\"),\n \"icon\": wow_api_item.get(\"icon\"),\n \"is_stackable\": False if wow_api_item.get(\"stackable\") == 1 else True,\n \"item_class\": wow_api_item.get(\"itemClass\"),\n \"item_sub_class\": wow_api_item.get(\"itemSubClass\")\n }", "def 
get_new_instance():\n SharedDict.__instance = None\n return SharedDict()", "def _get_identical_object(self, request):\n filters = {}\n for field in self._meta.fields:\n if not field.name.startswith('_') and \\\n not field.name.endswith('_ptr'):\n # Add this key, value pair to a dict of lookup values\n filters[field.name] = getattr(self, field.name)\n\n # Add the user to the filter perms\n filters['submitted_by'] = request.user\n\n # Only look at items that were submitted in the last two minutes\n filters.pop('submitted_time')\n filters['submitted_time__gte'] = datetime.now() - timedelta(minutes=2)\n\n # If an identical object already exists in the database, return the first existing one\n identical_objects = self.__class__.objects.status(live=True).filter(**filters)\n\n if len(identical_objects):\n return identical_objects[0]\n return None", "async def create_or_replace(self, item_id, item: dict) -> object:\n raise NotImplementedError()", "def _create_or_update_packinglistitem(self, item_identifier, item, user, optional_attrs={}):\n try:\n packing_list_item = self.packing_list.packing_list_item_model.objects.get(\n packing_list=self.packing_list,\n item_reference=item_identifier)\n except self.packing_list.packing_list_item_model.DoesNotExist:\n try:\n optional_description = item.optional_description or ''\n except AttributeError:\n optional_description = None\n options = {\n 'requisition': item._meta.verbose_name,\n 'item_description': '{subject_identifier} ({initials}) VISIT:{visit} DOB:{dob} {optional}'.format(\n subject_identifier=item.registered_subject.subject_identifier,\n initials=item.registered_subject.initials,\n visit=item.visit_code,\n dob=item.registered_subject.dob,\n optional=optional_description,\n ),\n 'user_created': user,\n }\n options.update(**optional_attrs)\n packing_list_item = self.packing_list.packing_list_item_model.objects.create(\n packing_list=self.packing_list,\n item_reference=item_identifier,\n **options)\n return packing_list_item", "def new_or_get(self, request):\n cart_id = request.session.get('cart_id', None)\n qs = self.get_queryset().filter(id=cart_id)\n if qs.count() == 1:\n new_obj = False\n cart_obj = qs.first()\n if request.user.is_authenticated() and cart_obj.user is None:\n cart_obj.user = request.user\n cart_obj.save()\n else:\n cart_obj = Cart.objects.new(user=request.user)\n new_obj = True\n request.session['cart_id'] = cart_obj.id\n return cart_obj, new_obj" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Return a new (unsaved) SharedItem object. Does not set any of the fields that would come from the Request object (i.e. ``user``).
def get_shared_object(self): if not self.is_valid(): raise ValueError("get_shared_object may only be called on valid forms") new = SharedItem( content_type = ContentType.objects.get_for_model(self.target_object), object_id = force_unicode(self.target_object._get_pk_val()), share_date = datetime.datetime.now(), ) return new
[ "def get_shared_object(self):\n if not self.is_valid():\n raise ValueError(\"get_shared_object may only be called on valid forms\")\n\n new = SharedItem(\n object_id = force_unicode(self.target_object._get_pk_val()),\n content_type = ContentType.objects.get_for_model(self.target_object),\n share_date = datetime.datetime.now(),\n )\n \n return new", "def fromSharedItem(cls, sharedItem):\n localpart = None\n for (localpart, domain) in userbase.getAccountNames(sharedItem.store):\n break\n if localpart is None:\n raise NoSuchShare()\n for share in sharedItem.store.query(Share,\n Share.sharedItem == sharedItem):\n break\n else:\n raise NoSuchShare()\n return cls(\n shareID=share.shareID,\n localpart=localpart, domain=domain)", "def create_item(self, item):\n item_dict = {\n 'project_id': item.project_id,\n 'from_user_id': item.from_user_id,\n 'to_user_id': item.to_user_id,\n 'role': item.auth_role,\n 'user_message': item.user_message\n }\n if item.share_user_ids:\n item_dict['share_user_ids'] = item.share_user_ids\n data = json.dumps(item_dict)\n resp = requests.post(self.make_url(item.destination), headers=self.json_headers, data=data)\n self.check_response(resp)\n return resp", "def shareItem(self, sharedItem, shareID=None, interfaces=ALL_IMPLEMENTED):\n if shareID is None:\n shareID = genShareID(sharedItem.store)\n return Share(store=self.store,\n shareID=shareID,\n sharedItem=sharedItem,\n sharedTo=self,\n sharedInterfaces=interfaces)", "def create_item(self, user: User, **kwargs) -> None:", "def item_shared(self, item):\n self.update_item(item)", "def cloneItemOnly( self, parent ):\n o_item = self.__class__( parent, self.o_data )\n\n return o_item", "def _add_item(request, item_type, new_item):\n item_set = '%s_set' % item_type\n\n if request.user.is_authenticated():\n item_set = getattr(request.user, item_set)\n items = item_set.filter(**new_item)\n\n if items:\n return False\n else:\n item_set.create(**new_item)\n return True\n else:\n items = request.session.get(item_set, [])\n new_item = new_item.values()[0]\n\n if new_item in items:\n return False\n else:\n items.append(new_item)\n request.session[item_set] = items\n return True", "def share(self, request):\n try:\n article = self.get_object()\n except PermissionDenied as pd:\n return Response({'error': str(pd)})\n\n article.shared_by.add(request.user)\n return Response({'message': '\"{}\" is shared'.format(article.title)})", "def get_or_create_item(item_name):\n try:\n item = Item.objects.get(name=item_name)\n except Item.DoesNotExist:\n item = Item()\n item.name = item_name\n item.save()\n return item", "def copy(self):\n new = object.__new__(type(self))\n new.banner_id = self.banner_id\n new.cover_sticker_id = self.cover_sticker_id\n new.description = self.description\n new.id = 0\n new.name = self.name\n new.sku_id = self.sku_id\n stickers = self.stickers\n if (stickers is not None):\n stickers = frozenset(iter(stickers))\n new.stickers = stickers\n return new", "def new(cls, user_id, **kwargs):\n return Dropbox.get_or_insert(user_id, **kwargs)", "def InitializeMasterItem(self):\n data = copy.deepcopy(self.game.data['game']['master_item'])\n \n return data", "def create_rehashed_item_object(wow_api_item):\n return {\n \"id\": wow_api_item.get(\"id\"),\n \"name\": wow_api_item.get(\"name\"),\n \"icon\": wow_api_item.get(\"icon\"),\n \"is_stackable\": False if wow_api_item.get(\"stackable\") == 1 else True,\n \"item_class\": wow_api_item.get(\"itemClass\"),\n \"item_sub_class\": wow_api_item.get(\"itemSubClass\")\n }", "def 
get_new_instance():\n SharedDict.__instance = None\n return SharedDict()", "def _get_identical_object(self, request):\n filters = {}\n for field in self._meta.fields:\n if not field.name.startswith('_') and \\\n not field.name.endswith('_ptr'):\n # Add this key, value pair to a dict of lookup values\n filters[field.name] = getattr(self, field.name)\n\n # Add the user to the filter perms\n filters['submitted_by'] = request.user\n\n # Only look at items that were submitted in the last two minutes\n filters.pop('submitted_time')\n filters['submitted_time__gte'] = datetime.now() - timedelta(minutes=2)\n\n # If an identical object already exists in the database, return the first existing one\n identical_objects = self.__class__.objects.status(live=True).filter(**filters)\n\n if len(identical_objects):\n return identical_objects[0]\n return None", "async def create_or_replace(self, item_id, item: dict) -> object:\n raise NotImplementedError()", "def _create_or_update_packinglistitem(self, item_identifier, item, user, optional_attrs={}):\n try:\n packing_list_item = self.packing_list.packing_list_item_model.objects.get(\n packing_list=self.packing_list,\n item_reference=item_identifier)\n except self.packing_list.packing_list_item_model.DoesNotExist:\n try:\n optional_description = item.optional_description or ''\n except AttributeError:\n optional_description = None\n options = {\n 'requisition': item._meta.verbose_name,\n 'item_description': '{subject_identifier} ({initials}) VISIT:{visit} DOB:{dob} {optional}'.format(\n subject_identifier=item.registered_subject.subject_identifier,\n initials=item.registered_subject.initials,\n visit=item.visit_code,\n dob=item.registered_subject.dob,\n optional=optional_description,\n ),\n 'user_created': user,\n }\n options.update(**optional_attrs)\n packing_list_item = self.packing_list.packing_list_item_model.objects.create(\n packing_list=self.packing_list,\n item_reference=item_identifier,\n **options)\n return packing_list_item", "def new_or_get(self, request):\n cart_id = request.session.get('cart_id', None)\n qs = self.get_queryset().filter(id=cart_id)\n if qs.count() == 1:\n new_obj = False\n cart_obj = qs.first()\n if request.user.is_authenticated() and cart_obj.user is None:\n cart_obj.user = request.user\n cart_obj.save()\n else:\n cart_obj = Cart.objects.new(user=request.user)\n new_obj = True\n request.session['cart_id'] = cart_obj.id\n return cart_obj, new_obj" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Test that FeathrClient() get_online_features and batch_get can get data correctly.
def test_feathr_online_store_agg_features(): online_test_table = get_online_test_table_name("nycTaxiCITableMaven") test_workspace_dir = Path( __file__).parent.resolve() / "test_user_workspace" # os.chdir(test_workspace_dir) # The `feathr_runtime_location` was commented out in this config file, so feathr should use # Maven package as the dependency and `noop.jar` as the main file client: FeathrClient = basic_test_setup(os.path.join(test_workspace_dir, "feathr_config_maven.yaml")) location_id = TypedKey(key_column="DOLocationID", key_column_type=ValueType.INT32, description="location id in NYC", full_name="nyc_taxi.location_id") feature_query = FeatureQuery( feature_list=["f_location_avg_fare"], key=location_id) settings = ObservationSettings( observation_path="wasbs://public@azurefeathrstorage.blob.core.windows.net/sample_data/green_tripdata_2020-04.csv", event_timestamp_column="lpep_dropoff_datetime", timestamp_format="yyyy-MM-dd HH:mm:ss") now = datetime.now() # set output folder based on different runtime if client.spark_runtime == 'databricks': output_path = ''.join(['dbfs:/feathrazure_cijob','_', str(now.minute), '_', str(now.second), ".avro"]) else: output_path = ''.join(['abfss://feathrazuretest3fs@feathrazuretest3storage.dfs.core.windows.net/demo_data/output','_', str(now.minute), '_', str(now.second), ".avro"]) client.get_offline_features(observation_settings=settings, feature_query=feature_query, output_path=output_path) # assuming the job can successfully run; otherwise it will throw exception client.wait_job_to_finish(timeout_sec=Constants.SPARK_JOB_TIMEOUT_SECONDS) return backfill_time = BackfillTime(start=datetime( 2020, 5, 20), end=datetime(2020, 5, 20), step=timedelta(days=1)) redisSink = RedisSink(table_name=online_test_table) settings = MaterializationSettings("TestJobName", sinks=[redisSink], feature_names=[ "f_location_avg_fare", "f_location_max_fare"], backfill_time=backfill_time) client.materialize_features(settings) # just assume the job is successful without validating the actual result in Redis. Might need to consolidate # this part with the test_feathr_online_store test case client.wait_job_to_finish(timeout_sec=Constants.SPARK_JOB_TIMEOUT_SECONDS) res = client.get_online_features(online_test_table, '265', [ 'f_location_avg_fare', 'f_location_max_fare']) # just assume there are values. We don't hard code the values for now for testing # the correctness of the feature generation should be guaranteed by feathr runtime. # ID 239 and 265 are available in the `DOLocationID` column in this file: # https://s3.amazonaws.com/nyc-tlc/trip+data/green_tripdata_2020-04.csv # View more details on this dataset: https://www1.nyc.gov/site/tlc/about/tlc-trip-record-data.page assert len(res) == 2 assert res[0] != None assert res[1] != None res = client.multi_get_online_features(online_test_table, ['239', '265'], ['f_location_avg_fare', 'f_location_max_fare']) assert res['239'][0] != None assert res['239'][1] != None assert res['265'][0] != None assert res['265'][1] != None
[ "def test_get_training_datasets_for_featurestore(self):\n pass", "def test_bqfeature_fetch_1():\n filename = 'bqfeature_fetch_1.h5'\n path = os.path.join(results_location, filename)\n filename = Feature().fetch(bqsession, 'SimpleTestFeature', resource_list, path=path)", "def test_read_feature_collection(self):\n fc = self.read_feature()\n assert len(fc.features) == 1\n feature = fc.features[0]\n self.check_feature(feature)", "def test_all_features_with_data(self):\n feature1 = Feature('looktest1')\n feature1.set_percentage(5)\n\n feature2 = Feature('looktest2')\n feature2.activate()\n feature2.add_to_whitelist(3)\n\n feature3 = Feature('looktest3')\n feature3.activate()\n feature3.add_to_blacklist(4)\n feature3.add_to_blacklist(5)\n\n feature4 = Feature('looktest4')\n feature4.activate()\n feature4.add_to_whitelist(3)\n feature4.add_to_whitelist(5)\n feature4.add_to_blacklist(4)\n\n all_features = Feature.all_features(include_data=True)\n self.assertEqual(len(all_features), 4)\n\n for key in ['looktest1', 'looktest2', 'looktest3', 'looktest4']:\n self.assertTrue(key in all_features)\n if not key == 'looktest1':\n self.assertEqual(all_features[key]['percentage'], 100)\n\n self.assertEqual(all_features['looktest1']['percentage'], 5)\n self.assertFalse('whitelist' in all_features['looktest1'])\n self.assertFalse('blacklist' in all_features['looktest1'])\n\n self.assertTrue('whitelist' in all_features['looktest2'])\n self.assertEqual(all_features['looktest2']['whitelist'], [3])\n self.assertFalse('blacklist' in all_features['looktest2'])\n\n self.assertFalse('whitelist' in all_features['looktest3'])\n self.assertTrue('blacklist' in all_features['looktest3'])\n self.assertEqual(all_features['looktest3']['blacklist'], [4, 5])\n\n self.assertTrue('whitelist' in all_features['looktest4'])\n self.assertEqual(all_features['looktest4']['whitelist'], [3, 5])\n self.assertTrue('blacklist' in all_features['looktest4'])\n self.assertEqual(all_features['looktest4']['blacklist'], [4])", "def test_feature_service_read() -> None:\n\n runner = CliRunner()\n with runner.local_repo(\n get_example_repo(\"example_feature_repo_1.py\"), \"bigquery\"\n ) as store:\n\n basic_rw_test(\n store,\n view_name=\"driver_locations\",\n feature_service_name=\"driver_locations_service\",\n )", "def test_get___feature_progress(self):\n with test_app.test_request_context(self.request_path):\n actual = self.handler.do_get(feature_id=self.feature_id)\n\n self.assertEqual({\n 'Code in Chromium': 'True',\n 'Draft API spec': 'fake spec link',\n 'Estimated target milestone': 'True',\n 'Final target milestone': 'True',\n 'Intent to Prototype email': 'https://example.com/prototype',\n 'Intent to Experiment email': 'https://example.com/ot',\n 'Ready for Developer Testing email': 'https://example.com/ready_for_trial',\n 'Intent to Ship email': 'https://example.com/ship',\n 'Spec link': 'fake spec link',\n 'Updated target milestone': 'True',\n 'Web developer signals': 'True',\n }, actual)", "def do_features_request_2(features=None):\n\n #  connect to database\n cur_db = connect_db(\"172.20.38.50\", \"mvelay\", \"user\", \"sandbox\")\n cursor = cur_db.cursor()\n\n # build whole query\n cur_query = \"\"\" SELECT module, sw, version FROM t_feature\n WHERE feature=\"%s\" AND supported=1;\"\"\" % (features[0])\n\n print cur_query\n cursor.execute(cur_query)\n results = cursor.fetchall()\n cursor.close()\n\n if results:\n results = results[:1000] # Limit to first 1000 results\n else:\n results = None\n\n return features[0], results", "async 
def getFeatures(self, body=\"\"):\n payload = {}\n \n # Parameter validation\n schema = ConfigurationValidator.getFeatures()\n schema.dump(schema.load(payload))\n \n\n url_with_params = await create_url_with_params(api_url=self._urls[\"getFeatures\"], proccessed_params=\"\"\"{\"required\":[],\"optional\":[],\"query\":[],\"headers\":[],\"path\":[]}\"\"\", )\n query_string = await create_query_string()\n headers = {\n \"Authorization\": \"Bearer \" + base64.b64encode(\"{}:{}\".format(self._conf.applicationID, self._conf.applicationToken).encode()).decode()\n }\n if self._conf.locationDetails:\n headers[\"x-location-detail\"] = ujson.dumps(self._conf.locationDetails)\n for h in self._conf.extraHeaders:\n headers.update(h)\n exclude_headers = []\n for key, val in headers.items():\n if not key.startswith(\"x-fp-\"):\n exclude_headers.append(key)\n return await AiohttpHelper().aiohttp_request(\"GET\", url_with_params, headers=get_headers_with_signature(urlparse(self._urls[\"getFeatures\"]).netloc, \"get\", await create_url_without_domain(\"/service/application/configuration/v1.0/feature\", ), query_string, headers, body, exclude_headers=exclude_headers), data=body, cookies=self._conf.cookies)", "def test_get_next_feature():\n print(\" *** TESTING get_next_feature()\")\n feature_available, feature = get_next_feature()\n while feature_available:\n print(\"FEATURE: \" + str(feature))\n feature_available, feature = get_next_feature()\n print(\" --- END TEST get_next_feature()\")", "def test_bqfeature_fetch_2():\n hdf5 = Feature().fetch(bqsession, 'SimpleTestFeature', resource_list)\n hdf5.close()\n os.remove(hdf5.filename)", "def test_discovery_apis_get(self):\n pass", "def test_get_run(self):\n pass", "def test_client_retrieve(self):\n pass", "def test_bqparallelfeature_fetchvector_1():\n PF=ParallelFeature()\n PF.set_thread_num(2)\n PF.set_chunk_size(5)\n feature_vectors = PF.fetch_vector(bqsession, 'SimpleTestFeature', resource_list)", "def test_bqparallelfeature_fetch_1():\n PF=ParallelFeature()\n hdf5 = PF.fetch(bqsession, 'SimpleTestFeature', resource_list)\n hdf5.close()\n os.remove(hdf5.filename)", "def test_get_feature(self):\n # Checking context features\n feature_tensor = self.parser.get_feature(\n self.feature_config.get_feature(\"query_text\"),\n extracted_features=({\"query_text\": tf.zeros((3, 4, 6))}, {}),\n sequence_size=10,\n )\n assert feature_tensor.shape == (1, 3, 4, 6)\n\n # Check missing feature being replaced with default tensor\n feature_tensor = self.parser.get_feature(\n self.feature_config.get_feature(\"query_text\"),\n extracted_features=({}, {}),\n sequence_size=10,\n )\n assert feature_tensor.shape == (1,)\n\n # Checking sequence features\n feature_tensor = self.parser.get_feature(\n self.feature_config.get_feature(\"quality_score\"),\n extracted_features=({}, {\"quality_score\": tf.zeros((3, 4, 6))}),\n sequence_size=10,\n )\n assert feature_tensor.shape == (3, 4, 6)\n\n # Check missing feature being replaced with default tensor\n feature_tensor = self.parser.get_feature(\n self.feature_config.get_feature(\"quality_score\"),\n extracted_features=({}, {}),\n sequence_size=10,\n )\n assert feature_tensor.shape == (10,)", "def test_gettem_using_get(self):\n pass", "def test_api_erx_get_new_get(self):\n pass", "def test_bqparallelfeature_fetch_2():\n filename = 'bqparallelfeature_fetch_2.h5'\n path = os.path.join(results_location, filename)\n PF=ParallelFeature()\n PF.set_thread_num(2)\n PF.set_chunk_size(5)\n filename = PF.fetch(bqsession, 'SimpleTestFeature', 
resource_list, path=path)" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Initializes the object to have a pronunciation dictionary available.
def __init__(self): self._pronunciations = nltk.corpus.cmudict.dict()
[ "def __init__(self):\n self._pronunciations = nltk.corpus.cmudict.dict()\n \"\"\"\n API Documentation for CMU dictionary corpus\n http://www.nltk.org/api/nltk.corpus.reader.html#module-nltk.corpus.reader.cmudict\n \"\"\"", "def __init__(self):\n self.number_string = \"\"\n self.number_separate_string = \"\"\n self.punctuation_string = \"\"\n self.dic_pb = {}\n self.dic_cha = {}\n self.dic_term = {}", "def __init__(self):\n self.init(**self.get_init_storage())", "def __init__(self):\n super(sppasSymbolSettings, self).__init__()\n\n self.__dict__ = dict(\n unk=\"<UNK>\",\n phone=sppasSymbolSettings.__phone_symbols(),\n ortho=sppasSymbolSettings.__ortho_symbols(),\n all=sppasSymbolSettings.__all_symbols()\n )", "def __init__(self) -> None:\n self.mappings = {}", "def initPheromone(self):\n print '[Initializing pheromone values]'\n self.pheromoneValue = {}\n\n for token in self.postingTokens:\n self.pheromoneValue[token] = self.initialPheromone", "def __init__(self):\n self._word_dict = {}", "def __init__(self):\n\n self.parms = _init_parms()\n self.prefs = _init_prefs()", "def __init__(self):\n self.proxys = {}\n self.observers = {}\n self.uid_gen = generator.uid_generator()", "def __init__(self, variable, pnoun, nucleus):\n super(ProperNounExpression, self).__init__(variable, EmptyExpression(), nucleus)\n assert(pnoun in proper_nouns)\n self.pnoun = pnoun", "def __init__(self):\n for x in self.pronouns:\n tmplist = list(self.pronouns[x]) + list([x])\n self.pronouns[x] = tmplist\n\n # Pronouns\n \n self.reflposs.append(self.reflpossmanim)\n self.reflposs.append(self.reflpossminan)\n self.reflposs.append(self.reflpossf)\n self.reflposs.append(self.reflpossn)\n self.reflposs.append(self.reflposspanim)\n self.reflposs.append(self.reflposspinan)\n self.reflposs.append(\"possessive\")\n \n self.variety.append(self.varietymanim)\n self.variety.append(self.varietyminan)\n self.variety.append(self.varietyf)\n self.variety.append(self.varietyn)\n self.variety.append(self.varietypanim)\n self.variety.append(self.varietypinan)\n self.variety.append(\"variety\")\n \n self.reflexivesam.append(self.reflexivesamadjmanim)\n self.reflexivesam.append(self.reflexivesamadjminan)\n self.reflexivesam.append(self.reflexivesamadjf)\n self.reflexivesam.append(self.reflexivesamadjn)\n self.reflexivesam.append(self.reflexivesamadjpanim)\n self.reflexivesam.append(self.reflexivesamadjpinan)\n self.reflexivesam.append(\"reflexive\")\n \n self.possessive1a.append(self.possessive1amanim)\n self.possessive1a.append(self.possessive1aminan)\n self.possessive1a.append(self.possessive1af)\n self.possessive1a.append(self.possessive1an)\n self.possessive1a.append(self.possessive1apanim)\n self.possessive1a.append(self.possessive1apinan)\n self.possessive1a.append(\"possessive\")\n\n self.possessive2a.append(self.possessive2amanim)\n self.possessive2a.append(self.possessive2aminan)\n self.possessive2a.append(self.possessive2af)\n self.possessive2a.append(self.possessive2an)\n self.possessive2a.append(self.possessive2apanim)\n self.possessive2a.append(self.possessive2apinan)\n self.possessive2a.append(\"possessive\")\n\n self.possessive2b.append(self.possessive2bmanim)\n self.possessive2b.append(self.possessive2bminan)\n self.possessive2b.append(self.possessive2bf)\n self.possessive2b.append(self.possessive2bn)\n self.possessive2b.append(self.possessive2bpanim)\n self.possessive2b.append(self.possessive2bpinan)\n self.possessive2b.append(\"possessive\")\n\n self.possessive3a.append(self.possessive3amanim)\n 
self.possessive3a.append(self.possessive3aminan)\n self.possessive3a.append(self.possessive3af)\n self.possessive3a.append(self.possessive3an)\n self.possessive3a.append(self.possessive3apanim)\n self.possessive3a.append(self.possessive3apinan)\n self.possessive3a.append(\"possessive\")\n\n self.interrog.append(self.interrogmanim)\n self.interrog.append(self.interrogminan)\n self.interrog.append(self.interrogf)\n self.interrog.append(self.interrogn)\n self.interrog.append(self.interrogpanim)\n self.interrog.append(self.interrogpinan)\n self.interrog.append(\"interrogative\")\n \n self.demonstrative1.append(self.demonstrative1manim)\n self.demonstrative1.append(self.demonstrative1minan)\n self.demonstrative1.append(self.demonstrative1f)\n self.demonstrative1.append(self.demonstrative1n)\n self.demonstrative1.append(self.demonstrative1panim)\n self.demonstrative1.append(self.demonstrative1pinan)\n self.demonstrative1.append(\"demonstrative\")\n\n self.demonstrative2.append(self.demonstrative2manim)\n self.demonstrative2.append(self.demonstrative2minan)\n self.demonstrative2.append(self.demonstrative2f)\n self.demonstrative2.append(self.demonstrative2n)\n self.demonstrative2.append(self.demonstrative2panim)\n self.demonstrative2.append(self.demonstrative2pinan)\n self.demonstrative2.append(\"demonstrative\")\n\n self.inclusive.append(self.inclusivemanim)\n self.inclusive.append(self.inclusiveminan)\n self.inclusive.append(self.inclusivef)\n self.inclusive.append(self.inclusiven)\n self.inclusive.append(self.inclusivepanim)\n self.inclusive.append(self.inclusivepinan)\n self.inclusive.append(\"inclusive\")\n \n self.pronouns[\"a\"] = self.personal1a\n self.pronouns[\"b\"] = self.personal2a\n self.pronouns[\"c\"] = self.personal3a\n self.pronouns[\"d\"] = self.personal3b\n self.pronouns[\"e\"] = self.personal3c\n self.pronouns[\"f\"] = self.personal1b\n self.pronouns[\"g\"] = self.personal2b\n self.pronouns[\"h\"] = self.personal3c\n self.pronouns[\"i\"] = self.reflexive\n self.pronouns[\"j\"] = self.interrog1\n self.pronouns[\"k\"] = self.interrog2\n self.pronouns[\"l\"] = self.neginterrog1\n self.pronouns[\"m\"] = self.neginterrog2\n \n self.singlekeys = (\"a\", \"b\", \"c\", \"d\", \"e\", \"f\", \"g\", \"h\", \"i\", \"j\", \"k\", \"l\", \"m\")\n \n self.pronouns[\"1\"] = self.reflposs\n self.pronouns[\"2\"] = self.variety\n self.pronouns[\"3\"] = self.reflexivesam\n self.pronouns[\"4\"] = self.possessive1a\n self.pronouns[\"5\"] = self.possessive2a\n self.pronouns[\"6\"] = self.possessive2b\n self.pronouns[\"7\"] = self.possessive3a\n self.pronouns[\"8\"] = self.interrog\n self.pronouns[\"9\"] = self.demonstrative1\n self.pronouns[\"10\"] = self.demonstrative2\n self.pronouns[\"11\"] = self.inclusive\n \n self.groupkeys = (\"1\", \"2\", \"3\", \"4\", \"5\", \"6\", \"7\", \"8\", \"9\", \"10\", \"11\")\n \n # Noun data structure.\n self.mascsing.append(self.mascdeclanimhrd)\n self.mascsing.append(self.mascdeclinanhrd)\n self.mascsing.append(self.mascdeclanimsft)\n self.mascsing.append(self.mascdeclinansft)\n self.mascsing.append(self.mascdeclanimsft1)\n self.mascsing.append(self.mascdeclinansft1)\n self.mascsing.append(self.mascdeclanimsft2)\n self.mascsing.append(self.mascdeclinansft2)\n self.mascsing.append(self.mascdeclinanhrd1)\n self.mascsing.append(self.mascdeclinansft1a)\n self.mascsing.append(self.mascdeclanimsft3)\n self.mascsing.append(self.mascdeclinansft3)\n self.mascsing.append(self.declblank)\n self.mascsing.append(self.adjmascdeclhrdanim) # 13\n 
self.mascsing.append(self.adjmascdeclhrdinan)\n self.mascsing.append(self.adjmascdeclsftanim)\n self.mascsing.append(self.adjmascdeclsftinan)\n self.mascsing.append(self.adjmascdeclgkxanim)\n self.mascsing.append(self.adjmascdeclgkxinan)\n self.mascsing.append(self.adjmascoanim)\n self.mascsing.append(self.adjmascoinan)\n self.mascsing.append(self.adjmascmyagkiyanim)\n self.mascsing.append(self.adjmascmyagkiyinan)\n \n self.nuetsing.append(self.nuetdeclhrd)\n self.nuetsing.append(self.nuetdeclsft)\n self.nuetsing.append(self.nuetdeclsft1)\n self.nuetsing.append(self.nuetdeclmya)\n self.nuetsing.append(self.declblank)\n self.nuetsing.append(self.adjnuetdeclhrd) # 5\n self.nuetsing.append(self.adjnuetdeclsft)\n self.nuetsing.append(self.adjnuetdeclgkx)\n self.nuetsing.append(self.adjnueto)\n self.nuetsing.append(self.adjnuetmyagkiy)\n\n self.femsing.append(self.femideclhrd)\n self.femsing.append(self.femideclsft)\n self.femsing.append(self.femideclhrdgkx)\n self.femsing.append(self.femideclsftgkx)\n self.femsing.append(self.femideclsft1)\n self.femsing.append(self.femideclsft2)\n self.femsing.append(self.femideclsftcons)\n self.femsing.append(self.declblank)\n self.femsing.append(self.adjfemideclhrd) # 8\n self.femsing.append(self.adjfemideclsft)\n self.femsing.append(self.adjfemideclgkx)\n self.femsing.append(self.adjfemio)\n self.femsing.append(self.adjfemimyagkiy)\n\n self.mascplur.append(self.mascdeclanimplur)\n self.mascplur.append(self.mascdeclinanplur)\n self.mascplur.append(self.mascdeclanimplur1)\n self.mascplur.append(self.mascdeclinanplur1)\n self.mascplur.append(self.mascdeclanimplur2)\n self.mascplur.append(self.mascdeclinanplur2)\n self.mascplur.append(self.mascdeclanimplur3)\n self.mascplur.append(self.mascdeclinanplur3)\n self.mascplur.append(self.mascdeclanimplur4)\n self.mascplur.append(self.mascdeclinanplur4)\n self.mascplur.append(self.mascdeclanimplur5)\n self.mascplur.append(self.mascdeclinanplur5)\n self.mascplur.append(self.mascdeclanimplur6)\n self.mascplur.append(self.mascdeclinanplur6)\n self.mascplur.append(self.mascdeclatayataplur)\n self.mascplur.append(self.mascdeclaninyaninplur)\n self.mascplur.append(self.mascdeclnumericplur)\n self.mascplur.append(self.mascdeclhundredplur)\n self.mascplur.append(self.declblank)\n self.mascplur.append(self.adjplurdeclhrdanim) # 17\n self.mascplur.append(self.adjplurdeclhrdinan)\n self.mascplur.append(self.adjplurdeclsftanim)\n self.mascplur.append(self.adjplurdeclsftinan)\n self.mascplur.append(self.adjplurdeclgkxanim)\n self.mascplur.append(self.adjplurdeclgkxinan)\n self.mascplur.append(self.adjpluroanim)\n self.mascplur.append(self.adjpluroinan)\n self.mascplur.append(self.adjplurmyagkiyanim)\n self.mascplur.append(self.adjplurmyagkiyinan)\n \n self.nuetplur.append(self.nuetdeclhrdplur)\n self.nuetplur.append(self.nuetdeclsftplur)\n self.nuetplur.append(self.nuetdeclsftplur1)\n self.nuetplur.append(self.nuetdeclmyaplur)\n self.nuetplur.append(self.declblank)\n self.nuetplur.append(self.adjplurdeclhrdinan) # 5\n self.nuetplur.append(self.adjplurdeclsftinan)\n self.nuetplur.append(self.adjplurdeclgkxinan)\n self.nuetplur.append(self.adjpluroinan)\n self.nuetplur.append(self.adjplurmyagkiyinan)\n \n self.femplur.append(self.femideclanimhrdplur)\n self.femplur.append(self.femideclinanhrdplur)\n self.femplur.append(self.femideclanimhrdplur1)\n self.femplur.append(self.femideclinanhrdplur1)\n self.femplur.append(self.femideclanimsftplur)\n self.femplur.append(self.femideclinansftplur)\n 
self.femplur.append(self.femideclanimsftplur1)\n self.femplur.append(self.femideclinansftplur1)\n self.femplur.append(self.femideclanimsftplur2)\n self.femplur.append(self.femideclinansftplur2)\n self.femplur.append(self.femideclanimsftplur3)\n self.femplur.append(self.femideclinansftplur3)\n self.femplur.append(self.femideclnumericplur)\n self.femplur.append(self.declblank)\n self.femplur.append(self.adjplurdeclhrdanim) # 14\n self.femplur.append(self.adjplurdeclhrdinan)\n self.femplur.append(self.adjplurdeclsftanim)\n self.femplur.append(self.adjplurdeclsftinan)\n self.femplur.append(self.adjplurdeclgkxanim)\n self.femplur.append(self.adjplurdeclgkxinan)\n self.femplur.append(self.adjpluroanim)\n self.femplur.append(self.adjpluroinan)\n self.femplur.append(self.adjplurmyagkiyanim)\n self.femplur.append(self.adjplurmyagkiyinan)\n \n self.pluralonly.append(self.plural)\n \n self.datalist = (self.mascsing, self.nuetsing, self.femsing, self.mascplur, self.nuetplur, self.femplur, self.pluralonly)\n\n comma = \", \"\n\n self.conjugations.append(self.vrbprstat)\n self.conjugations.append(self.vrbprstyat)\n self.conjugations.append(self.vrbprstat1)\n self.conjugations.append(self.vrbprstat2)\n self.conjugations.append(self.vrbprstat3)\n self.conjugations.append(self.vrbprstgat)\n self.conjugations.append(self.vrbprstyt1)\n self.conjugations.append(self.vrbprstyt2)\n self.conjugations.append(self.vrbprstyt3)\n self.conjugations.append(self.vrbprstet)\n self.conjugations.append(self.vrbprsty)\n self.conjugations.append(self.vrbprststy)\n self.conjugations.append(self.vrbprstzty)\n self.conjugations.append(self.vrbprstt)\n self.conjugations.append(self.vrbprstt1)\n self.conjugations.append(self.vrbprstt2)\n self.conjugations.append(self.vrbprstovat)\n self.conjugations.append(self.vrbprstit)\n \n # Define adjectives\n \n self.adjectiveshard = list([self.adjmascdeclhrdanim, self.adjmascdeclhrdinan, self.adjfemideclhrd, self.adjnuetdeclhrd,\n self.adjplurdeclhrdanim, self.adjplurdeclhrdinan])\n self.adjectivesgkxsoft = list([self.adjmascdeclgkxanim, self.adjmascdeclgkxinan, self.adjfemideclgkx, self.adjnuetdeclgkx,\n self.adjplurdeclgkxanim, self.adjplurdeclgkxinan])\n self.adjectivessoft = list([self.adjmascdeclsftanim, self.adjmascdeclsftinan, self.adjfemideclsft, self.adjnuetdeclsft,\n self.adjplurdeclsftanim, self.adjplurdeclsftinan])\n self.adjectivesoy = list([self.adjmascoanim, self.adjmascoinan, self.adjfemio, self.adjnueto,\n self.adjpluroanim, self.adjpluroinan])\n self.adjectivesmyagkiy = list([self.adjmascmyagkiyanim, self.adjmascmyagkiyinan, self.adjfemimyagkiy, self.adjnuetmyagkiy,\n self.adjplurmyagkiyanim, self.adjplurmyagkiyinan])\n self.declension.append(self.adjectiveshard)\n self.declension.append(self.adjectivesgkxsoft)\n self.declension.append(self.adjectivessoft)\n self.declension.append(self.adjectivesoy)\n self.declension.append(self.adjectivesmyagkiy)\n return", "def __init__(self):\n self._word_freqs = {}", "def __init__(self):\n self.donors = {}", "def __init__(self, personDict):\n self.personTTLSet = TTLSet()\n self.initTTLSet\n self.personDict = personDict\n self.createPersonAsRessource()", "def __init__(self):\n\t\n\t\tconfig = ps.Decoder.default_config()\n\t\tconfig.set_string('-hmm','/usr/local/share/pocketsphinx/model/en-us/en-us')\n\t\tconfig.set_string('-lm', '/home/sudhin/JARVIS/beg/mine/1894.lm')\n\t\tconfig.set_string('-dict','/home/sudhin/JARVIS/beg/mine/1894.dic')\n\t\tself._decoder = ps.Decoder(config)", "def __init__(self):\n super(sppasAnnotationsSettings, 
self).__init__()\n self.__dict__ = dict(\n error=-1,\n ok=0,\n warning=1,\n ignore=2,\n info=3,\n\n extension=\".xra\",\n\n # all the types of the annotations implemented into SPPAS\n types=(\"STANDALONE\", \"SPEAKER\", \"INTERACTION\"),\n\n # standard iso639-3 code for an undetermined language.\n UNDETERMINED=\"und\"\n\n )", "def initialiser(self, affection):\n pass", "def __init__(self, func):\n self.dictionary = {}\n self.func = func", "def __init__(self):\n _hypre.HypreIdentity_swiginit(self, _hypre.new_HypreIdentity())" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Returns the number of syllables in a word. If there's more than one pronunciation, take the shorter one. If there is no entry in the dictionary, return 1.
def num_syllables(self, word): # TODO: provide an implementation! word = word.lower() D = self._pronunciations #D = nltk.corpus.cmudict.dict() if(word not in D.keys()): #print word not in CMUDictionary return 1 #count stores no of syllables for each pronunciation of the word count = [] #for each pronunciation for x in D[word]: n = 0 #for each syllable for y in x: #if vowel sound if y[-1].isdigit(): n = n + 1 count.append(n) # return the pronunciation having least syllables return min(count) #return min([len([y for y in x if y[-1].isdigit()]) for x in D[word.lower()]])
[ "def count_syllables_in_word(word):\n try:\n return [len(list(y for y in x if y[-1].isdigit())) for x in CMUDICT[word.lower()]][0]\n except KeyError:\n return sylco(word)", "def num_syllables(self, word):\n\n pronouncation = 0\n firstProno = 0\n count = 0\n\n x = word\n t = self._pronunciations\n\n for i in (x,):\n try:\n pronouncation = t[i]\n firstProno = pronouncation[0]\n except KeyError:\n return 1\n\n if len(pronouncation) > 1:\n if len(pronouncation[0]) <= len(pronouncation[1]):\n firstProno = pronouncation[0]\n else:\n firstProno = pronouncation[1]\n\n for i in firstProno:\n syll = re.search(r'[AEIOU]', i)\n if syll != None:\n count += 1\n\n return count", "def num_of_syllables(self, word):\n\n if word.lower() in self.cmu_dict:\n return len([phoneme for phoneme in self.cmu_dict[word.lower()][0]\n if phoneme[-1].isdigit()])\n # If word is unknown, assume 1 syllable/3 letters (average for English)\n else:\n return len(word)//3", "def countsyllables_en(word):\r\n\tif not word:\r\n\t\treturn 0\r\n\r\n\t# Remove final silent 'e'\r\n\tif word[-1] == \"e\":\r\n\t\tword = word[:-1]\r\n\r\n\t# Check for a cached syllable count\r\n\tif word in fallback_cache:\r\n\t\treturn fallback_cache[word]\r\n\r\n\t# Count vowel groups\r\n\tresult = 0\r\n\tprev_was_vowel = False\r\n\tfor char in word:\r\n\t\tis_vowel = char in VOWELS or char == 'y'\r\n\t\tif is_vowel and not prev_was_vowel:\r\n\t\t\tresult += 1\r\n\t\tprev_was_vowel = is_vowel\r\n\r\n\t# Add & subtract syllables\r\n\tfor r in fallback_addsyl:\r\n\t\tif r.search(word):\r\n\t\t\tresult += 1\r\n\tfor r in fallback_subsyl:\r\n\t\tif r.search(word):\r\n\t\t\tresult -= 1\r\n\r\n\t# Cache the syllable count\r\n\tfallback_cache[word] = result\r\n\r\n\treturn result", "def count_syllables(self, word: str) -> Tuple[int, ...]:\n pass", "def count_syllables(self, word: str) -> int:\n\n return len(self.hyphen.positions(word.strip())) + 1 if word.strip().isalnum() else 0", "def count_syllables(words):\n\n\n count = 0\n\n for word in words:\n word_count = count_syllables_in_word(word)\n count = count + word_count\n return count", "def get_syllable_count(self, syllables: List[str]) -> int:\n tmp_syllables = copy.deepcopy(syllables)\n return len(\n string_utils.remove_blank_spaces(\n string_utils.move_consonant_right(\n tmp_syllables, self._find_solo_consonant(tmp_syllables)\n )\n )\n )", "def num_syllables(self):\r\n objects = self.__get_objects()\r\n z1 = str(objects[1]).strip().split()\r\n return int(z1[0])", "def estimate(word):\n parts = re.split(r'[^aeiouy]+', word)\n valid_parts = []\n\n for part in parts:\n if part != '':\n valid_parts.append(part)\n\n syllables = 0\n\n for p in re_subsyllables:\n if p.match(word):\n syllables -= 1\n\n for p in re_addsyllables:\n if p.match(word):\n syllables += 1\n\n syllables += len(valid_parts)\n\n if syllables <= 0:\n syllables = 1\n\n return syllables", "def most_stressed(word):\r\n syllableses = DICT.get(word)\r\n if syllableses is None:\r\n raise NotInCMUDict(\"'{}' not in the CMUDict\".format(word))\r\n\r\n syllables = syllableses[0]\r\n\r\n for x in (\"1\", \"2\", \"0\"):\r\n for s in syllables:\r\n if s.endswith(x):\r\n return x\r\n\r\n return False", "def count_syllables_in_line(line):\n ws = line.rstrip('.').split()\n return sum([count_syllables_in_word(w) for w in ws])", "def get_num_syllables(poem_pronunciation: POEM_PRONUNCIATION) -> List[int]:\n num_syllables = []\n for line in poem_pronunciation:\n num = 0\n for word in line:\n for p in word:\n if p[-1].isdigit():\n num += 1\n 
num_syllables.append(num)\n return num_syllables", "def syllables( word ):\n return syllable_guide[word.lower()]", "def total_syllables(target_text):\n\n splited_text = target_text.split()\n count = 0\n for word in splited_text:\n count = count + word_syllables(word)\n return count", "def count_syllables(text):\n\n import re\n\n # Make a list of vowel sounds presenting in the text (converted to lower-case letters)\n syllable_list = re.findall(r'[aiouy]+e*|e(?!d\\b|ly)[aiouye]?|[td]ed|le\\b', text.lower())\n # Find the size of the list\n count = len(syllable_list)\n\n return count", "def count_syllables(book):\n d = dict(cmudict.entries())\n with open(book, 'r') as myfile:\n booky = myfile.read().lower()\n tokenized_book = nltk.word_tokenize(booky)\n\n count = 0\n for word in tokenized_book:\n count += ( nsly(word, d))\n\n return count", "def cmu_syl(word: str) -> int:\n return len(\n [ph for ph in phoneme_dict[word][0] if ph.strip(string.ascii_letters)]\n )", "def syllables_in_text(word_list):\n total_syllables = 0\n vowels = 'aeiouy'\n for word in word_list:\n count = 0\n if word[0] in vowels:\n count += 1\n for index in range(1, len(word)):\n if word[index] in vowels and word[index - 1] not in vowels:\n count += 1\n if word.endswith('e'):\n count -= 1\n if word.endswith('le') and len(word) > 2 and word[-3] not in vowels:\n count += 1\n if count == 0:\n count += 1\n total_syllables += count\n return total_syllables" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Takes text where lines are separated by newline characters. Returns True if the text is a limerick, False otherwise. A limerick is defined as a poem with the form AABBA, where the A lines rhyme with each other, the B lines rhyme with each other, and the A lines do not rhyme with the B lines.
def is_limerick(self, text): # TODO: provide an implementation! text = text.lower() p = [] p = text.split('\n') p = [i.strip(' ') for i in p] p = list(filter(None, p)) # all limericks must have 5 lines AABBA if len(p) != 5: return False #words list stores the list of words in each line of the limerick words = [] for i in range(0, 5): p[i] = p[i].strip(".,:;?!") temp = [] T = p[i] temp = self.apostrophe_tokenize(T) words.append(temp) count = [] #print len(words) for i in range(0, 5): #print words[i] n = 0 for j in words[i]: n = n + self.num_syllables(j) count.append(n) # check if any line has fewer than 4 syllables for i in count: if i < 4: return False A1 = count[0] A2 = count[1] B1 = count[2] B2 = count[3] A3 = count[4] # check if B1 has fewer syllables than A1, A2 and A3 if B1 > A1 or B1 > A2 or B1 > A3: return False # check if B2 has fewer syllables than A1, A2 and A3 if B2 > A1 or B2 > A2 or B2 > A3: return False # check if the no of syllables in B1 and B2 differs by more than 2 if abs(B1 - B2) > 2: return False # check if any two A's differ in no of syllables by more than 2 if abs(A1 - A2) > 2 or abs(A1 - A3) > 2 or abs(A2 - A3) > 2: return False #check if A1, A2 and A3 rhyme with each other if self.rhymes(words[0][-1], words[1][-1]) and self.rhymes(words[0][-1], words[4][-1]) and self.rhymes(words[1][-1], words[4][-1]): #check if B1 and B2 rhyme with each other if self.rhymes(words[2][-1],words[3][-1]): #check if A and B do not rhyme if (not self.rhymes(words[0][-1], words[2][-1]) and not self.rhymes(words[0][-1], words[3][-1]) and not self.rhymes(words[1][-1], words[2][-1]) and not self.rhymes(words[1][-1], words[3][-1]) and not self.rhymes(words[4][-1], words[2][-1]) and not self.rhymes(words[4][-1], words[3][-1]) ): return True return False
[ "def is_limerick(self, text):\n array = []\n ending = []\n bobo = line_tokenize(text)\n pattern = re.compile(r'[a-z]')\n\n lim = False\n\n # Return last word in sentence\n for i in bobo:\n array.append(word_tokenize(i))\n\n for i in array:\n tempLen = len(i)\n abcs = re.match(pattern, i[tempLen-1])\n if abcs != None:\n ending.append(i[tempLen-1])\n else:\n ending.append(i[tempLen-2])\n\n if len(ending) != 5:\n lim = False\n return lim\n\n # Still working on Python loops, this somehow was buggy otherwise\n for c in punctuation:\n ending[0] = ending[0].replace(c,\"\")\n ending[1] = ending[1].replace(c,\"\")\n ending[2] = ending[2].replace(c,\"\")\n ending[3] = ending[3].replace(c,\"\")\n ending[4] = ending[4].replace(c,\"\")\n\n # AABBA\n if self.rhymes(ending[0],ending[1]) == True:\n if self.rhymes(ending[2],ending[3]) == True:\n if self.rhymes(ending[4],ending[0]) == True:\n if self.rhymes(ending[0],ending[2]) != True:\n lim = True\n\n return lim", "def is_limerick(self, text):\n\n return False", "def is_limerick(self, text):\n \n sentences = text.splitlines()\n \n #remove blank setences\n sentences = [sentence for sentence in sentences if sentence.strip()] \n \n if len(sentences) != 5 : return False \n #remove punctuations for all sentences\n words_sentence1 = word_tokenize(sentences[0].translate(None, string.punctuation).lower())\n words_sentence2 = word_tokenize(sentences[1].translate(None, string.punctuation).lower())\n words_sentence3 = word_tokenize(sentences[2].translate(None, string.punctuation).lower())\n words_sentence4 = word_tokenize(sentences[3].translate(None, string.punctuation).lower())\n words_sentence5 = word_tokenize(sentences[4].translate(None, string.punctuation).lower())\n \n #check rhymes for AAA BB and not rhymes for AB\n ret_flag = (self.rhymes(words_sentence1[len(words_sentence1) - 1],\n words_sentence2[len(words_sentence2) - 1]) and\n self.rhymes(words_sentence3[len(words_sentence3) - 1 ],\n words_sentence4[len(words_sentence4) - 1 ]) and\n self.rhymes(words_sentence2[len(words_sentence2) - 1 ],\n words_sentence5[len(words_sentence5) - 1 ]) and\n self.rhymes(words_sentence1[len(words_sentence1) - 1 ],\n words_sentence5[len(words_sentence5) - 1 ]) and \n (not self.rhymes(words_sentence1[len(words_sentence1) - 1],\n words_sentence3[len(words_sentence3) - 1])) and \n (not self.rhymes(words_sentence1[len(words_sentence1) - 1],\n words_sentence4[len(words_sentence4) - 1])) and \n (not self.rhymes(words_sentence2[len(words_sentence2) - 1],\n words_sentence3[len(words_sentence3) - 1])) and \n (not self.rhymes(words_sentence2[len(words_sentence2) - 1],\n words_sentence4[len(words_sentence4) - 1])) and \n (not self.rhymes(words_sentence5[len(words_sentence5) - 1],\n words_sentence3[len(words_sentence3) - 1])) and \n (not self.rhymes(words_sentence5[len(words_sentence5) - 1],\n words_sentence4[len(words_sentence4) - 1])))\n \n if ret_flag == False: return False\n \n \n # Check additional constraints\n \n sum_of_syl1 = 0\n for word in words_sentence1 : sum_of_syl1 += self.num_syllables(word)\n \n if sum_of_syl1 < 4 : return False\n sum_of_syl2 = 0\n for word in words_sentence2 : sum_of_syl2 += self.num_syllables(word)\n \n if sum_of_syl2 < 4 : return False\n \n \n sum_of_syl_A_diff = 0\n if sum_of_syl1 > sum_of_syl2 : sum_of_syl_A_diff = sum_of_syl1 - sum_of_syl2\n else : sum_of_syl_A_diff = sum_of_syl2 - sum_of_syl1\n \n if sum_of_syl_A_diff > 2 : return False \n \n sum_of_syl3 = 0\n for word in words_sentence3 : sum_of_syl3 += self.num_syllables(word)\n \n if 
sum_of_syl3 < 4 : return False\n sum_of_syl4 = 0\n for word in words_sentence4 : sum_of_syl4 += self.num_syllables(word)\n \n if sum_of_syl4 < 4 : return False\n \n \n sum_of_syl_B_diff = 0\n if sum_of_syl3 > sum_of_syl4 : sum_of_syl_B_diff = sum_of_syl3 - sum_of_syl4\n else : sum_of_syl_B_diff = sum_of_syl4 - sum_of_syl3\n \n if sum_of_syl_B_diff > 2 : return False \n \n if (sum_of_syl3 > sum_of_syl1 and sum_of_syl3 > sum_of_syl2 \n and sum_of_syl4 > sum_of_syl1 and sum_of_syl4 > sum_of_syl2) : return False\n \n \n sum_of_syl5 = 0\n for word in words_sentence5 : sum_of_syl5 += self.num_syllables(word) \n \n if sum_of_syl5 < 4 : return False\n \n sum_of_syl_A_diff = 0\n if sum_of_syl1 > sum_of_syl5 : sum_of_syl_A_diff = sum_of_syl1 - sum_of_syl5\n else : sum_of_syl_A_diff = sum_of_syl5 - sum_of_syl1\n \n if sum_of_syl_A_diff > 2 : return False \n \n sum_of_syl_A_diff = 0\n if sum_of_syl2 > sum_of_syl5 : sum_of_syl_A_diff = sum_of_syl2 - sum_of_syl5\n else : sum_of_syl_A_diff = sum_of_syl5 - sum_of_syl2\n \n \n if sum_of_syl_A_diff > 2 : return False \n \n if (sum_of_syl3 > sum_of_syl5 and sum_of_syl4 > sum_of_syl5) : return False\n \n \n return ret_flag", "def is_line_on_multiline(feature_1: Sequence, feature_2: Sequence) -> bool:\n return any(is_line_on_line(feature_1, coords_2) for coords_2 in feature_2)", "def test_LogicalLines(self) -> None:\n content = \"\"\"\nfoo \\\\\nbar \\\\\nbaz\nfoo\nbling \\\\\nbling \\\\ bling\nbling\n\"\"\"\n fobj = io.StringIO(content)\n lines = LogicalLines(fobj).readlines()\n assert lines == [\n '\\n',\n 'foo bar baz\\n',\n 'foo\\n',\n 'bling bling \\\\ bling\\n',\n 'bling\\n',\n ], lines", "def is_linebreak_sensor(sensor_msg):\n return sensor_msg[0:2] == \"lb\"", "def is_multiline(s):\n return len(s.splitlines()) > 1", "def IsMultiline(self):\r\n\r\n return \"\\n\" in self.caption", "def test_on_no_newlines(self):\n assert len(lint(self.text_with_no_newline)) == 1", "def __isQuebraLinha(self, char):\n return char == r'\\n'", "def test_multiline(self):\n self.assertEqual(3, len(self.md['Note'].splitlines()))", "def identify_multilines_command(line):\n index = line.find('\\'')\n if(index >=0):\n return True\n return False", "def lemmatize_chunk(self, chunk):\n self.current_data = None\n new_data = self.lemmatizer.lemmatize(chunk)\n # regex checks if '\\r\\n' is the only char used in the chunk\n contains_only_newline = bool(re.match(r\"^[\\r\\n]+$\", chunk))\n if not contains_only_newline:\n self.process_initial_data(new_data)\n self.lemmatized_text_data.extend(new_data)\n if contains_only_newline and len(self.lemmatized_text_data):\n token_lemma_dict_keys = list(self.token_lemma_dict.keys())\n prev_lemma_id = self.lemmatized_text_data[-1][\"lemma_id\"]\n following = self.lemmatized_text_data[-1][\"following\"]\n #Note: Added check if we have reached the end of the data array because theres a bug where new lines are added after each edit\n if len(token_lemma_dict_keys) and prev_lemma_id not in self.token_lemma_dict[token_lemma_dict_keys[-1]]:\n self.lemmatized_text_data[-1][\"following\"] = f\"{following}{chunk}\"\n else:\n self.process_initial_data(new_data)\n self.lemmatized_text_data.extend(new_data)\n #TODO EDGE CASE: Newlines/breaks that may happen at the very beginning of the text", "def is_sonnet(poem):\n return len([line for line in poem.split(\"\\n\") if line]) == 14", "def detect_nl(string_or_lines, line_end=None):\n if line_end is None:\n line_end = '\\n' if (string_or_lines and\n string_or_lines[-1].endswith('\\n')) else ''\n return line_end", 
"def match_multiline(self, text, delimiter, in_state, style):\n # If inside triple-single quotes, start at 0\n if self.previousBlockState() == in_state:\n start = 0\n add = 0\n # Otherwise, look for the delimiter on this line\n else:\n start = delimiter.indexIn(text)\n # Move past this match\n add = delimiter.matchedLength()\n\n # As long as there's a delimiter match on this line...\n while start >= 0:\n # Look for the ending delimiter\n end = delimiter.indexIn(text, start + add)\n # Ending delimiter on this line?\n if end >= add:\n length = end - start + add + delimiter.matchedLength()\n self.setCurrentBlockState(0)\n # No; multi-line string\n else:\n self.setCurrentBlockState(in_state)\n length = len(text) - start + add\n # Apply formatting\n self.setFormat(start, length, style)\n # Look for the next match\n start = delimiter.indexIn(text, start + length)\n\n # Return True if still inside a multi-line string, False otherwise\n if self.currentBlockState() == in_state:\n return True\n else:\n return False", "def endswith_linebreak(e):\n if len(e) == 0:\n txt = (e.text or \"\")\n else:\n txt = (e[-1].tail or \"\")\n for i in range(len(txt) - 1, -1, -1):\n c = txt[i]\n if not c.isspace():\n return False\n if c == \"\\n\":\n return True\n return False", "def match_multiline(self, text, delimiter, in_state, style):\n # If inside triple-single quotes, start at 0\n if self.previousBlockState() == in_state:\n start = 0\n add = 0\n # Otherwise, look for the delimiter on this line\n else:\n start = delimiter.indexIn(text)\n # Move past this match\n add = delimiter.matchedLength()\n\n # As long as there's a delimiter match on this line...\n while start >= 0:\n # Look for the ending delimiter\n end = delimiter.indexIn(text, start + add)\n # Ending delimiter on this line?\n if end >= add:\n length = end - start + add + delimiter.matchedLength()\n self.setCurrentBlockState(0)\n # No; multi-line string\n else:\n self.setCurrentBlockState(in_state)\n length = len(text) - start + add\n # Apply formatting\n self.setFormat(start, length, self.styles[style])\n # Look for the next match\n start = delimiter.indexIn(text, start + length)\n\n # Return True if still inside a multi-line string, False otherwise\n if self.currentBlockState() == in_state:\n return True\n else:\n return False", "def make_line(line, n_syl, syl_counts):\n\n # Current number of syllables in constructed line.\n # This includes the syllable count of the first word.\n curr = 0\n\n # Now, since the list is reversed, the last word of the actual sonnet\n # line is the first word of 'line'. So we want to check if this\n # word can be counted as one syllable.\n\n # Number of syllable in first word (last word of actual line)\n init_syl = syl_counts[line[0]]\n init_syl_alt = init_syl\n\n # Alternative syllable count\n if ((line[0] + '_') in syl_counts):\n init_syl_alt = syl_counts[line[0] + '_']\n\n for i in range(1, n_syl):\n if line[i] not in syl_counts:\n return (False, '')\n\n w_syl = syl_counts[line[i]]\n\n if init_syl + curr + w_syl and init_syl_alt + curr + w_syl > n_syl:\n return (False, '')\n if init_syl+ curr + w_syl == n_syl or init_syl_alt + curr + w_syl == n_syl:\n return (True, ' '.join(line[:i+1]))\n curr += w_syl" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Calculates the sky background temperature for a given Galactic longitude (gl), Galactic latitude (gb), and frequency (freq, in MHz). Coordinates are in degrees. A spectral index of "index" is assumed (default is 2.55). The return value is in K. If a frequency array 'freqs' is given, then Tsky is calculated for each frequency in the array and the returned value is a list of Tsky values.
def tsky(gl, gb, freq, index, freqs=None): # reading the table nsky=np.zeros((90, 180), dtype=float) for ii in xrange(90): for jj in xrange(180): pos=(ii*180+jj)*5 nsky[ii,jj]=float(haslam_table[pos:pos+5]) # Convert to standard l,b b = int(gb + 90.5) if b >= 180: b = 179 l = int(gl + 0.5) if gl >= 360: l = 0 l = int((l / 4)) if freqs == None: tsky = 2.7 + nsky[l,b] * (freq/408.0)**(index) return tsky else: temps=[] for freq in freqs: tsky = 2.7 + nsky[l,b] * (freq/408.0)**(index) temps.append(tsky) return temps
[ "def tsky_range(gl, gb, f1, f2, index, freqs=None):\n\n\t# reading the table\n\tnsky=np.zeros((90, 180), dtype=float)\n\tfor ii in xrange(90):\n\t\tfor jj in xrange(180):\n\t\t\tpos=(ii*180+jj)*5\n\t\t\tnsky[ii,jj]=float(haslam_table[pos:pos+5])\n\n\t# Convert to standard l,b\n\tb = int(gb + 90.5)\n\tif b >= 180: b = 179\n\tl = int(gl + 0.5)\n\tif gl >= 360: l = 0\n\tl = int((l / 4))\n\t\n\tif freqs == None:\n\t\ttot=0\n\t\tfor ii in xrange(101):\n\t\t\tfreq = f1 + ii*(f2-f1)/100.\n\t\t\ttsky = 2.7 + nsky[l,b] * (freq/408.0)**(index)\n\t\t\ttot += tsky\n\t\ttot /= 100.\n\t\treturn tot\n\telse:\n\t\ttemps=[]\n\t\tfor ff in xrange(1, len(freqs)):\n\t\t\ttot = 0\n\t\t\tfor ii in xrange(101):\n\t\t\t\tfreq = freqs[ff-1] + ii*(freqs[ff]-freqs[ff-1])/100.\n\t\t\t\ttsky = 2.7 + nsky[l,b] * (freq/408.0)**(index)\n\t\t\t\ttot += tsky\n\t\t\ttot /= 100.\n\t\t\ttemps.append(tot)\n\t\treturn temps", "def calc_ground_temperature(T_ambient, gv):\n\n T_max = max( T_ambient ) + 273 # to K\n T_avg = np.mean( T_ambient ) + 273 # to K\n e = gv.Z0 * math.sqrt ( ( math.pi * gv.Csl * gv.Psl ) / ( 8760 * gv.Bsl ) ) # soil constants\n Tg = [ T_avg + ( T_max - T_avg ) * math.exp( -e ) * math.cos ( ( 2 * math.pi * ( i + 1 ) / 8760 ) - e )\n for i in range(8760)]\n\n return Tg", "def tskypy(self, psr):\n # ensure l is in range 0 -> 360\n b = psr.gb\n if psr.gl < 0.:\n l = 360 + psr.gl\n else:\n l = psr.gl\n\n # convert from l and b to list indices\n j = b + 90.5\n if j > 179:\n j = 179\n\n nl = l - 0.5\n if l < 0.5:\n nl = 359\n i = float(nl) / 4.\n \n tsky_haslam = self.tskylist[180*int(i) + int(j)]\n # scale temperature before returning\n return tsky_haslam * (self.freq/408.0)**(-2.6)", "def Tsky(source, freq=350*u.MHz, model='2008'):\n\n if not isinstance(source, astropy.coordinates.sky_coordinate.SkyCoord):\n if isinstance(source,str):\n # assume .par file\n source=parfile2SkyCoord(source)\n else:\n raise TypeError('Do not know how to interpret an object of type %s' % source.__class__)\n\n\n m=SkyModel(freq=freq, tskymodel=model)\n return m.Tsky(source)", "def gaussianfg(ctx):\n\n import numpy as np\n\n from cora.core import skysim\n from cora.util import hputil\n from cora.foreground import galaxy\n\n fsyn = galaxy.FullSkySynchrotron()\n fpol = galaxy.FullSkyPolarisedSynchrotron()\n\n # Set frequency parameters\n fsyn.frequencies = ctx.obj.freq\n nfreq = len(fsyn.frequencies)\n\n nside = ctx.obj.nside\n lmax = 3 * nside\n npol = 4 if ctx.obj.full_pol else 1\n\n cv_fg = np.zeros((lmax+1, npol, nfreq, npol, nfreq))\n\n cv_fg[:, 0, :, 0, :] = skysim.clarray(fsyn.angular_powerspectrum, lmax, fsyn.nu_pixels)\n\n if ctx.obj.full_pol:\n cv_fg[:, 1, :, 1, :] = skysim.clarray(fpol.angular_powerspectrum, lmax, fsyn.nu_pixels)\n cv_fg[:, 2, :, 2, :] = skysim.clarray(fpol.angular_powerspectrum, lmax, fsyn.nu_pixels)\n\n cv_fg = cv_fg.reshape(lmax+1, npol*nfreq, npol*nfreq)\n\n alms = skysim.mkfullsky(cv_fg, nside, alms=True).reshape(npol, nfreq, lmax+1, lmax+1)\n alms = alms.transpose((1, 0, 2, 3))\n\n maps = hputil.sphtrans_inv_sky(alms, nside)\n write_map(ctx.obj.filename, maps, fsyn.frequencies, ctx.obj.freq_width, ctx.obj.include_pol)", "def harmonic_mapping(fg):\n\n f_mapping = []\n for f_res in fg:\n cfs = f_res.peak_params[:, 0]\n if len(cfs) > 0:\n f_mapping.append(list(cfs / cfs[0]))\n\n return f_mapping", "def Tsky(self, source):\n\n if not _usePyGSM:\n raise ImportError('PyGSM is not available: cannot access sky temperatures')\n if not isinstance(source, astropy.coordinates.sky_coordinate.SkyCoord):\n if 
isinstance(source,str):\n # assume .par file\n source=parfile2SkyCoord(source)\n else:\n raise TypeError('Do not know how to interpret an object of type %s' % source.__class__)\n\n source=source.galactic\n T=healpy.pixelfunc.get_interp_val(self.map,\n source.l.value,\n source.b.value,\n lonlat=True)\n return T*u.K", "def compute_ctf(freqs,rots,akv,cs,wgh,dfmid1f,dfmid2f,angastf,dscale,bfactor=None): \n av = akv * 1e3 # Convert kilovots to volts\n cs = cs * 1e7 # Convert spherical aberation from mm to A\n \n # wavelength of electrons\n elambda = 12.2643247 / n.sqrt(av + av**2 * 0.978466e-6)\n \n wgh1 = dscale*n.sqrt(1.0 - wgh**2)\n wgh2 = dscale*wgh\n\n ix = freqs[:,0]\n iy = freqs[:,1]\n freq_radius = n.sqrt(ix**2 + iy**2)\n\n angle = elambda*freq_radius\n angspt = n.arctan2(iy,ix)\n if rots is not None:\n angspt = n.mod(angspt.reshape((-1,1)) + rots.reshape((1,-1)),2.0*n.pi)\n angle = angle.reshape((-1,1)) \n c1 = 2.0*n.pi*angle**2/(2.0*elambda)\n c2 = -c1*cs*angle**2/2.0\n angdif = angspt - angastf\n ccos = n.cos(2.0*angdif)\n df = 0.5*(dfmid1f + dfmid2f + ccos*(dfmid1f-dfmid2f))\n chi = c1*df + c2\n\n ctf = -wgh1*n.sin(chi) - wgh2*n.cos(chi)\n \n if bfactor is not None:\n ctf *= envelope_function(freq_radius, bfactor)\n\n return n.require(ctf,dtype = freqs.dtype)", "def thermodynamic_temperature(frequency, T_cmb=None):\n nu = frequency.to(si.GHz, spectral())\n\n if T_cmb is None:\n from astropy.cosmology import default_cosmology\n\n T_cmb = default_cosmology.get().Tcmb0\n\n def f(nu, T_cmb=T_cmb):\n x = _si.h * nu / _si.k_B / T_cmb\n return x**2 * np.exp(x) / np.expm1(x) ** 2\n\n def convert_Jy_to_K(x_jybm):\n factor = (f(nu) * 2 * _si.k_B * si.K * nu**2 / _si.c**2).to_value(\n astrophys.Jy\n )\n return x_jybm / factor\n\n def convert_K_to_Jy(x_K):\n factor = (astrophys.Jy / (f(nu) * 2 * _si.k_B * nu**2 / _si.c**2)).to_value(\n si.K\n )\n return x_K / factor\n\n return Equivalency(\n [(astrophys.Jy / si.sr, si.K, convert_Jy_to_K, convert_K_to_Jy)],\n \"thermodynamic_temperature\",\n {\"frequency\": frequency, \"T_cmb\": T_cmb},\n )", "def get_gaia(tpf):\n c1 = SkyCoord(tpf.ra, tpf.dec, frame='icrs', unit='deg')\n result = Vizier.query_region(c1, catalog=[\"I/345/gaia2\"],\n radius=(np.hypot(*np.asarray(tpf.shape[1:])/2) * 4) *u.arcsec)\n result = result[0].to_pandas()\n result = result[result.Gmag < 20]\n cs = []\n for idx, d in result.iterrows():\n if d.Plx > 0:\n dist = Distance(parallax=d.Plx*u.milliarcsecond)\n else:\n dist = np.nan * u.parsec\n\n cs.append(SkyCoord(d.RA_ICRS*u.deg, d.DE_ICRS*u.deg,\n distance=dist,\n pm_ra_cosdec=d.pmRA*u.milliarcsecond/u.year,\n pm_dec=d.pmDE*u.milliarcsecond/u.year,\n obstime=Time('J2015.5'),\n radial_velocity=np.nanmax([0, d.RV])*(u.km/u.s)))\n return cs", "def get_skytemp(datetimestring, delays, frequency, alpha=-2.6, verbose=True):\n su.init_data()\n\n if not os.path.exists(config.RADIO_IMAGE_FILE):\n logger.error(\"Could not find 408 MHz image: %s\\n\" % (config.RADIO_IMAGE_FILE))\n return None\n try:\n if (verbose):\n print(\"Loading 408 MHz map from %s...\" % config.RADIO_IMAGE_FILE)\n f = pyfits.open(config.RADIO_IMAGE_FILE)\n except Exception as e:\n logger.error(\"Error opening 408 MHz image: %s\\nError: %s\\n\" % (config.RADIO_IMAGE_FILE, e))\n return None\n skymap = f[0].data[0]\n\n ra = (f[0].header.get('CRVAL1') +\n (numpy.arange(1, skymap.shape[1] + 1) - f[0].header.get('CRPIX1')) * f[0].header.get('CDELT1')) / 15.0\n dec = (f[0].header.get('CRVAL2') +\n (numpy.arange(1, skymap.shape[0] + 1) - f[0].header.get('CRPIX2')) * 
f[0].header.get('CDELT2'))\n\n # parse the datetimestring\n try:\n yr = int(datetimestring[:4])\n mn = int(datetimestring[4:6])\n dy = int(datetimestring[6:8])\n hour = int(datetimestring[8:10])\n minute = int(datetimestring[10:12])\n second = int(datetimestring[12:14])\n except ValueError:\n logger.error('Could not parse datetimestring %s\\n' % datetimestring)\n return None\n # UT = hour + minute / 60.0 + second / 3600.0\n UTs = '%02d:%02d:%02d' % (hour, minute, second)\n a_obstime = Time('%d-%d-%d %s' % (yr, mn, dy, UTs), scale='utc')\n a_obstime.delta_ut1_utc = 0\n a_obstime.location = config.MWAPOS\n if (verbose):\n print(\"For %02d-%02d-%02d %s UT, LST=%6.3f\" % (yr, mn, dy, UTs, a_obstime.sidereal_time(kind='mean').hour))\n\n RA, Dec = numpy.meshgrid(ra * 15, dec)\n coords = SkyCoord(ra=RA, dec=Dec, equinox='J2000', unit=(astropy.units.deg, astropy.units.deg))\n coords.location = config.MWAPOS\n coords.obstime = a_obstime\n coords_prec = coords.transform_to('altaz')\n Az, Alt = coords_prec.az.deg, coords_prec.alt.deg\n\n if (verbose):\n print(\"Creating primary beam response for frequency %.2f MHz...\" % (frequency))\n print(\"Beamformer delays are %s\" % delays)\n # get the beam response\n # first go from altitude to zenith angle\n theta = (90 - Alt) * math.pi / 180\n phi = Az * math.pi / 180\n\n # this is the response for XX and YY\n try:\n respX, respY = primary_beam.MWA_Tile_analytic(theta, phi, freq=frequency * 1e6, delays=numpy.array(delays))\n except Exception as e:\n logger.error('Error creating primary beams: %s\\n' % e)\n return None\n rX = numpy.real(numpy.conj(respX) * respX)\n rY = numpy.real(numpy.conj(respY) * respY)\n\n maskedskymap = numpy.ma.array(skymap, mask=Alt <= 0)\n maskedskymap *= (frequency / 408.0) ** alpha\n rX /= rX.sum()\n rY /= rY.sum()\n return ((rX * maskedskymap).sum()) / 10.0, ((rY * maskedskymap).sum()) / 10.0", "def fringe_frequency(self, wavelength=0.028, terrestrial_latitude=37.873199, h_s0=0):\n\t\tBew, Bns, baseline = bf.baseline_script_2D(self.hour_angles, 0, self.volts, self.times)\n\t\tfirst_term = Bew / wavelength * np.cos(self.dec) * cos(h_s0)\n\t\tsecond_term = Bns / wavelength * np.sin(terrestrial_latitude) * np.cos(self.dec) * np.sin(h_s0)\n\t\treturn first_term - second_term", "def gTsky(a,d,g1,g2,ac,dc):\n # requires high-precision\n unit=a.unit\n a = np.array(a,dtype=np.float64)\n d = np.array(d,dtype=np.float64)\n ac = np.array(ac,dtype=np.float64)\n dc = np.array(dc,dtype=np.float64)\n #\n r = np.arccos(np.cos(d)*np.cos(dc)*np.cos(a-ac)+np.sin(d)*np.sin(dc))\n cosp = np.sin(ac - a)*np.cos(d)/np.sin(r)\n sinp = (-np.cos(dc)*np.sin(d) + np.sin(dc)*np.cos(d)*np.cos(a-ac))/np.sin(r)\n cos2p = cosp**2 - sinp**2\n sin2p = 2.*sinp*cosp\n gt = - (g1 * cos2p + g2 * sin2p)\n gr = (g1 * sin2p - g2 * cos2p)\n return gt, gr, r*unit", "def compute_tsky_hot( xv, yv, hv, thot, tcold):\n\n nData = len(yv) \n epsilons = np.full( nData, EPSILON)\n tsys = np.zeros(nData) # initialize arrays\n\n Z = np.zeros(nData)\n oneMZ = np.zeros(nData)\n # For full Temp calibration, a spectrum taken at high elevation away from \n # The galactic plan is used. For this program the cold spectrum must be\n # the spectrum being calibrated. See the M command for comparision\n epsilons = np.full( nData, EPSILON)\n yv = np.maximum( yv, epsilons)\n hv = np.maximum( hv, epsilons)\n # comput the cold/hot ratio\n Z = yv/hv\n oneMZ = np.full( nData, 1.) 
- Z\n oneMZ = np.maximum( oneMZ, epsilons)\n\n # the cold, receiver, temperature is this function\n tsys = ((Z*thot) - tcold)/oneMZ\n \n n6 = int(nData/6)\n n56 = 5*n6\n\n tsysmedian = np.median( tsys[n6:n56])\n\n tsky = np.zeros(nData) # initialize arrays\n S = np.zeros(nData) # initialize arrays\n\n # The system gain S is computed assuming a tsys is the cold load\n S = np.full( nData, tsysmedian+thot)/hv\n # scale the observed instensity in counts to Kelvins.\n tsky = S*yv\n\n return tsky", "def system_temp(freq_hz):\n freqs = np.array([0.05e9, 0.07e9, 0.11e9, 0.17e9, 0.25e9, 0.35e9, 0.45e9,\n 0.55e9, 0.65e9])\n t_sys = np.array([4.0409e3, 1.5029e3, 0.6676e3, 0.2936e3, 0.1402e3, 0.0873e3,\n 0.0689e3, 0.0607e3, 0.0613e3])\n f = interp1d(np.log10(freqs), np.log10(t_sys), kind='cubic')\n return 10**f(np.log10(freq_hz))", "def Gamma_per_grain(ZZall, Gamma_a_Z, ZZ_fz, fdist, GG):\n\n # index in the ZZall array for the charges in ZZ_fz\n zi_down = np.where(ZZall == ZZ_fz[0])[0][0]# find the index of the ZZ_fz[0] in ZZall \n zi_up = np.where(ZZall == ZZ_fz[-1])[0][0]# find the index of the ZZ_fz[-1] in ZZall\n \n #Gamma_pe_a = np.sum(fz*Gamma_dotdot_scaled[zi_down:zi_up+1])\n Gamma_pe_a = np.sum(fdist*Gamma_a_Z[zi_down:zi_up+1])\n \n return Gamma_pe_a", "def at_frequencies(\n self,\n freqs,\n inplace=True,\n freq_interp_kind=\"cubic\",\n nan_handling=\"clip\",\n run_check=True,\n check_extra=True,\n run_check_acceptability=True,\n atol=None,\n ):\n sky = self if inplace else self.copy()\n\n if atol is None:\n atol = self.freq_tol\n\n if self.spectral_type == \"spectral_index\":\n sky.stokes = (\n self.stokes\n * (freqs[:, None].to(\"Hz\") / self.reference_frequency[None, :].to(\"Hz\"))\n ** self.spectral_index[None, :]\n )\n sky.reference_frequency = None\n elif self.spectral_type == \"full\":\n # Find a subset of the current array.\n ar0 = self.freq_array.to_value(\"Hz\")\n ar1 = freqs.to_value(\"Hz\")\n tol = atol.to_value(\"Hz\")\n matches = np.fromiter(\n (np.isclose(freq, ar1, atol=tol).any() for freq in ar0), dtype=bool\n )\n\n if np.sum(matches) != freqs.size:\n raise ValueError(\n \"Some requested frequencies are not present in the current SkyModel.\"\n )\n sky.stokes = self.stokes[:, matches, :]\n if sky.freq_edge_array is not None:\n sky.freq_edge_array = sky.freq_edge_array[:, matches]\n elif self.spectral_type == \"subband\":\n if np.max(freqs.to(\"Hz\")) > np.max(self.freq_array.to(\"Hz\")):\n raise ValueError(\n \"A requested frequency is larger than the highest subband frequency.\"\n )\n if np.min(freqs.to(\"Hz\")) < np.min(self.freq_array.to(\"Hz\")):\n raise ValueError(\n \"A requested frequency is smaller than the lowest subband frequency.\"\n )\n # Interpolate. 
Need to be careful if there are NaNs -- they spoil the\n # interpolation even for sources that do not have any NaNs.\n stokes_unit = self.stokes.unit\n if np.any(np.isnan(self.stokes.value)):\n allowed_nan_handling = [\"propagate\", \"interp\", \"clip\"]\n if nan_handling not in allowed_nan_handling:\n raise ValueError(\n f\"nan_handling must be one of {allowed_nan_handling}\"\n )\n\n message = \"Some stokes values are NaNs.\"\n if nan_handling == \"propagate\":\n message += (\n \" All output stokes values for sources with any NaN values \"\n \"will be NaN.\"\n )\n else:\n message += \" Interpolating using the non-NaN values only.\"\n message += (\n \" You can change the way NaNs are handled using the \"\n \"`nan_handling` keyword.\"\n )\n warnings.warn(message)\n stokes_arr = self.stokes.value\n freq_arr = self.freq_array.to(\"Hz\").value\n at_freq_arr = freqs.to(\"Hz\").value\n # first interpolate any that have no NaNs\n wh_nan = np.nonzero(np.any(np.isnan(stokes_arr), axis=(0, 1)))[0]\n wh_non_nan = np.nonzero(np.all(~np.isnan(stokes_arr), axis=(0, 1)))[0]\n assert wh_non_nan.size + wh_nan.size == self.Ncomponents, (\n \"Something went wrong with spliting sources with NaNs. This is a \"\n \"bug, please make an issue in our issue log\"\n )\n new_stokes = np.zeros(\n (4, freqs.size, self.Ncomponents), dtype=stokes_arr.dtype\n )\n if wh_non_nan.size > 0:\n finterp = scipy.interpolate.interp1d(\n freq_arr,\n stokes_arr[:, :, wh_non_nan],\n axis=1,\n kind=freq_interp_kind,\n )\n new_stokes[:, :, wh_non_nan] = finterp(at_freq_arr)\n\n if nan_handling == \"propagate\":\n new_stokes[:, :, wh_nan] = np.NaN\n else:\n wh_all_nan = []\n wh_nan_high = []\n wh_nan_low = []\n wh_nan_many = []\n for comp in wh_nan:\n freq_inds_use = np.nonzero(\n np.all(~np.isnan(stokes_arr[:, :, comp]), axis=0)\n )[0]\n if freq_inds_use.size == 0:\n new_stokes[:, :, comp] = np.NaN\n wh_all_nan.append(comp)\n continue\n at_freq_inds_use = np.arange(freqs.size)\n\n if np.max(at_freq_arr) > np.max(freq_arr[freq_inds_use]):\n at_freq_inds_use = np.nonzero(\n at_freq_arr <= np.max(freq_arr[freq_inds_use])\n )[0]\n at_freqs_large = np.nonzero(\n at_freq_arr > np.max(freq_arr[freq_inds_use])\n )[0]\n wh_nan_high.append(comp)\n if nan_handling == \"interp\":\n new_stokes[:, at_freqs_large, comp] = np.NaN\n else: # clip\n large_inds_use = np.full(\n (at_freqs_large.size), freq_inds_use[-1]\n )\n new_stokes[:, at_freqs_large, comp] = stokes_arr[\n :, large_inds_use, comp\n ]\n\n if np.min(at_freq_arr) < np.min(freq_arr[freq_inds_use]):\n at_freq_inds_use_low = np.nonzero(\n at_freq_arr >= np.min(freq_arr[freq_inds_use])\n )[0]\n at_freq_inds_use = np.intersect1d(\n at_freq_inds_use, at_freq_inds_use_low\n )\n at_freqs_small = np.nonzero(\n at_freq_arr < np.min(freq_arr[freq_inds_use])\n )[0]\n wh_nan_low.append(comp)\n if nan_handling == \"interp\":\n new_stokes[:, at_freqs_small, comp] = np.NaN\n else: # clip\n small_inds_use = np.full(\n (at_freqs_small.size), freq_inds_use[0]\n )\n new_stokes[:, at_freqs_small, comp] = stokes_arr[\n :, small_inds_use, comp\n ]\n\n if at_freq_inds_use.size > 0:\n try:\n finterp = scipy.interpolate.interp1d(\n freq_arr[freq_inds_use],\n stokes_arr[:, freq_inds_use, comp],\n axis=1,\n kind=freq_interp_kind,\n )\n except ValueError:\n wh_nan_many.append(comp)\n finterp = scipy.interpolate.interp1d(\n freq_arr[freq_inds_use],\n stokes_arr[:, freq_inds_use, comp],\n axis=1,\n kind=\"linear\",\n )\n new_stokes[:, at_freq_inds_use, comp] = finterp(\n at_freq_arr[at_freq_inds_use]\n )\n else:\n 
continue\n if len(wh_all_nan) > 0:\n warnings.warn(\n f\"{len(wh_all_nan)} components had all NaN stokes values. \"\n \"Output stokes for these components will all be NaN.\"\n )\n if len(wh_nan_high) > 0:\n message = (\n f\"{len(wh_nan_high)} components had all NaN stokes values \"\n \"above one or more of the requested frequencies. \"\n )\n if nan_handling == \"interp\":\n message += (\n \"The stokes for these components at these frequencies \"\n \"will be NaN.\"\n )\n else:\n message += (\n \"Using the stokes value at the highest frequency \"\n \"without a NaN for these components at these \"\n \"frequencies.\"\n )\n warnings.warn(message)\n if len(wh_nan_low) > 0:\n message = (\n f\"{len(wh_nan_low)} components had all NaN stokes values below \"\n \"one or more of the requested frequencies. \"\n )\n if nan_handling == \"interp\":\n message += (\n \"The stokes for these components at these frequencies \"\n \"will be NaN.\"\n )\n else:\n message += (\n \"Using the stokes value at the lowest frequency \"\n \"without a NaN for these components at these frequencies.\"\n )\n warnings.warn(message)\n if len(wh_nan_many) > 0:\n warnings.warn(\n f\"{len(wh_nan_many)} components had too few non-NaN stokes \"\n \"values for chosen interpolation. Using linear \"\n \"interpolation for these components instead.\"\n )\n sky.stokes = new_stokes * stokes_unit\n else:\n finterp = scipy.interpolate.interp1d(\n self.freq_array.to(\"Hz\").value,\n self.stokes.value,\n axis=1,\n kind=freq_interp_kind,\n )\n sky.stokes = finterp(freqs.to(\"Hz\").value) * stokes_unit\n else:\n # flat spectrum\n stokes_unit = self.stokes.unit\n sky.stokes = np.repeat(self.stokes.value, len(freqs), axis=1) * stokes_unit\n\n sky.reference_frequency = None\n sky.Nfreqs = freqs.size\n sky.freq_array = freqs\n if sky.spectral_type == \"subband\" and sky.freq_edge_array is not None:\n sky.freq_edge_array = None\n sky.spectral_type = \"full\"\n if sky.frame_coherency is not None:\n sky.coherency_radec = sky.calc_frame_coherency()\n\n if run_check:\n sky.check(\n check_extra=check_extra, run_check_acceptability=run_check_acceptability\n )\n\n if not inplace:\n return sky", "def get_skylight(self, coords):\n\n x, y, z = coords\n index, y = divmod(y, 16)\n\n return self.sections[index].get_skylight((x, y, z))", "def sfreq_to_times(gaze_array, sfreq, start_time=0):\n return np.arange(0, len(gaze_array) / sfreq, 1. / sfreq) + start_time" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Calculates the average sky background temperature for a given Galactic longitude (gl) and Galactic latitude (gb) between frequencies f1 and f2 (in MHz). Coordinates are in degrees. A spectral index of "index" is assumed (default is 2.55). The return value is in K. If a frequency array 'freqs' is given, then the average Tsky is calculated for each frequency range f0-f1, f1-f2, ... in the array, and the returned value is a list of average Tsky values. The size of the returned list is one less than the size of freqs.
def tsky_range(gl, gb, f1, f2, index, freqs=None): # reading the table nsky=np.zeros((90, 180), dtype=float) for ii in xrange(90): for jj in xrange(180): pos=(ii*180+jj)*5 nsky[ii,jj]=float(haslam_table[pos:pos+5]) # Convert to standard l,b b = int(gb + 90.5) if b >= 180: b = 179 l = int(gl + 0.5) if gl >= 360: l = 0 l = int((l / 4)) if freqs == None: tot=0 for ii in xrange(101): freq = f1 + ii*(f2-f1)/100. tsky = 2.7 + nsky[l,b] * (freq/408.0)**(index) tot += tsky tot /= 100. return tot else: temps=[] for ff in xrange(1, len(freqs)): tot = 0 for ii in xrange(101): freq = freqs[ff-1] + ii*(freqs[ff]-freqs[ff-1])/100. tsky = 2.7 + nsky[l,b] * (freq/408.0)**(index) tot += tsky tot /= 100. temps.append(tot) return temps
[ "def tsky(gl, gb, freq, index, freqs=None):\n\n\t# reading the table\n\tnsky=np.zeros((90, 180), dtype=float)\n\tfor ii in xrange(90):\n\t\tfor jj in xrange(180):\n\t\t\tpos=(ii*180+jj)*5\n\t\t\tnsky[ii,jj]=float(haslam_table[pos:pos+5])\n\n\t# Convert to standard l,b\n\tb = int(gb + 90.5)\n\tif b >= 180: b = 179\n\tl = int(gl + 0.5)\n\tif gl >= 360: l = 0\n\tl = int((l / 4))\n\t\n\tif freqs == None:\n\t\ttsky = 2.7 + nsky[l,b] * (freq/408.0)**(index)\n\t\treturn tsky\n\telse:\n\t\ttemps=[]\n\t\tfor freq in freqs:\n\t\t\ttsky = 2.7 + nsky[l,b] * (freq/408.0)**(index)\n\t\t\ttemps.append(tsky)\n\t\treturn temps", "def band_avg_flux(freqcent, gleam_df):\n\n # 200 - 230 MHz\n if float(freqcent) > 200e6:\n gleam_bands = np.array([204, 212, 220, 227])\n\n # 163 - 200 MHz\n else:\n gleam_bands = np.array([166, 174, 181, 189, 197])\n\n # Create array of fluxes in gleam bands\n flux_array = []\n for b in gleam_bands:\n flux_array.append(gleam_df[f\"peak_flux_{b}\"].to_numpy())\n\n # Average peak flux in each of the gleam bands\n sfluxes = np.sum(np.asarray(flux_array), axis=0) / gleam_bands.shape[0]\n\n return sfluxes", "def harmonic_mapping(fg):\n\n f_mapping = []\n for f_res in fg:\n cfs = f_res.peak_params[:, 0]\n if len(cfs) > 0:\n f_mapping.append(list(cfs / cfs[0]))\n\n return f_mapping", "def freq_average(self, fmin, fmax):\n return np.mean([c for f, c in zip(self.freq, self.crossft) if fmin <= f < fmax])", "def freq_average(self, fmin, fmax):\n freq_list = np.hstack([c.freq for c in self.cross_spectra])\n crossft_list = np.hstack([c.crossft for c in self.cross_spectra])\n\n bin_edges = [fmin, fmax]\n real_mean, _, _ = binned_statistic(freq_list, crossft_list.real, statistic='mean', bins=bin_edges)\n imag_mean, _, _ = binned_statistic(freq_list, crossft_list.imag, statistic='mean', bins=bin_edges)\n return complex(real_mean, imag_mean)", "def calc_ground_temperature(T_ambient, gv):\n\n T_max = max( T_ambient ) + 273 # to K\n T_avg = np.mean( T_ambient ) + 273 # to K\n e = gv.Z0 * math.sqrt ( ( math.pi * gv.Csl * gv.Psl ) / ( 8760 * gv.Bsl ) ) # soil constants\n Tg = [ T_avg + ( T_max - T_avg ) * math.exp( -e ) * math.cos ( ( 2 * math.pi * ( i + 1 ) / 8760 ) - e )\n for i in range(8760)]\n\n return Tg", "def global_avg(temp_nc):\n start=datetime.datetime(1800,1,1,0,0,0)\n times=temp_nc.variables['time'][...]\n the_times=[start + datetime.timedelta(days=i) for i in times]\n lats=temp_nc.variables['lat'][...]\n lons=temp_nc.variables['lon'][...]\n temp=temp_nc.variables['air'][...]\n areas=np.empty([lats.shape[0],lons.shape[0]],dtype=np.float)\n R2=6371.**2.\n deg2rad=np.pi/180.\n dlat=2.*deg2rad\n dlon=2.*deg2rad\n for row,the_lat in enumerate(lats):\n for col,the_lon in enumerate(lons):\n abs_lat=np.abs(the_lat)\n co_lat=(90. 
- abs_lat)*deg2rad\n areas[row,col]=R2*np.sin(co_lat)*dlat*dlon\n\n tot_area=np.sum(areas.flat)\n num_times=temp.shape[0]\n the_temps=np.empty([num_times],dtype=np.float)\n for the_time in range(num_times):\n product=(temp[the_time,:,:]*areas).ravel()\n avg_temp=np.sum(product)/tot_area\n the_temps[the_time]=avg_temp\n return lats,lons,the_times,the_temps", "def get_average(self, s_freq, e_freq):\n s_ind = self.get_bin(s_freq)\n e_ind = self.get_bin(e_freq)\n lst = self.mags[s_ind:e_ind+1]\n try:\n avg = sum(lst)/len(lst)\n except:\n print(s_ind, e_ind)\n print('werid stuff')\n avg = 0\n return avg", "def get_average_energy(audio, beats, begin, end):\n buffer = np.square(audio[int(beats[int(begin)]):int(beats[int(end)])])\n average = np.mean(buffer)\n return average", "def get_fluxes_within_mask(tpf, aper_mask, gaia_sources):\n assert tpf is not None\n assert aper_mask is not None\n assert gaia_sources is not None\n ra, dec = gaia_sources[[\"ra\", \"dec\"]].values.T\n pix_coords = tpf.wcs.all_world2pix(np.c_[ra, dec], 0)\n contour_points = measure.find_contours(aper_mask, level=0.1)[0]\n isinside = [\n is_point_inside_mask(contour_points, pix) for pix in pix_coords\n ]\n min_gmag = gaia_sources.loc[isinside, \"phot_g_mean_mag\"].min()\n gamma = gaia_sources.loc[isinside, \"phot_g_mean_mag\"].apply(\n lambda x: 10 ** (0.4 * (min_gmag - x))\n )\n return gamma", "def gaussianfg(ctx):\n\n import numpy as np\n\n from cora.core import skysim\n from cora.util import hputil\n from cora.foreground import galaxy\n\n fsyn = galaxy.FullSkySynchrotron()\n fpol = galaxy.FullSkyPolarisedSynchrotron()\n\n # Set frequency parameters\n fsyn.frequencies = ctx.obj.freq\n nfreq = len(fsyn.frequencies)\n\n nside = ctx.obj.nside\n lmax = 3 * nside\n npol = 4 if ctx.obj.full_pol else 1\n\n cv_fg = np.zeros((lmax+1, npol, nfreq, npol, nfreq))\n\n cv_fg[:, 0, :, 0, :] = skysim.clarray(fsyn.angular_powerspectrum, lmax, fsyn.nu_pixels)\n\n if ctx.obj.full_pol:\n cv_fg[:, 1, :, 1, :] = skysim.clarray(fpol.angular_powerspectrum, lmax, fsyn.nu_pixels)\n cv_fg[:, 2, :, 2, :] = skysim.clarray(fpol.angular_powerspectrum, lmax, fsyn.nu_pixels)\n\n cv_fg = cv_fg.reshape(lmax+1, npol*nfreq, npol*nfreq)\n\n alms = skysim.mkfullsky(cv_fg, nside, alms=True).reshape(npol, nfreq, lmax+1, lmax+1)\n alms = alms.transpose((1, 0, 2, 3))\n\n maps = hputil.sphtrans_inv_sky(alms, nside)\n write_map(ctx.obj.filename, maps, fsyn.frequencies, ctx.obj.freq_width, ctx.obj.include_pol)", "def coldaverage( names):\n\n rs = radioastronomy.Spectrum() # create input and average structures\n avenames = names # create an output list to average\n\n# assume only a limited range of galactic latitudes are available\n# not range above +/-60.\n use60Range = False\n minGlat = 90. 
# initialize to extremea\n maxGlat = -90.\n maxEl = -90.\n minEl = 90.\n ncold = 0\n\n # for all input files\n for filename in names:\n\n parts = filename.split('/')\n nparts = len(parts)\n if nparts == 1:\n aname = parts[0]\n else:\n aname = parts[nparts-1]\n\n parts = aname.split('.')\n nparts = len(parts)\n if nparts < 2:\n print 'File is not an astronomy file: ',filename\n continue\n else:\n extension = parts[nparts-1]\n\n extension = extension.upper()\n if extension != 'AST': # speed up by only looking at astronomy files\n continue\n \n rs.read_spec_ast(filename) # An observation, read values\n\n if rs.telel < 0: # only working with observations, skip elevation <= 0.\n continue\n\n maxGlat = max( rs.gallat, maxGlat)\n minGlat = min( rs.gallat, minGlat)\n maxEl = max( rs.telel, maxEl)\n minEl = min( rs.telel, minEl)\n # end for all files loop, looking for max el and latitude ranges\n\n # if any high galactic latitudes, use only above +/-60d \n if minGlat < -60. or maxGlat > 60.:\n minGlat = -60.\n maxGlat = 60.\n else: # else no high galactic latitude data\n # use highest galactic latitudes - +/-5.degrees\n if -minGlat > maxGlat: # if negative latitudes higher\n minGlat = minGlat + 5.\n maxGlat = 90.\n else: # else positive latitudes higher\n maxGlat = maxGlat - 5.\n minGlat = -90.\n\n # only use the elevations above 60 degrees, if any\n if maxEl > 60.:\n maxEl = 60.\n else:\n maxEl = maxEl - 10. #else must use highest elevations available\n\n # now average coldest data for calibration\n for filename in names:\n\n rs.read_spec_ast(filename)\n rs.azel2radec() # compute ra,dec from az,el\n\n if rs.telel < maxEl:\n continue\n\n if rs.gallat > maxGlat or rs.gallat < minGlat:\n avenames[ncold] = filename\n ncold = ncold + 1\n # end of for all files loop\n\n ncold, cold = average( avenames[0:ncold]) # now use generic program for averages\n if ncold < 1:\n print 'No Cold load files; can not calibrate!'\n exit()\n\n return ncold, cold, minEl, maxEl", "def freq_average_slow(self, fmin, fmax):\n cross_spec_points = []\n for cs in self.cross_spectra:\n cross_spec_points += cs.points_in_freqrange(fmin, fmax)\n\n return np.mean(cross_spec_points)", "def avg_spike_frequency_abf(abf, epoch):\n p0 = abf.sweepEpochs.p1s[epoch]\n p1 = abf.sweepEpochs.p1s[epoch+1]\n t = abf.sweepX[p0:p1]\n V = abf.sweepY[p0:p1]\n return avg_spike_frequency(t, V)", "def get_gaia(tpf):\n c1 = SkyCoord(tpf.ra, tpf.dec, frame='icrs', unit='deg')\n result = Vizier.query_region(c1, catalog=[\"I/345/gaia2\"],\n radius=(np.hypot(*np.asarray(tpf.shape[1:])/2) * 4) *u.arcsec)\n result = result[0].to_pandas()\n result = result[result.Gmag < 20]\n cs = []\n for idx, d in result.iterrows():\n if d.Plx > 0:\n dist = Distance(parallax=d.Plx*u.milliarcsecond)\n else:\n dist = np.nan * u.parsec\n\n cs.append(SkyCoord(d.RA_ICRS*u.deg, d.DE_ICRS*u.deg,\n distance=dist,\n pm_ra_cosdec=d.pmRA*u.milliarcsecond/u.year,\n pm_dec=d.pmDE*u.milliarcsecond/u.year,\n obstime=Time('J2015.5'),\n radial_velocity=np.nanmax([0, d.RV])*(u.km/u.s)))\n return cs", "def gavg(idata):\n\t\n\twgt1=np.cos(np.deg2rad(idata.lat))*(idata*0+1)\n\tga=(wgt1*idata).sum(dim=['lat','lon'])/wgt1.sum(dim=['lat','lon'])\n\n\treturn ga", "def average_fft(ut: np.ndarray) -> np.ndarray:\n\n # We average over each row of ut.\n ut_average = np.average(ut, axis=0) # shape (262144,)\n\n return ut_average", "def avg_flux(given_route):\n sum_flux = []\n\n for stop in stops_on_route(given_route):\n sum_flux.append(passenger_on_off(stop))\n \n return np.average(sum_flux)", "def 
hotaverage( names):\n rs = radioastronomy.Spectrum() # create input and average structures\n nhot = 0\n\n avenames = names # create a list of files to average\n\n # for all input files\n for filename in names:\n\n parts = filename.split('/')\n nparts = len(parts)\n if nparts == 1:\n aname = parts[0]\n else:\n aname = parts[nparts-1]\n\n parts = aname.split('.')\n nparts = len(parts)\n if nparts < 2:\n print 'File is not an astronomy file: ',filename\n continue\n else:\n extension = parts[nparts-1]\n\n extension = extension.upper()\n if extension != 'HOT': # speed up by only looking at hot load files\n continue\n \n rs.read_spec_ast(filename)\n\n if rs.telel > 0: # only working with hot load, skip elevation > 0.\n continue\n\n avenames[nhot] = filename\n nhot = nhot + 1\n # end of for all files loop\n\n nhot, hot = average( avenames[0:nhot]) # now use generic program for averages\n if nhot < 1:\n print 'No hot load files; can not calibrate!'\n exit()\n\n return nhot, hot" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
Verification process of signature for file name document
def verification(file_name: str) -> None: print("Verification process...") file_name = os.path.join('data', file_name) file1 = open("data/key.txt", "r") file2 = open("data/signature.txt", "r") p = int(file1.readline().rstrip()) q = int(file1.readline().rstrip()) g = int(file1.readline().rstrip()) h = int(file1.readline().rstrip()) c1 = int(file2.readline().rstrip()) c2 = int(file2.readline().rstrip()) print('c1 = ', c1) print('c2 = ', c2) t1 = sha_hash(file_name) print('hash = ', t1) inverseC2 = compute_inverse(c2, q) t1 = (t1 * inverseC2) % q t2 = compute_inverse(c2, q) t2 = (t2 * c1) % q valid1 = square_multiply(g, t1, p) valid2 = square_multiply(h, t2, p) valid = ((valid1 * valid2) % p) % q if valid == c1: print("Valid signature") else: print("Invalid signature")
[ "def _verify_signature(self):\n #FIXME\n return True", "def verify_signature_dialog():\n signature_name = input(\"Enter signature identity: \")\n file_path = input(\"Enter file path: \")\n user = input(\"Enter username: \")\n\n if not(os.path.exists(user)):\n raise Exception(ERRORS.NOT_FOUND_USER)\n if not(os.path.exists(f\"{signature_name}.sig\")):\n raise Exception(ERRORS.NOT_FOUND_SIGNATURE)\n if not(os.path.exists(file_path)):\n raise Exception(ERRORS.NOT_FOUND_FILE)\n\n with open(user, \"r\") as file:\n _ = int(file.readline())\n y = int(file.readline())\n with open(f\"{signature_name}.sig\", \"r\") as file:\n r = int(file.readline())\n s = int(file.readline())\n with open(file_path, \"rb\") as file:\n file_hash = hashlib.sha256(file.read()).hexdigest()\n file_hash_int = int(file_hash, 16)\n \n if (r<0 or r>=Q) or (s<0 or s>=Q):\n raise Exception(ERRORS.INVALID_SIGNATURE)\n \n w = pow(s, Q-2, Q)\n u1 = (file_hash_int * w) % Q\n u2 = (r * w) % Q\n v = ((pow(G, u1, P) * pow(y, u2, P)) % P) % Q\n\n if v == r:\n print(f\"Signature is valid. The signature {signature_name}.sig verifies that {file_path} is sent by {user}.\")\n return\n \n print(f\"Signature is not valid.\")", "def test_signature_verification(self):\n curdir = os.path.dirname(os.path.abspath(__file__))\n keydir = os.path.join(curdir, \"data\", \"ima_keys\")\n\n lines = SIGNATURES.split('\\n')\n\n # empty keyring\n keyring = ima_file_signatures.ImaKeyring()\n self.assertTrue(ima.process_measurement_list(lines, ima_keyring=keyring) is None)\n\n # add key for 1st entry; 1st entry must be verifiable\n rsakeyfile = os.path.join(keydir, \"rsa2048pub.pem\")\n pubkey, keyidv2 = ima_file_signatures.get_pubkey_from_file(rsakeyfile)\n keyring.add_pubkey(pubkey, keyidv2)\n self.assertTrue(ima.process_measurement_list(lines[0:1], ima_keyring=keyring) is not None)\n self.assertTrue(ima.process_measurement_list(lines[1:2], ima_keyring=keyring) is None)\n\n # add key for 2nd entry; 1st & 2nd entries must be verifiable\n eckeyfile = os.path.join(keydir, \"secp256k1.pem\")\n pubkey, keyidv2 = ima_file_signatures.get_pubkey_from_file(eckeyfile)\n keyring.add_pubkey(pubkey, keyidv2)\n self.assertTrue(ima.process_measurement_list(lines[0:2], ima_keyring=keyring) is not None)", "def validate_signature(self):\r\n\r\n if self.signature:\r\n return\r\n self.signature = self.file.read(8)\r\n if self.signature != _signature:\r\n raise FormatError(\"PNG file has invalid signature.\")", "def _sign_document(self):\n return False", "def verify_signature(self, inputs, signature):\n pass", "def test_create_unfininished_metadata_verify_signature(self):\n in_toto_record_start(\n self.step_name, self.key, [self.test_material])\n link = Link.read_from_file(self.link_name_unfinished)\n link.verify_signatures({self.key[\"keyid\"] : self.key})\n os.remove(self.link_name_unfinished)", "def check_sig(self):\n check_sig(self.path)\n dsc = self.get_dsc()\n if dsc is not None:\n check_sig(dsc)", "def pdf_verification(file_address):\n\ttest_result = check_file(file_address)\n\ttest_result = test_result.split(' ')[0]\n\tif test_resutl == 'PDF':\n\t\treturn True\n\telse:\n\t\treturn False", "def verify_request_signature(req_info: StatusResponse) -> None:\n if not req_info.signature_check(req_info.xmlstr):\n raise ValueError(_(\"Message signature verification failure\"))", "def verify_upload(request):\n args = parser.parse(upload_args, request, targets=('querystring',))\n payload = get_payload(args['message'])\n signature = args['signature']\n try:\n 
sign.Verifiers.verify(request, payload, signature)\n except errors.SignedUrlError as error:\n raise web.HTTPError(\n httplib.BAD_REQUEST,\n reason=error.message,\n )\n return payload, signature", "def check_signature_validity(self) -> None:\n raise NotImplementedError(\"Must be implemented by subclasses\")", "def verify_signature(message: str, public_key: str) -> str:\n pass", "def test_create_metadata_verify_signature(self):\n in_toto_record_start(self.step_name, self.key, [])\n in_toto_record_stop(self.step_name, self.key, [])\n link = Link.read_from_file(self.link_name)\n link.verify_signatures({self.key[\"keyid\"] : self.key})\n os.remove(self.link_name)", "def _check_signature(self, changes):\n if self.config.check_signature:\n try:\n changes.check_sig()\n except BadSignature as ex:\n raise cli.CommandError(\n \"%s. Check if the PGP block exists and if the key is in your \"\n \"keyring\" % ex)", "def verify_sigfile(sigdir, sig):\n cmd = ['sigtool', '-i', '%s/%s.cvd' % (sigdir, sig)]\n sigtool = Popen(cmd, stdout=PIPE, stderr=PIPE)\n ret_val = sigtool.wait()\n return ret_val == 0", "def verify(self):\n if not self.public_key:\n self.fetch_public_key()\n data = self.doc.find(\".//{http://salmon-protocol.org/ns/magic-env}data\").text\n sig = self.doc.find(\".//{http://salmon-protocol.org/ns/magic-env}sig\").text\n sig_contents = '.'.join([\n data,\n b64encode(b\"application/xml\").decode(\"ascii\"),\n b64encode(b\"base64url\").decode(\"ascii\"),\n b64encode(b\"RSA-SHA256\").decode(\"ascii\")\n ])\n sig_hash = SHA256.new(sig_contents.encode(\"ascii\"))\n cipher = PKCS1_v1_5.new(RSA.importKey(self.public_key))\n if not cipher.verify(sig_hash, urlsafe_b64decode(sig)):\n raise SignatureVerificationError(\"Signature cannot be verified using the given public key\")", "def signature(cls, file_name):\n hash_sha256 = hashlib.sha256()\n file_des = open(file_name, 'rb')\n chunk = file_des.read()\n hash_sha256.update(chunk)\n return hash_sha256.hexdigest()", "def test_signature_validation(self):\n signature = app.utils.generate_signed_data(\n self._body,\n settings.PRIVATE_KEY\n )\n\n self.assertTrue(app.utils.validate_signed_data(\n self._body,\n signature,\n settings.PUBLIC_KEY\n ))" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
return path for lights file
def lightPath(self): return mfl.mayaFile( self._path + '/lights.ma' )
[ "def file_path():\n # Get path to LightWave's config folder, and join with the filename\n folder = lwsdk.LWDirInfoFunc(lwsdk.LWFTYPE_SETTING)\n file_path = os.path.join(folder, PRESETS_FILE)\n return file_path", "def darkpath(cam):\n return os.path.join(BASEPATH, cam + \"_dark\")", "def path(self) -> str:\n\t\treturn os.path.join(self.location, self.fname)", "def getPathFile():\n\t\n\treturn getConfDir()+'paths'", "def getBlenderPath(shot):\n return os.path.join(shot.getBaseDir(), \"Blender\")", "def path(self) -> str:\n return self.src + \"/\"", "def sirsam_target_path(data_sirsam):\n return os.path.join(data_sirsam, 'targets', 'geochem_sites_log.shp')", "def environmentImagesPath():\n # A recursion counter to make sure that the loop ends.\n count = 0\n # Get the path to the Blender executable.\n filePath = os.path.dirname(bpy.app.binary_path)\n # Find the lowest path level which contains Blender.\n while \"blender\" not in os.path.basename(filePath).lower():\n filePath = os.path.dirname(filePath)\n if not filePath or count == 20:\n break\n count += 1\n\n # Search all subpaths for the datafiles folder. Based on this folder\n # the path can be completed.\n for dirPath, dirs, fileList in os.walk(filePath):\n if os.path.basename(dirPath) == \"datafiles\":\n return os.path.join(os.path.join(dirPath, \"studiolights\"), \"world\")", "def file_path(self) -> str:\n return pulumi.get(self, \"file_path\")", "def flatpath(cam):\n return os.path.join(BASEPATH, cam + \"_flats\")", "def path(self, filename):\n return os.path.join(self.base_path, filename)", "def file_path(self):\n return self.lib.file_path", "def get_image_path(type='Light') -> str:\n return os.path.join(data_in_path, 'image/Park Map - {}.jpg'.format(type))", "def FilePath(self) -> str:", "def neighbordb_path():\n\n filepath = runtime.default.data_root\n filename = runtime.neighbordb.filename\n return os.path.join(filepath, filename)", "def mitogen_machinectl_path(self):", "def opened_texture_files( self ) :\n return self._path_get( 'texture_path', 'opening_texture_file' )", "def aovsPath(self):\n\t\treturn fl.File( self._path + '/aovs.data' )", "def _set_full_file_path(self):\n if self.name.endswith(\".TIF\"):\n file_path = os.path.join(self.out_path, self.name)\n else:\n file_path = os.path.join(self.out_path, self.name + \"_RGB.TIF\")\n\n return (file_path)" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
return the path for the shader file
def shaderPath(self): return mfl.mayaFile( self._path + '/shaders.ma' )
[ "def getShaderDir():\n\n if fslgl.GL_VERSION == '2.1': subdir = 'gl21'\n elif fslgl.GL_VERSION == '1.4': subdir = 'gl14'\n\n return op.join(fsleyes.assetDir, 'assets', 'gl', subdir)", "def _getFileName(prefix, shaderType):\n\n suffix = getShaderSuffix()\n\n if shaderType not in ('vert', 'frag'):\n raise RuntimeError('Invalid shader type: {}'.format(shaderType))\n\n return op.join(getShaderDir(), '{}_{}.{}'.format(\n prefix, shaderType, suffix))", "def getShaderSuffix():\n\n if fslgl.GL_VERSION == '2.1': return 'glsl'\n elif fslgl.GL_VERSION == '1.4': return 'prog'", "def shaderFromFile(self, shaderType, shaderFile):\n shaderSrc = ''\n with open(shaderFile) as sf:\n shaderSrc = sf.read()\n\n return shaders.compileShader(shaderSrc, shaderType)", "def getFragmentShader(self):\n return self.fshader", "def shader(self):\n #TODO -- think -- if sheader is None create shader without prepare!\n #Note -- because shader accepts list of shader in prepare!!!!!!!\n # And thease shader can be None!\n return self._shader", "def output_stl_path(self):\n head, tail = os.path.split(self.img_path)\n filename, ext = os.path.splitext(tail)\n new_tail = '{}.stl'.format(filename)\n new_path = os.path.join(head, new_tail)\n return new_path", "def get_shader_codes(self):\n vs = VS_TEMPLATE\n fs = FS_TEMPLATE\n \n # Shader headers\n vs_header = self.get_header('vertex')\n fs_header = self.get_header('fragment')\n \n # Varyings\n for varying in self.varyings:\n s1, s2 = get_varying_declarations(varying)\n vs_header += s1\n fs_header += s2\n \n # vs_header += \"\".join(self.vs_headers)\n # fs_header += \"\".join(self.fs_headers)\n \n # Integrate shader headers\n vs = vs.replace(\"%VERTEX_HEADER%\", vs_header)\n fs = fs.replace(\"%FRAGMENT_HEADER%\", fs_header)\n \n # Vertex and fragment main code\n vs_main = self.get_main('vertex')\n fs_main = self.get_main('fragment')\n \n # Integrate shader headers\n vs = vs.replace(\"%VERTEX_MAIN%\", vs_main)\n fs = fs.replace(\"%FRAGMENT_MAIN%\", fs_main)\n \n # frag color or frag data\n if self.fragdata is None:\n fs = fs.replace('%FRAG%', \"\"\"gl_FragColor = out_color;\"\"\")\n else:\n fs = fs.replace('%FRAG%', \"\"\"gl_FragData[%d] = out_color;\"\"\" % self.fragdata)\n \n # Make sure there are no Windows carriage returns\n vs = vs.replace(b\"\\r\\n\", b\"\\n\")\n fs = fs.replace(b\"\\r\\n\", b\"\\n\")\n \n # OLDGLSL does not know the texture function\n if not OLDGLSL:\n fs = fs.replace(\"texture1D(\", \"texture(\" % 2)\n fs = fs.replace(\"texture2D(\", \"texture(\" % 2)\n \n # set default color\n fs = fs.replace('%DEFAULT_COLOR%', str(self.default_color))\n \n # replace GLSL version header\n vs = vs.replace('%GLSL_VERSION_HEADER%', self.version_header)\n fs = fs.replace('%GLSL_VERSION_HEADER%', self.version_header)\n \n # replace GLSL precision header\n vs = vs.replace('%GLSL_PRECISION_HEADER%', self.precision_header)\n fs = fs.replace('%GLSL_PRECISION_HEADER%', self.precision_header)\n \n return vs, fs", "def _compile_shader_source(self, source_fn, shader_type):\n\n try:\n with open(source_fn, \"r\") as f:\n content = f.read()\n\n shader_id = GL.glCreateShader(shader_type)\n GL.glShaderSource(shader_id, content)\n GL.glCompileShader(shader_id)\n\n # Check compilation errors\n if GL.glGetShaderiv(shader_id, GL.GL_COMPILE_STATUS) == GL.GL_FALSE:\n info = GL.glGetShaderInfoLog(shader_id)\n raise RuntimeError(\"Error while compiling shader (%s)\\n\\n%s\"%(shader_type, info))\n \n return shader_id\n\n except IOError:\n raise RuntimeError(\"Could not load the shader file: 
%s\"%(source_fn))", "def file_path(self) -> global___Expression:", "def shader_with_tex_offset(offset):\n\n return FileShader(shader_source_with_tex_offset(offset), \".vert\")", "def compile(self):\n if not self.isCompiled():\n if self.file is not None:\n try:\n if self.tipo == VERTEX:\n self.shader = glCreateShader(GL_VERTEX_SHADER)\n else:\n self.shader = glCreateShader(GL_FRAGMENT_SHADER)\n glShaderSource(self.shader, self.file)\n glCompileShader(self.shader)\n self.compiled = True\n except:\n raise Exception(\"error al compilar el shader\")\n else:\n raise Exception(\"no se ha cargado un archivo\")\n else:\n print \"Error :: el shader ya ha sido compilado\"", "def path(self) -> str:\n return self.src + \"/\"", "def lightPath(self):\n\t\treturn mfl.mayaFile( self._path + '/lights.ma' )", "def loadShader(shaderpath, shadername, vertexFormatList=None, fragmentFormatlist=None):\n fragment = Shader(shaderpath + shadername + \".fsh\", FRAGMENT, True, fragmentFormatlist)\n vertex = Shader(shaderpath + shadername + \".vsh\", VERTEX, True, vertexFormatList)\n return ShaderProgram(vertex, fragment, True)", "def getPath(self):\n return os.path.join(self.brick._brick_path, 'input', self.name)", "def file_path():\n # Get path to LightWave's config folder, and join with the filename\n folder = lwsdk.LWDirInfoFunc(lwsdk.LWFTYPE_SETTING)\n file_path = os.path.join(folder, PRESETS_FILE)\n return file_path", "def path(self) -> str:\n\t\treturn os.path.join(self.location, self.fname)", "def getVertexShader(self):\n return self.vshader" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
return the path for the aovs file
def aovsPath(self): return fl.File( self._path + '/aovs.data' )
[ "def GetAAPath(topomsafile):#{{{\n if g_params['aapath'] != \"\":\n return g_params['aapath']\n else:\n return myfunc.my_dirname(topomsafile)", "def path(self) -> str:\n\t\treturn os.path.join(self.location, self.fname)", "def file_path(self) -> str:\n return pulumi.get(self, \"file_path\")", "def getPathFile():\n\t\n\treturn getConfDir()+'paths'", "def file_path(self) -> global___Expression:", "def file_path(self):\n return self.lib.file_path", "def path(self) -> str:\n return self.src + \"/\"", "def get_path(self):\n raise NotImplementedError(\"This asset does not support absolute paths\")", "def get_path(self):\n\n if not self.path:\n Settings.err_print(\"missing file path\")\n return \"\"\n return self.path", "def path(self, filename):\n return os.path.join(self.base_path, filename)", "def fullpath(self):\n return f\"{self.basepath}/{self.filename}\"", "def get_full_path(self):\n return '/'.join([self.get_path(), self.file_name])", "def outlog_path(self):\n f_txt = '%s.txt' % self.assessor_label\n return os.path.join(self.diskq, OUTLOG_DIRNAME, f_txt)", "def FilePath(self) -> str:", "def outlog_path(self):\n f_out = '%s.txt' % self.assessor_label\n return os.path.join(self.diskq, OUTLOG_DIRNAME, f_out)", "def file_path(self):\n if self._file_path is None:\n self._file_path = os.path.join(self.group_dir, self.table_name)\n return self._file_path", "def __get_file_path(self, path):\n\n fpath = os.path.sep.join([self.dc.get_repodir(), \"publisher\",\n \"test\", \"file\"])\n fhash = self.fhashes[path]\n return os.path.sep.join([fpath, fhash[0:2], fhash])", "def getAbsFile(self):\n absPathFile = os.path.normpath(os.path.join(self.getPath(),self.getFilename()))\n return absPathFile", "def sas_file(self):\n\n return os.path.normpath(self.path +'\\\\'+ cfg_dict['format_pgm'])" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
return the path for the masterLayer data
def masterPath(self): return fl.File( self._path + '/master.data' )
[ "def path(self):\n return self.dataset.mountpoint", "def get_data_path(self):\n return self.__path + '/data/'", "def data_path(self) -> str:\n return self._get(\"data_path\", \"./data/\")", "def getPathName(self):\n return self.data.pathName", "def getDataPath():\n\treturn \"..\" + os.sep + \"data\" + os.sep", "def path_in_hdf5(self):\n return '/'", "def path(self):\n return path_join(self.container.name, self.name)", "def getPath(self):\n return self.rootFolder.getLongName() + \"\\\\\" + self.keyPath", "def get_data_path(path):\n\n data_path = Path(self.kard.meta.get('data_path', 'data'))\n\n if data_path.is_absolute():\n return str(data_path / path)\n\n return str(self.kard_folder_path / self.kard.name / data_path /\n path)", "def _get_rig_data_path(self):\n pass", "def path(self):\n if self._path is None:\n self._path = Path(\n os.environ.get(\n \"RAPIDS_DATASET_ROOT_DIR\", Path.home() / \".cugraph/datasets\"\n )\n )\n return self._path", "def primary_data_dir(self):\n return self.__primary_data_dir", "def path(self) -> str:\n return (\n f\"/projects/{self.project}/datasets/{self.dataset_id}\"\n f\"/tables/{self.table_id}\"\n )", "def master_path(stub, directory=config.MASTER_DIR):\n logging.info(\"Building master filepath for {0}\".format(stub))\n # return config.MASTER_DIR+\"/\"+\"ftb_\"+stub+\".txt\"\n return os.path.join(directory, \"ftb_\" + stub + \".txt\")", "def root_path(self) -> Path:\n return ARCHIVES_ROOT / self.source_name / self.key", "def get_master_url(self, identifier) -> None:\n # TODO(victorhc): Implement the following method to fetch the cluster\n # master_url from Dataproc.\n return '.'.join([\n self.cluster_metadata.project_id,\n self.cluster_metadata.region,\n self.cluster_metadata.cluster_name\n ])", "def get_server_path() -> Path:\n\n if DATA_PATH.is_file():\n return Path(DATA_PATH.read_text().strip())\n\n return gen_and_save_server_path()", "def path(self):\n return self.server.base_path", "def workspace_path(self):\n raise NotImplementedError" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
export master layer settings so we can re apply it
def exportMasterLayerSettings(self): master = rlayer.RenderLayer( 'defaultRenderLayer' ) master.makeCurrent() masterData = {} nodes = ['defaultArnoldRenderOptions','defaultResolution','defaultRenderGlobals'] mnNodes =[ mn.Node( n ) for n in nodes ] for n in mnNodes: for a in n.listAttr( se = True, v = True, w = True ): try: masterData[a] = a.v except: continue pickle.dump( masterData, open( self.masterPath.path, "wb" ) )
[ "def importMasterSettings(self):\n\t\tpickleData = pickle.load( open( self.masterPath.path, \"rb\" ) )\n\t\tmaster = rlayer.RenderLayer( 'defaultRenderLayer' )\n\t\tmaster.makeCurrent()\n\t\tfor a in pickleData.keys():\n\t\t\ttry:\n\t\t\t\ta.v = pickleData[a]\n\t\t\texcept:\n\t\t\t\tcontinue", "def apply_settings_objects(self):\n \"\"\" https://tangentanimation.sharepoint.com/wiki/Pages/Layer%20Conventions.aspx \"\"\"\n display_layer = 9\n\n bpy.data.objects[self.nul_object_name].hide = True\n bpy.data.objects[self.nul_object_name].hide_select = True\n\n bpy.data.cameras[self.cam_data_name].show_name = True\n bpy.context.scene.objects[self.cam_object_name].location = (0, 0, 1.5)\n\n # bpy.data.objects[self.cam_object_name].layers[0] = False\n # bpy.data.objects[self.cam_object_name].layers[display_layer] = True\n # bpy.data.objects[self.nul_object_name].layers[0] = False\n # bpy.data.objects[self.nul_object_name].layers[display_layer] = True\n\n # bpy.context.scene.layers[0]=True\n # bpy.context.scene.layers[display_layer]=True\n\n self.conoutmessage = (\"Applying final settings\")\n if self.conout:\n print(self.conoutmessage)\n self.log.append(self.conoutmessage)", "def restore_export_preset():\n run_mel_command(\"FBXResetExport\")", "def exportData(self):\n\t\tlays = rlayer.renderlayers()\n\t\tdata = {}\n\t\tfor l in lays:\n\t\t\tif l.name == 'defaultRenderLayer':\n\t\t\t\tcontinue\n\t\t\tdata[l.name] = {'objects':l.objects, # OBJECTS IN LAYER\n\t\t\t\t\t\t\t'values' :l.overridesWithValues, # OVERRIDED ATTRIBUTES ONLY CHANGED VALUES\n\t\t\t\t\t\t\t'conns' :l.overridesWithConnections[0], # OVERRIDED ATTRIBUTES CHANGED CONNECTIONS\n\t\t\t\t\t\t\t'shader' :l.overridedShader # OVERRIDE RENDERLAYER SHADER\n\t\t\t\t\t\t\t}\n\t\tpickle.dump( data, open( self.dataPath.path, \"wb\" ) )", "def configureMaster(self):\n\t\t\n\t\tfin = open('/opt/google/earth/free/drivers.ini', 'r')\n\t\tfout = open('/etc/X11/ge-drivers.ini', 'w')\n\t\t\n\t\tfor line in fin.readlines():\n\t\t\tfout.write(line)\n\t\t\tif line.find('SETTINGS {') != 0:\n\t\t\t\tcontinue\n\t\t\tfout.write('\\tViewSync/send = true\\n')\n\t\t\tfout.write('\\tViewSync/receive = false\\n')\n\n\t\t\tfout.write('\\tViewSync/hostname = %s\\n' %\n\t\t\t\t self.db.getHostAttr('localhost',\n\t\t\t\t\t\t 'Kickstart_PrivateBroadcast'))\n fout.write('\\tViewSync/port = 21567\\n')\n\t\t\tfout.write('\\n')\n\t\t\tfout.write('\\tViewSync/horizFov = 60\\n')\n fout.write('\\tViewSync/rollOffset = 0\\n')\n fout.write('\\tViewSync/yawOffset = 0\\n')\n\t\t\tfout.write('\\tViewSync/pitchOffset = 0\\n')\n\t\t\tfout.write('\\n')\n\n\n\t\tfin.close()\n\t\tfout.close()\n\n\t\tshutil.copy('/etc/X11/ge-drivers.ini', '/opt/google/earth/free/drivers.ini')", "def persist_settings(self):\r\n self.KCube.CC_PersistSettings(self.serial)", "def saveToolSettings(*args, **kwargs):\n\n pass", "def clear():\n _global_config.layers = []", "def updateLayers(self):\n\t\tself.layers = self.extractLayers()", "def save_layer(index, settings) -> Action:\n return {\n \"kind\": SAVE_LAYER,\n \"payload\": {\"index\": index, \"settings\": settings},\n }", "def save_config():\n # Order the load flags using load_keys...\n od_load_flags = OrderedDict()\n for k in load_keys:\n od_load_flags[k] = load_flags[k]\n pawstools.save_cfg(od_load_flags,cfg_file)", "def saveToolSettings(*args, **kwargs)->None:\n pass", "def backup_config(self):\n if not os.path.isdir(self.data_dir):\n os.makedirs(self.data_dir)\n\n _info('Exporting templates and hostgroups')\n 
self.export_component_config('template', 'templateid', 'templates',\n 'templates')\n self.export_component_config('hostgroup', 'groupid', 'groups',\n 'hostgroups')\n _info('Exporting hosts')\n self.export_component_config('host', 'hostid', 'hosts', 'hosts')\n _info('Exporting registration actions')\n self.export_action_config(2, 'reg_actions',\n 'auto-registration actions')\n _info('Exporting trigger actions')\n self.export_action_config(0, 'trigger_actions',\n 'trigger actions')\n _info('Exporting media types')\n self.export_component_config('mediatype', 'mediatypeid', 'mediaTypes', 'mediatypes')\n\n _info('Exporting services')\n self.export_component('service', 'services')\n\n _info('Exporting proxies')\n self.export_component('proxy', 'proxies')\n\n self.get_id_file()", "def save_switch_configs(self):", "def export_configurations():\n pass", "def restore_backup_stats(self):\n self.layer_dict['batchNorm1'].restore_backup_stats()\n self.layer_dict['batchNorm2'].restore_backup_stats()\n self.layer_dict['batchNorm3'].restore_backup_stats()\n self.layer_dict['batchNorm4'].restore_backup_stats()", "def save_to_cfg(self):\n self.cfg['viewport']['vp_current_grid'] = str(self.vp_current_grid)\n self.cfg['viewport']['vp_current_ov'] = str(self.vp_current_ov)\n self.cfg['viewport']['vp_tile_preview_mode'] = str(\n self.vp_tile_preview_mode)\n self.cfg['viewport']['show_labels'] = str(self.show_labels)\n self.cfg['viewport']['show_axes'] = str(self.show_axes)\n self.cfg['viewport']['show_stub_ov'] = str(self.show_stub_ov)\n self.cfg['viewport']['show_imported'] = str(self.show_imported)\n self.cfg['viewport']['show_native_resolution'] = str(\n self.show_native_res)\n self.cfg['viewport']['show_saturated_pixels'] = str(\n self.show_saturated_pixels)\n\n self.cfg['viewport']['sv_current_grid'] = str(self.sv_current_grid)\n self.cfg['viewport']['sv_current_tile'] = str(self.sv_current_tile)\n self.cfg['viewport']['sv_current_ov'] = str(self.sv_current_ov)\n\n self.cfg['viewport']['m_current_grid'] = str(self.m_current_grid)\n self.cfg['viewport']['m_current_tile'] = str(self.m_current_tile)\n self.cfg['viewport']['m_current_ov'] = str(self.m_current_ov)", "def editDisplayLayerGlobals(*args, **kwargs):\n\n pass", "def save_bridge_mappings(self):" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
export data from scene, objects overrides in renderlayers.. etc
def exportData(self): lays = rlayer.renderlayers() data = {} for l in lays: if l.name == 'defaultRenderLayer': continue data[l.name] = {'objects':l.objects, # OBJECTS IN LAYER 'values' :l.overridesWithValues, # OVERRIDED ATTRIBUTES ONLY CHANGED VALUES 'conns' :l.overridesWithConnections[0], # OVERRIDED ATTRIBUTES CHANGED CONNECTIONS 'shader' :l.overridedShader # OVERRIDE RENDERLAYER SHADER } pickle.dump( data, open( self.dataPath.path, "wb" ) )
[ "def export_scene():\n try:\n # check for mesh\n mesh = cmds.ls(typ='mesh')\n if not mesh:\n cmds.warning('{0}: No mesh to export.'.format(__file__))\n return None\n else:\n cmds.select(mesh)\n # check plug-in\n if not cmds.pluginInfo('fbxmaya', q=True, l=True):\n cmds.loadPlugin('fbxmaya')\n\n file_path = cmds.file(q=True, exn=True)\n # todo: exclude unnecessary stuff for exporting, the engine only read .mesh now\n # painter doesn't support fbx 2016 yet\n # todo: checkout exported file from perforce\n return cmds.file(file_path, typ='DAE_FBX export', es=True)\n except: # todo: too broad exception, need to narrow down\n cmds.warning('{0}: Failed to export scene.'.format(__file__))", "def export_cube(self):\r\n export.export_cube(self.filename, self.data, self.dataview)", "def exportState(viewer, group, outData):\n gui = viewer.client.gui\n objNames = set([])\n # retrieve object names\n for obj in gui.getGroupNodeList(group):\n objNames.add(obj)\n while len(objNames) > 0:\n obj = objNames.pop()\n if obj not in outData:\n outData[obj] = []\n objFrame = outData[obj]\n objFrame.append(gui.getNodeGlobalTransform(obj))", "def menu_save_scene(self):\n file_name = QtGui.QFileDialog().getSaveFileName(self, \"Save Scene to File\", get_data_path(), \"*.pkl\")\n with open(file_name, \"wb\") as f:\n pickle.dump(self.scene, f, pickle.HIGHEST_PROTOCOL)", "def export(self, *args):\r\n if not self._hasSelection():\r\n return\r\n if self._settings.get('settings', 'exportdir') == '':\r\n if self._settings.updateConfig(menuExportDir, 'exportdir') == 'canceled':\r\n return\r\n exportMeshes = cmds.ls(selection=True, l=True)\r\n #Checks for duplicate objects selected\r\n for i in range(len(exportMeshes)):\r\n exportMeshes[i] = self._checkRenderMeshName(exportMeshes[i])\r\n\r\n #Removes duplicate items\r\n exportMeshes = list(set(exportMeshes))\r\n for renderMeshName in exportMeshes:\r\n meshes = self._getMeshes(renderMeshName)\r\n if meshes:\r\n mainMesh = meshes[0]\r\n else:\r\n mainMesh = renderMeshName\r\n fileName = mainMesh.split('|')[-1]\r\n path = self._settings.get('settings', 'exportDir') + fileName\r\n position = cmds.xform(mainMesh, ws=True, q=True, t=True)\r\n rotation = cmds.xform(mainMesh, ws=True, q=True, ro=True)\r\n cmds.select(d=True)\r\n #Center Meshes\r\n if self._settings.getboolean('settings', 'centerMeshes'):\r\n cmds.xform(mainMesh, r=True, t=([axis * -1 for axis in position]))\r\n cmds.xform(mainMesh, r=True, eu=True, ro=([axis * -1 for axis in rotation]))\r\n cmds.select(mainMesh)\r\n #FBX export\r\n if self._settings.getboolean('settings', 'exportFBX'):\r\n cmds.file(path + '.fbx', exportSelected=True, type='FBX export', force=True)\r\n print('Exported: ' + fileName + '.fbx')\r\n #OBJ export\r\n if self._settings.getboolean('settings', 'exportOBJ'):\r\n cmds.file(path + '.obj', exportSelected=True, type='OBJexport', force=True, op=\"materials=0\")\r\n print('Exported: ' + fileName + '.obj')\r\n cmds.select(d=True)\r\n #Decenter Meshes\r\n if self._settings.getboolean('settings', 'centerMeshes'):\r\n cmds.xform(mainMesh, r=True, eu=True, ro=rotation)\r\n cmds.xform(mainMesh, r=True, t=position)\r\n cmds.select(d=True)", "def test_to_from_scene(self): # pragma: lpy\n super(TestObjDict, self).test_to_from_scene(_as_obj=True)", "def export_stl(self, **kwargs):\n self.scene.export_stl(**kwargs)", "def import_ig_scene(self, scene):\n assert isinstance(scene, InteractiveIndoorScene), \\\n 'import_ig_scene can only be called with InteractiveIndoorScene'\n new_object_ids = scene.load()\n 
self.objects += new_object_ids\n if scene.texture_randomization:\n # use randomized texture\n for body_id, visual_mesh_to_material in \\\n zip(new_object_ids, scene.visual_mesh_to_material):\n shadow_caster = True\n if scene.objects_by_id[body_id].category == 'ceilings':\n shadow_caster = False\n class_id = self.class_name_to_class_id.get(\n scene.objects_by_id[body_id].category, SemanticClass.SCENE_OBJS)\n self.load_articulated_object_in_renderer(\n body_id,\n class_id=class_id,\n visual_mesh_to_material=visual_mesh_to_material,\n shadow_caster=shadow_caster)\n else:\n # use default texture\n for body_id in new_object_ids:\n use_pbr = True\n use_pbr_mapping = True\n shadow_caster = True\n if scene.scene_source == 'IG':\n if scene.objects_by_id[body_id].category in ['walls', 'floors', 'ceilings']:\n use_pbr = False\n use_pbr_mapping = False\n if scene.objects_by_id[body_id].category == 'ceilings':\n shadow_caster = False\n class_id = self.class_name_to_class_id.get(\n scene.objects_by_id[body_id].category, SemanticClass.SCENE_OBJS)\n self.load_articulated_object_in_renderer(\n body_id,\n class_id=class_id,\n use_pbr=use_pbr,\n use_pbr_mapping=use_pbr_mapping,\n shadow_caster=shadow_caster)\n self.scene = scene\n\n return new_object_ids", "def export( self, captionMode, copyFiles, outputDir ):\n scene = slicer.mrmlScene\n nodes = scene.GetNumberOfNodes()\n\n self.__nodes = {}\n\n # 1 for model name, 2 for parent name\n self.__captionMode = captionMode\n # TRUE if we shall copy the files to the outputDir\n self.__copyFiles = copyFiles\n self.__outputDir = outputDir\n\n self.__tree = Tree()\n self.__tree.create_node( \"Scene\", \"scene\" )\n\n for n in xrange( nodes ):\n\n node = scene.GetNthNode( n )\n\n self.parseNode( node )\n\n [header, footer] = self.configureXrenderers()\n output = header\n output += self.createXtree( \"scene\" )\n output += footer\n\n return output", "def __render_scene(self, scene):\n\n # Name and location of the exported project.\n project_dir = os.path.join(tempfile.gettempdir(), \"blenderseed\", \"render\")\n project_filepath = os.path.join(project_dir, \"render.appleseed\")\n\n # Create target directories if necessary.\n if not os.path.exists(project_dir):\n try:\n os.makedirs(project_dir)\n except os.error:\n self.report({\"ERROR\"}, \"The directory {0} could not be created. 
Check directory permissions.\".format(project_dir))\n return\n\n # Generate project on disk.\n self.update_stats(\"\", \"appleseed Rendering: Exporting Scene\")\n writer = projectwriter.Writer()\n writer.write(scene, project_filepath)\n\n # Render project.\n self.__render_project_file(scene, project_filepath, project_dir)", "def send_scene_informations(self):\n self.send_player_position()\n self.send_player_direction()\n self.send_grafik_objects()", "def export(self,file_name,save_name):\n check_name = self.check_save_name(file_name,save_name)\n if check_name:\n map = self.get_map()\n data = []\n file = open(file_name, 'r')\n saved_data = []\n for line in file:\n saved_data.append(line)\n file.close()\n file = open(file_name,'w+')\n for line in saved_data:\n file.write(line)\n file.write(\"######\" + \"\\n\")\n file.write(save_name.lower() + \"\\n\")\n for obj in map:\n # print(obj)\n coords = obj.get_coordinates()\n width = obj.get_width()\n height = obj.get_height()\n if isinstance(obj, Wall):\n obj_type = 'Wall'\n\n if isinstance(obj, Bar):\n obj_type = \"Bar\"\n if isinstance(obj, Toilet):\n obj_type = \"Toilet\"\n\n if isinstance(obj, DanceFloor):\n obj_type = \"DanceFloor\"\n\n data = [obj_type, coords, width, height]\n # Different data needed to record person objects\n\n if isinstance(obj,Person):\n obj_type = 'Person'\n coords = obj.get_coordinates()\n angle = obj.get_angle()\n width = obj.get_width()\n data = [obj_type,coords,width,angle]\n\n str1 = '/'.join(str(e) for e in data)\n file.write(str1 + \"\\n\")\n file.close()\n return True\n else:\n return False", "def __write_scene(self):\n self.__file.write(\" <scene>\\n\")\n self.__file.write(\" <ambient>0.4 0.4 0.4 1</ambient>\\n\")\n self.__file.write(\" <background>0.7 0.7 0.7 1</background>\\n\")\n self.__file.write(\" <shadows>1</shadows>\\n\")\n self.__file.write(\" </scene>\\n\")", "def serialize_game(self):\n\t\tprint 'full serialization not implemented yet'", "def get_scene_info(self):\n pass", "def objects_to_bmesh(objs, transform=True):\n\n # CAUTION: Removes/destroys custom layer props\n\n # Creates the mesh used to merge the entire scene\n bm_all = bmesh.new()\n\n # Adds the objects\" meshes to the bmesh\n for obj in objs:\n dprint(\"Preparing object {} for export...\".format(obj.name))\n # Creates a bmesh from the supplied object\n bm = bmesh.new()\n bm.from_mesh(obj.data)\n\n # Makes sure all layers exist so values don't get lost while exporting\n uv_layer = bm.loops.layers.uv.get(\"UVMap\")\n tex_layer = bm.faces.layers.tex.get(\"UVMap\")\n vc_layer = (bm.loops.layers.color.get(\"Col\") or\n bm.loops.layers.color.new(\"Col\"))\n env_layer = (bm.loops.layers.color.get(\"Env\") or\n bm.loops.layers.color.new(\"Env\"))\n env_alpha_layer = (bm.faces.layers.float.get(\"EnvAlpha\") or\n bm.faces.layers.float.new(\"EnvAlpha\"))\n va_layer = (bm.loops.layers.color.get(\"Alpha\") or\n bm.loops.layers.color.new(\"Alpha\"))\n texnum_layer = bm.faces.layers.int.get(\"Texture Number\")\n type_layer = (bm.faces.layers.int.get(\"Type\") or\n bm.faces.layers.int.new(\"Type\"))\n material_layer = (bm.faces.layers.int.get(\"Material\") or\n bm.faces.layers.int.new(\"Material\"))\n\n # Removes the parent for exporting and applies transformation\n parent = obj.parent\n if parent:\n mat = obj.matrix_world.copy()\n old_mat = obj.matrix_basis.copy()\n obj.parent = None\n obj.matrix_world = mat\n\n spc = obj.matrix_basis\n bmesh.ops.scale(\n bm,\n vec=obj.scale,\n space=spc,\n verts=bm.verts\n )\n if transform:\n bmesh.ops.transform(\n 
bm,\n matrix=Matrix.Translation(obj.location),\n space=spc,\n verts=bm.verts\n )\n bmesh.ops.rotate(\n bm,\n cent=obj.location,\n matrix=obj.rotation_euler.to_matrix(),\n space=spc,\n verts=bm.verts\n )\n\n # Restores the parent relationship\n if parent and not obj.parent:\n obj.parent = parent\n obj.matrix_basis = old_mat\n\n # Converts the transformed bmesh to mesh\n new_mesh = bpy.data.meshes.new(\"ncp_export_temp\")\n bm.to_mesh(new_mesh)\n\n # Adds the transformed mesh to the big bmesh\n bm_all.from_mesh(new_mesh)\n\n # Removes unused meshes\n bpy.data.meshes.remove(new_mesh, do_unlink=True)\n bm.free()\n\n return bm_all", "def visualize_obj(obj_path, *args, color=\"lightcoral\", **kwargs):\n print(\"Visualizing : \" + obj_path)\n scene = Scene(add_root=True)\n scene.add_from_file(obj_path, *args, c=color, **kwargs)\n\n return scene", "def load_obj_into_brainrender(\n scene, obj_file, color=None, alpha=0.8, shading=\"phong\"\n):\n obj_file = str(obj_file)\n if color is None:\n color = get_random_vtkplotter_color()\n act = scene.add_from_file(obj_file, c=color, alpha=alpha)\n\n if shading == \"flat\":\n act.GetProperty().SetInterpolationToFlat()\n elif shading == \"gouraud\":\n act.GetProperty().SetInterpolationToGouraud()\n else:\n act.GetProperty().SetInterpolationToPhong()", "def exportState(viewer, robot, configuration, outData):\n save = viewer.robotConfig\n viewer(configuration)\n etb.exportState(viewer, robot.getRobotName(), outData)\n viewer(save)\n # ~ robot.setCurrentConfig(configuration)\n # ~ objNames = set([])\n # ~ #retrieve object names\n # ~ for joint in robot.getAllJointNames():\n # ~ for obj in robot.getJointInnerObjects(joint):\n # ~ objNames.add(obj)\n # ~ for obj in robot.getJointOuterObjects(joint):\n # ~ objNames.add(obj)\n # ~ while len(objNames) > 0:\n # ~ obj = objNames.pop()\n # ~ if not outData.has_key(obj):\n # ~ outData[obj] = []\n # ~ objFrame = outData[obj]\n # ~ objFrame.append(robot.getObjectPosition(obj))" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
export lights from scene
def exportLights(self): #TODO! REMOVE CONSTRAINS lights = mc.ls( typ=['light','aiAreaLight','aiSkyDomeLight','aiVolumeScattering','aiSky'], l=1 ) mc.editRenderLayerGlobals( currentRenderLayer = 'defaultRenderLayer' ) litsToExport = [] for li in lights: finalLi = li.split( '|' ) if len(finalLi) == 1: litsToExport.append( finalLi[0] ) else: litsToExport.append( finalLi[1] ) if litsToExport: mc.select( litsToExport, r=1, ne=1 ) mc.file( self.lightPath.path, op="v=0", typ="mayaAscii", pr=1, es=1 ) #export Light Linking self.exportLightLinking()
[ "def export_lights(lamps, file, scene, global_matrix, tab_write):\n\n from .render import write_matrix, tab_write\n\n # Incremented after each lamp export to declare its target\n # currently used for Fresnel diffuse shader as their slope vector:\n global exported_lights_count\n # Get all lamps and keep their count in a global variable\n for exported_lights_count, ob in enumerate(lamps, start=1):\n lamp = ob.data\n\n matrix = global_matrix @ ob.matrix_world\n\n # Color is no longer modified by energy\n # any way to directly get bpy_prop_array as tuple?\n color = tuple(lamp.color)\n\n tab_write(file, \"light_source {\\n\")\n tab_write(file, \"< 0,0,0 >\\n\")\n tab_write(file, \"color srgb<%.3g, %.3g, %.3g>\\n\" % color)\n\n if lamp.type == \"POINT\":\n pass\n elif lamp.type == \"SPOT\":\n tab_write(file, \"spotlight\\n\")\n\n # Falloff is the main radius from the centre line\n tab_write(file, \"falloff %.2f\\n\" % (degrees(lamp.spot_size) / 2.0)) # 1 TO 179 FOR BOTH\n tab_write(\n file, \"radius %.6f\\n\" % ((degrees(lamp.spot_size) / 2.0) * (1.0 - lamp.spot_blend))\n )\n\n # Blender does not have a tightness equivalent, 0 is most like blender default.\n tab_write(file, \"tightness 0\\n\") # 0:10f\n\n tab_write(file, \"point_at <0, 0, -1>\\n\")\n if lamp.pov.use_halo:\n tab_write(file, \"looks_like{\\n\")\n tab_write(file, \"sphere{<0,0,0>,%.6f\\n\" % lamp.distance)\n tab_write(file, \"hollow\\n\")\n tab_write(file, \"material{\\n\")\n tab_write(file, \"texture{\\n\")\n tab_write(file, \"pigment{rgbf<1,1,1,%.4f>}\\n\" % (lamp.pov.halo_intensity * 5.0))\n tab_write(file, \"}\\n\")\n tab_write(file, \"interior{\\n\")\n tab_write(file, \"media{\\n\")\n tab_write(file, \"emission 1\\n\")\n tab_write(file, \"scattering {1, 0.5}\\n\")\n tab_write(file, \"density{\\n\")\n tab_write(file, \"spherical\\n\")\n tab_write(file, \"color_map{\\n\")\n tab_write(file, \"[0.0 rgb <0,0,0>]\\n\")\n tab_write(file, \"[0.5 rgb <1,1,1>]\\n\")\n tab_write(file, \"[1.0 rgb <1,1,1>]\\n\")\n tab_write(file, \"}\\n\")\n tab_write(file, \"}\\n\")\n tab_write(file, \"}\\n\")\n tab_write(file, \"}\\n\")\n tab_write(file, \"}\\n\")\n tab_write(file, \"}\\n\")\n tab_write(file, \"}\\n\")\n elif lamp.type == \"SUN\":\n tab_write(file, \"parallel\\n\")\n tab_write(file, \"point_at <0, 0, -1>\\n\") # *must* be after 'parallel'\n\n elif lamp.type == \"AREA\":\n tab_write(file, \"fade_distance %.6f\\n\" % (lamp.distance / 2.0))\n # Area lights have no falloff type, so always use blenders lamp quad equivalent\n # for those?\n tab_write(file, \"fade_power %d\\n\" % 2)\n size_x = lamp.size\n samples_x = lamp.pov.shadow_ray_samples_x\n if lamp.shape == \"SQUARE\":\n size_y = size_x\n samples_y = samples_x\n else:\n size_y = lamp.size_y\n samples_y = lamp.pov.shadow_ray_samples_y\n\n tab_write(\n file,\n \"area_light <%.6f,0,0>,<0,%.6f,0> %d, %d\\n\"\n % (size_x, size_y, samples_x, samples_y),\n )\n tab_write(file, \"area_illumination\\n\")\n if lamp.pov.shadow_ray_sample_method == \"CONSTANT_JITTERED\":\n if lamp.pov.use_jitter:\n tab_write(file, \"jitter\\n\")\n else:\n tab_write(file, \"adaptive 1\\n\")\n tab_write(file, \"jitter\\n\")\n\n # No shadow checked either at global or light level:\n if not scene.pov.use_shadows or (lamp.pov.shadow_method == \"NOSHADOW\"):\n tab_write(file, \"shadowless\\n\")\n\n # Sun shouldn't be attenuated. 
Area lights have no falloff attribute so they\n # are put to type 2 attenuation a little higher above.\n if lamp.type not in {\"SUN\", \"AREA\"}:\n if lamp.falloff_type == \"INVERSE_SQUARE\":\n tab_write(file, \"fade_distance %.6f\\n\" % (sqrt(lamp.distance / 2.0)))\n tab_write(file, \"fade_power %d\\n\" % 2) # Use blenders lamp quad equivalent\n elif lamp.falloff_type == \"INVERSE_LINEAR\":\n tab_write(file, \"fade_distance %.6f\\n\" % (lamp.distance / 2.0))\n tab_write(file, \"fade_power %d\\n\" % 1) # Use blenders lamp linear\n elif lamp.falloff_type == \"CONSTANT\":\n tab_write(file, \"fade_distance %.6f\\n\" % (lamp.distance / 2.0))\n tab_write(file, \"fade_power %d\\n\" % 3)\n # Use blenders lamp constant equivalent no attenuation.\n # Using Custom curve for fade power 3 for now.\n elif lamp.falloff_type == \"CUSTOM_CURVE\":\n tab_write(file, \"fade_power %d\\n\" % 4)\n\n write_matrix(file, matrix)\n\n tab_write(file, \"}\\n\")\n\n # v(A,B) rotates vector A about origin by vector B.\n file.write(\n \"#declare lampTarget%s= vrotate(<%.4g,%.4g,%.4g>,<%.4g,%.4g,%.4g>);\\n\"\n % (\n exported_lights_count,\n -ob.location.x,\n -ob.location.y,\n -ob.location.z,\n ob.rotation_euler.x,\n ob.rotation_euler.y,\n ob.rotation_euler.z,\n )\n )", "def plant_lights():", "def exportLightLinking(self):\n\t\tlights = [a for a in mc.ls( typ = ['light','aiAreaLight'] ) if not 'eye' in a]\n\t\tallShapes = [s for s in mc.ls( type = 'geometryShape', ni = 1) if not (mc.objectType( s ) in ( 'aiAreaLight','aiSkyDomeLight' ))]\n\t\tlitLinks = {}\n\t\tfor l in lights:\n\t\t\tlightLinkShapes = mc.lightlink( query=True, light=l ,shp=1,t=0,set=0,h=0)\n\t\t\tlitLinks[l]\t = list( set( allShapes ) - set( lightLinkShapes ) )#SHAPES WITH NO LINK TO THIS LIGHT\n\t\tpickle.dump( litLinks, open( self.lightLinkPath.path, \"wb\" ) )", "def lightsON():\n # TODO call a function that turns the lights on", "def lights(self):\n return list(self.GetLights())", "def renderScene(self):\r\n\r\n # Initialize the renderer.\r\n self.render.init(self.render.camera.imageWidth, self.render.camera.imageHeight)\r\n\r\n for pixel in self.render.getPixel():\r\n '''\r\n pixel is a list containing the image coordinate of a pixel i.e.\r\n pixel = [col, row]\r\n '''\r\n\r\n # create a ray from the eye position and goes through the pixel\r\n ray = self.create_ray(pixel[1], pixel[0])\r\n\r\n # set the default color to the background color\r\n color = self.render.bgcolor\r\n\r\n nearest_isect = self.get_nearest_object_intersection(ray)\r\n\r\n if nearest_isect.is_valid_intersection(): # valid intersection\r\n color = self.ambient[:3] * nearest_isect.material.ambient[:3] # ambient color is used when the point is in shadow\r\n # get a list of light sources that are visible from the nearest intersection point\r\n visible_lights = self.get_visible_lights(nearest_isect)\r\n nearest_isect.n = GT.normalize(nearest_isect.n) # ensure that the returned normals are normalized\r\n if len(visible_lights) > 0: # light-shadow\r\n '''\r\n Compute the color based on the material found in nearest_isect.material\r\n and the light sources visible from nearest_isect.p position.\r\n '''\r\n for light in visible_lights:\r\n color += self.blinn_phong_shading_per_light(-ray.viewDirection, light, nearest_isect)\r\n\r\n #At this point color should be a floating-point numpy array of 3 elements\r\n #and is the final color of the pixel.\r\n self.render.setPixel(pixel, color)\r\n\r\n self.render.save()", "def lightsOFF():\n # TODO call a function that turns the lights off", "def 
lightning():", "def setupLights(self) :\n\t\tself.ambientLight = render.attachNewNode(AmbientLight( \\\n\t\t\t\t\t\"ambientLight\"))\n\t\tself.ambientLight.node().setColor(Vec4(.8,.8,.8,1))\n\t\trender.setLight(self.ambientLight)\n\n\t\tdLight1 = DirectionalLight(\"dLight1\")\n\t\tdLight1.setColor(Vec4(6,5,7,1))\n\t\tdLight1.setDirection(Vec3(1,1,1))\n\t\tdlnp1 = render.attachNewNode(dLight1)\n\t\tdlnp1.setHpr(30,-160,0)\n\t\trender.setLight(dlnp1)\n\n\t\tdLight2 = DirectionalLight(\"dLight2\")\n\t\tdLight2.setColor(Vec4(.6,.7,1,1))\n\t\tdLight2.setDirection(Vec3(-1,-1,-1))\n\t\tself.dlnp2 = render.attachNewNode(dLight2)\n\t\tself.dlnp2.node().setScene(render)\n\t\tself.dlnp2.setHpr(-70,-60,0)\n\t\trender.setLight(self.dlnp2)", "def dynamic_light():\r\n\r\n settings[\"redColor\"] = (150, 150, 150)\r\n settings[\"yellowColor\"] = (255, 255, 0)\r\n settings[\"greenColor\"] = (150, 150, 150)", "def export_stl(self, **kwargs):\n self.scene.export_stl(**kwargs)", "def _set_light(self):\n self.scene_shader.use()\n diffuseColor = np.array([1, 1, 1])\n ambientColor = np.array([0.9, 0.875, 0.85])\n self.scene_shader.set_value(\"light.ambient\", ambientColor)\n self.scene_shader.set_value(\"light.diffuse\", diffuseColor)\n self.scene_shader.set_value(\"light.specular\",\n np.array([0.95, 0.95, 1.0]))", "def createLights(self):\n\n self.render.clearLight()\n\n alight = AmbientLight('ambientLight')\n alight.setColor(Vec4(0.7, 0.7, 0.7, 1))\n alightNP = self.worldRender.attachNewNode(alight)\n self.worldRender.setLight(alightNP)\n\n # Create a directional light for shadows\n dlight = DirectionalLight('dLight')\n dlight.setColor(Vec4(0.6, 0.6, 0.6, 1))\n dlight.setShadowCaster(True, 1024, 1024)\n dlight.getLens().setNearFar(1, 15)\n dlight.getLens().setFilmSize(128, 128)\n dlightNP = self.worldRender.attachNewNode(dlight)\n dlightNP.setPos(0, 0, 10)\n dlightNP.lookAt(0, 0, 0)\n self.worldRender.setLight(dlightNP)", "def HighLightByMat(self):\n print('highLight image file by selected material')", "def export_scene():\n try:\n # check for mesh\n mesh = cmds.ls(typ='mesh')\n if not mesh:\n cmds.warning('{0}: No mesh to export.'.format(__file__))\n return None\n else:\n cmds.select(mesh)\n # check plug-in\n if not cmds.pluginInfo('fbxmaya', q=True, l=True):\n cmds.loadPlugin('fbxmaya')\n\n file_path = cmds.file(q=True, exn=True)\n # todo: exclude unnecessary stuff for exporting, the engine only read .mesh now\n # painter doesn't support fbx 2016 yet\n # todo: checkout exported file from perforce\n return cmds.file(file_path, typ='DAE_FBX export', es=True)\n except: # todo: too broad exception, need to narrow down\n cmds.warning('{0}: Failed to export scene.'.format(__file__))", "def __write_scene(self):\n self.__file.write(\" <scene>\\n\")\n self.__file.write(\" <ambient>0.4 0.4 0.4 1</ambient>\\n\")\n self.__file.write(\" <background>0.7 0.7 0.7 1</background>\\n\")\n self.__file.write(\" <shadows>1</shadows>\\n\")\n self.__file.write(\" </scene>\\n\")", "def static_light():\r\n # set color accordingly\r\n if settings[\"clicked\"] == \"go\":\r\n\r\n settings[\"redColor\"] = (150, 150, 150)\r\n settings[\"yellowColor\"] = (150, 150, 150)\r\n settings[\"greenColor\"] = (0, 255, 0)\r\n\r\n elif settings[\"clicked\"] == \"stop\":\r\n settings[\"redColor\"] = (255, 0, 0)\r\n settings[\"yellowColor\"] = (150, 150, 150)\r\n settings[\"greenColor\"] = (150, 150, 150)", "def gl_lighting():\n for viewer in nuke.allNodes('Viewer'):\n val = int(viewer.knob('gl_lighting').getValue())\n 
viewer.knob('gl_lighting').setValue(not val)", "def HighLightByTex(self):\n print('highLight image file by selected texture(fileNode)')" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
export all the lightlinking in the scene
def exportLightLinking(self): lights = [a for a in mc.ls( typ = ['light','aiAreaLight'] ) if not 'eye' in a] allShapes = [s for s in mc.ls( type = 'geometryShape', ni = 1) if not (mc.objectType( s ) in ( 'aiAreaLight','aiSkyDomeLight' ))] litLinks = {} for l in lights: lightLinkShapes = mc.lightlink( query=True, light=l ,shp=1,t=0,set=0,h=0) litLinks[l] = list( set( allShapes ) - set( lightLinkShapes ) )#SHAPES WITH NO LINK TO THIS LIGHT pickle.dump( litLinks, open( self.lightLinkPath.path, "wb" ) )
[ "def exportLights(self):\n\t\t#TODO! REMOVE CONSTRAINS\n\t\tlights = mc.ls( typ=['light','aiAreaLight','aiSkyDomeLight','aiVolumeScattering','aiSky'], l=1 )\n\t\tmc.editRenderLayerGlobals( currentRenderLayer = 'defaultRenderLayer' )\n\t\tlitsToExport = []\n\t\tfor li in lights:\n\t\t\tfinalLi = li.split( '|' )\n\t\t\tif len(finalLi) == 1:\n\t\t\t\tlitsToExport.append( finalLi[0] )\n\t\t\telse:\n\t\t\t\tlitsToExport.append( finalLi[1] )\n\t\tif litsToExport:\n\t\t\tmc.select( litsToExport, r=1, ne=1 )\n\t\t\tmc.file( self.lightPath.path, op=\"v=0\", typ=\"mayaAscii\", pr=1, es=1 )\n\t\t\t#export Light Linking\n\t\t\tself.exportLightLinking()", "def export_lights(lamps, file, scene, global_matrix, tab_write):\n\n from .render import write_matrix, tab_write\n\n # Incremented after each lamp export to declare its target\n # currently used for Fresnel diffuse shader as their slope vector:\n global exported_lights_count\n # Get all lamps and keep their count in a global variable\n for exported_lights_count, ob in enumerate(lamps, start=1):\n lamp = ob.data\n\n matrix = global_matrix @ ob.matrix_world\n\n # Color is no longer modified by energy\n # any way to directly get bpy_prop_array as tuple?\n color = tuple(lamp.color)\n\n tab_write(file, \"light_source {\\n\")\n tab_write(file, \"< 0,0,0 >\\n\")\n tab_write(file, \"color srgb<%.3g, %.3g, %.3g>\\n\" % color)\n\n if lamp.type == \"POINT\":\n pass\n elif lamp.type == \"SPOT\":\n tab_write(file, \"spotlight\\n\")\n\n # Falloff is the main radius from the centre line\n tab_write(file, \"falloff %.2f\\n\" % (degrees(lamp.spot_size) / 2.0)) # 1 TO 179 FOR BOTH\n tab_write(\n file, \"radius %.6f\\n\" % ((degrees(lamp.spot_size) / 2.0) * (1.0 - lamp.spot_blend))\n )\n\n # Blender does not have a tightness equivalent, 0 is most like blender default.\n tab_write(file, \"tightness 0\\n\") # 0:10f\n\n tab_write(file, \"point_at <0, 0, -1>\\n\")\n if lamp.pov.use_halo:\n tab_write(file, \"looks_like{\\n\")\n tab_write(file, \"sphere{<0,0,0>,%.6f\\n\" % lamp.distance)\n tab_write(file, \"hollow\\n\")\n tab_write(file, \"material{\\n\")\n tab_write(file, \"texture{\\n\")\n tab_write(file, \"pigment{rgbf<1,1,1,%.4f>}\\n\" % (lamp.pov.halo_intensity * 5.0))\n tab_write(file, \"}\\n\")\n tab_write(file, \"interior{\\n\")\n tab_write(file, \"media{\\n\")\n tab_write(file, \"emission 1\\n\")\n tab_write(file, \"scattering {1, 0.5}\\n\")\n tab_write(file, \"density{\\n\")\n tab_write(file, \"spherical\\n\")\n tab_write(file, \"color_map{\\n\")\n tab_write(file, \"[0.0 rgb <0,0,0>]\\n\")\n tab_write(file, \"[0.5 rgb <1,1,1>]\\n\")\n tab_write(file, \"[1.0 rgb <1,1,1>]\\n\")\n tab_write(file, \"}\\n\")\n tab_write(file, \"}\\n\")\n tab_write(file, \"}\\n\")\n tab_write(file, \"}\\n\")\n tab_write(file, \"}\\n\")\n tab_write(file, \"}\\n\")\n tab_write(file, \"}\\n\")\n elif lamp.type == \"SUN\":\n tab_write(file, \"parallel\\n\")\n tab_write(file, \"point_at <0, 0, -1>\\n\") # *must* be after 'parallel'\n\n elif lamp.type == \"AREA\":\n tab_write(file, \"fade_distance %.6f\\n\" % (lamp.distance / 2.0))\n # Area lights have no falloff type, so always use blenders lamp quad equivalent\n # for those?\n tab_write(file, \"fade_power %d\\n\" % 2)\n size_x = lamp.size\n samples_x = lamp.pov.shadow_ray_samples_x\n if lamp.shape == \"SQUARE\":\n size_y = size_x\n samples_y = samples_x\n else:\n size_y = lamp.size_y\n samples_y = lamp.pov.shadow_ray_samples_y\n\n tab_write(\n file,\n \"area_light <%.6f,0,0>,<0,%.6f,0> %d, %d\\n\"\n % (size_x, size_y, samples_x, 
samples_y),\n )\n tab_write(file, \"area_illumination\\n\")\n if lamp.pov.shadow_ray_sample_method == \"CONSTANT_JITTERED\":\n if lamp.pov.use_jitter:\n tab_write(file, \"jitter\\n\")\n else:\n tab_write(file, \"adaptive 1\\n\")\n tab_write(file, \"jitter\\n\")\n\n # No shadow checked either at global or light level:\n if not scene.pov.use_shadows or (lamp.pov.shadow_method == \"NOSHADOW\"):\n tab_write(file, \"shadowless\\n\")\n\n # Sun shouldn't be attenuated. Area lights have no falloff attribute so they\n # are put to type 2 attenuation a little higher above.\n if lamp.type not in {\"SUN\", \"AREA\"}:\n if lamp.falloff_type == \"INVERSE_SQUARE\":\n tab_write(file, \"fade_distance %.6f\\n\" % (sqrt(lamp.distance / 2.0)))\n tab_write(file, \"fade_power %d\\n\" % 2) # Use blenders lamp quad equivalent\n elif lamp.falloff_type == \"INVERSE_LINEAR\":\n tab_write(file, \"fade_distance %.6f\\n\" % (lamp.distance / 2.0))\n tab_write(file, \"fade_power %d\\n\" % 1) # Use blenders lamp linear\n elif lamp.falloff_type == \"CONSTANT\":\n tab_write(file, \"fade_distance %.6f\\n\" % (lamp.distance / 2.0))\n tab_write(file, \"fade_power %d\\n\" % 3)\n # Use blenders lamp constant equivalent no attenuation.\n # Using Custom curve for fade power 3 for now.\n elif lamp.falloff_type == \"CUSTOM_CURVE\":\n tab_write(file, \"fade_power %d\\n\" % 4)\n\n write_matrix(file, matrix)\n\n tab_write(file, \"}\\n\")\n\n # v(A,B) rotates vector A about origin by vector B.\n file.write(\n \"#declare lampTarget%s= vrotate(<%.4g,%.4g,%.4g>,<%.4g,%.4g,%.4g>);\\n\"\n % (\n exported_lights_count,\n -ob.location.x,\n -ob.location.y,\n -ob.location.z,\n ob.rotation_euler.x,\n ob.rotation_euler.y,\n ob.rotation_euler.z,\n )\n )", "def lightning():", "def export_blend_connections():\n selection_list = pm.ls(tr=1, sl=1, l=1)\n\n dialog_return = pm.fileDialog2(cap=\"Save As\", fm=0, ff='Text Files(*.txt)')\n\n filename = dialog_return[0]\n print(filename)\n\n print(\"\\n\\nFiles written:\\n--------------------------------------------\\n\")\n\n with open(filename, 'w') as fileId:\n for i in range(0, len(selection_list)):\n shapes = pm.listRelatives(selection_list[i], s=True, f=True)\n\n main_shape = \"\"\n for j in range(0, len(shapes)):\n if pm.getAttr(shapes[j] + '.intermediateObject') == 0:\n main_shape = shapes\n break\n if main_shape == \"\":\n main_shape = shapes[0]\n\n con = pm.listConnections(main_shape, t=\"blendShape\", c=1, s=1, p=1)\n\n cmd = \"connectAttr -f %s.worldMesh[0] %s;\" % (\n ''.join(map(str, main_shape)),\n ''.join(map(str, con[0].name()))\n )\n print (cmd + \"\\n\")\n fileId.write(\"%s\\n\" % cmd)\n\n print(\"\\n------------------------------------------------------\\n\")\n print(\"filename: %s ...done\\n\" % filename)", "def link_data(ctx, output_path='./material/'):\n run_data_linking(output_path)", "def plant_lights():", "def importLightLinking(self, asset = '', searchAndReplace = ['',''] ):\n\t\tLayersInfo = pickle.load( open( self.lightLinkPath.path, \"rb\") )\n\t\tmc.refresh( su = 1 )\n\t\tif not asset == '':\n\t\t\tLayersInfo = self.filterLightLinksData( LayersInfo , asset, searchAndReplace )\n\t\tfor l in LayersInfo.keys():\n\t\t\tobjsToBreakLink = []\n\t\t\tfor link in LayersInfo[l]:\n\t\t\t\tif mc.objExists( link ):\n\t\t\t\t\tobjsToBreakLink.append( link )\n\t\t\tmc.lightlink( b = True, light = l, o = objsToBreakLink )\n\t\tmc.refresh( su = 0 )", "def export(self, *args):\r\n if not self._hasSelection():\r\n return\r\n if self._settings.get('settings', 'exportdir') == '':\r\n if 
self._settings.updateConfig(menuExportDir, 'exportdir') == 'canceled':\r\n return\r\n exportMeshes = cmds.ls(selection=True, l=True)\r\n #Checks for duplicate objects selected\r\n for i in range(len(exportMeshes)):\r\n exportMeshes[i] = self._checkRenderMeshName(exportMeshes[i])\r\n\r\n #Removes duplicate items\r\n exportMeshes = list(set(exportMeshes))\r\n for renderMeshName in exportMeshes:\r\n meshes = self._getMeshes(renderMeshName)\r\n if meshes:\r\n mainMesh = meshes[0]\r\n else:\r\n mainMesh = renderMeshName\r\n fileName = mainMesh.split('|')[-1]\r\n path = self._settings.get('settings', 'exportDir') + fileName\r\n position = cmds.xform(mainMesh, ws=True, q=True, t=True)\r\n rotation = cmds.xform(mainMesh, ws=True, q=True, ro=True)\r\n cmds.select(d=True)\r\n #Center Meshes\r\n if self._settings.getboolean('settings', 'centerMeshes'):\r\n cmds.xform(mainMesh, r=True, t=([axis * -1 for axis in position]))\r\n cmds.xform(mainMesh, r=True, eu=True, ro=([axis * -1 for axis in rotation]))\r\n cmds.select(mainMesh)\r\n #FBX export\r\n if self._settings.getboolean('settings', 'exportFBX'):\r\n cmds.file(path + '.fbx', exportSelected=True, type='FBX export', force=True)\r\n print('Exported: ' + fileName + '.fbx')\r\n #OBJ export\r\n if self._settings.getboolean('settings', 'exportOBJ'):\r\n cmds.file(path + '.obj', exportSelected=True, type='OBJexport', force=True, op=\"materials=0\")\r\n print('Exported: ' + fileName + '.obj')\r\n cmds.select(d=True)\r\n #Decenter Meshes\r\n if self._settings.getboolean('settings', 'centerMeshes'):\r\n cmds.xform(mainMesh, r=True, eu=True, ro=rotation)\r\n cmds.xform(mainMesh, r=True, t=position)\r\n cmds.select(d=True)", "def HighLightByObj(self):\n print('highLight image file by selected obj(geometry)')", "def join_scene(self) ->'TexturesVertex':\n return self.__class__(verts_features=[torch.cat(self.verts_features_list())])", "def sendToBlender(shot, update=False):\n # shot info\n asset = shot.shotstages[\"Lighting\"]\n if not asset.latest:\n cmds.warning(\"This shot has nothing in the lighting stage\")\n return\n\n path = os.path.join(asset.getBaseDir(), \"Blender\")\n if not os.path.exists(path):\n os.makedirs(path)\n\n try:\n asset.openVersion(asset.latest)\n except:\n pass\n\n # get render cam\n cameras = utilities.getRenderCams()\n if len(cameras) > 1:\n cmds.warning(\"CAMERA ERROR: Too many cooks!\")\n return\n elif len(cameras) == 0:\n cmds.warning(\"CAMERA ERROR: Not enough cooks!\")\n return\n else:\n camera = cameras[0]\n\n # get meshes from all render layers\n layer_data = {}\n layers = cmds.ls(type=\"renderLayer\")\n for layer in layers:\n if len(layers) > 1 and layer == \"defaultRenderLayer\":\n continue\n elif ':' in layer:\n continue\n\n geos = cmds.editRenderLayerMembers(layer, q=True, fullNames=True)\n\n # (make sure that we have all the children of everything selected as well\n # since Alembic does NOT traverse hierearchies without using the\n # super-buggy '-root [thing]' flag)\n geos = geos + cmds.listRelatives(geos, c=True, ad=True, fullPath=True)\n\n layer_data[layer] = geos\n cmds.select(geos, add=True)\n \n cmds.select(utilities.getRenderCams(), add=True)\n\n # export alembic file\n start = cmds.playbackOptions(minTime=True, q=True)\n end = cmds.playbackOptions(maxTime=True, q=True)\n abc_path = os.path.join(path, shot.name + \".abc\")\n args = \"-frameRange \" + str(start) + ' ' + str(end) + \" -sl -file \" + abc_path\n cmds.AbcExport(j=args)\n\n if update:\n utilities.newFile()\n return\n\n # export render layer metadata\n 
try:\n lights = cmds.sets(\"BL_EXPORT_LIGHTS\", q=True)\n except:\n lights = cmds.listRelatives(cmds.ls(type=\"light\"), p=True)\n\n root = {\n \"render_layers\": layer_data, \n \"render_output\": os.path.join(rc.getCompDirFor(shot.name), 'Footage'),\n \"main_cam\": camera,\n \"lights\": lights\n }\n\n j = json.dumps(root, indent=4, separators=(',',':'))\n j_path = os.path.join(path, shot.name + \".json\")\n\n with open(j_path, mode='w') as file:\n file.write(j)\n\n # run blender with command\n blender.run((\"snp_importLibrary()\",\n \"snp_loadScene(\\\"\" + abc_path + '\\\", \\\"' + j_path + \"\\\")\"))\n\n utilities.newFile()", "def fetchAllShaders():\n\tfrom mentalcore import mapi\n\n\t## Now get a list of assets in the scene\n\tassetDict = {}\n\tdupAssets = {}\n\tfor parentGrp in cmds.ls(assemblies = True, long = True):\n\t\tif cmds.ls(parentGrp, dag = True, type = \"mesh\"):\n\t\t\tfor each in cmds.listRelatives(parentGrp, children = True):\n\t\t\t\t## Check for duplicate or base assets\n\t\t\t\tif not cmds.objExists('%s.dupAsset' % each):\n\t\t\t\t\tassetDict[each.split('_hrc')[0]] = parentGrp\n\t\t\t\telse: # handle the duplicate naming\n\t\t\t\t\torigAssetName = each.split('_hrc')[0]\n\t\t\t\t\tdupAssets[each] = [origAssetName, parentGrp]\n\t\t\t\t\t\t\t \n\t## Now process SHD XML\n\tprocessSHDTemplate(assetDict = assetDict, selected = False)\n\tfinalBuildStuff()", "def _draw_light_sources(self) -> None:\n self._use_shader(self.shaders[\"light_source\"])\n for light in self.point_lights:\n light.draw()\n self.spot_light.draw()", "def setupLights(self) :\n\t\tself.ambientLight = render.attachNewNode(AmbientLight( \\\n\t\t\t\t\t\"ambientLight\"))\n\t\tself.ambientLight.node().setColor(Vec4(.8,.8,.8,1))\n\t\trender.setLight(self.ambientLight)\n\n\t\tdLight1 = DirectionalLight(\"dLight1\")\n\t\tdLight1.setColor(Vec4(6,5,7,1))\n\t\tdLight1.setDirection(Vec3(1,1,1))\n\t\tdlnp1 = render.attachNewNode(dLight1)\n\t\tdlnp1.setHpr(30,-160,0)\n\t\trender.setLight(dlnp1)\n\n\t\tdLight2 = DirectionalLight(\"dLight2\")\n\t\tdLight2.setColor(Vec4(.6,.7,1,1))\n\t\tdLight2.setDirection(Vec3(-1,-1,-1))\n\t\tself.dlnp2 = render.attachNewNode(dLight2)\n\t\tself.dlnp2.node().setScene(render)\n\t\tself.dlnp2.setHpr(-70,-60,0)\n\t\trender.setLight(self.dlnp2)", "def link_residues(self) -> None:\n ...", "def render(shots):\n for shot in shots:\n shot_asset = shot.shotstages[\"Lighting\"]\n ver = shot_asset.latest\n\n if not ver:\n continue\n\n #shot_asset.openVersion(ver)\n\n # set stuff up\n initShadMap(shot)\n\n fixRenderSettings()\n\n # render\n rc.renderSeq(shot.name, IMAGE_FORMAT)\n\n utilities.newFile()", "def lightlink(*args, b: bool=True, hierarchy: bool=True, light: Union[name, List[name]]=None,\n make: bool=True, object: Union[name, List[name]]=None, sets: bool=True, shadow:\n bool=True, shapes: bool=True, transforms: bool=True, useActiveLights: bool=True,\n useActiveObjects: bool=True, q=True, query=True, **kwargs)->Union[AnyStr, Any]:\n pass", "def export_mesh_lods(asset_name, properties):\n mesh_collection = bpy.data.collections.get(properties.mesh_collection_name)\n\n if mesh_collection:\n lod_objects = []\n\n # deselect everything\n utilities.deselect_all_objects()\n\n # create an empty object with a property that will define this empties children as a lod group in the fbx file\n empty_object = bpy.data.objects.new(f'LOD_{asset_name}', None)\n empty_object['fbx_type'] = 'LodGroup'\n\n # link the empty object to the mesh collection\n mesh_collection.objects.link(empty_object)\n 
empty_object.select_set(True)\n\n # get all the lod mesh objects that contain the same name as the asset\n for mesh_object in get_from_collection(properties.mesh_collection_name, 'MESH', properties):\n if asset_name in mesh_object.name:\n # add it to the list of lod objects\n lod_objects.append((mesh_object, mesh_object.parent))\n\n # select any rig the mesh is parented to\n set_parent_rig_selection(mesh_object, properties)\n\n # parent lod objects to the lod empty\n mesh_object.parent = empty_object\n\n # select the lod mesh\n mesh_object.select_set(True)\n\n # select collsion meshes\n select_asset_collisions(asset_name, properties)\n\n # select sockets\n select_asset_sockets(asset_name, properties)\n\n # export the selected lod meshes and empty\n fbx_file_paths = get_fbx_paths(asset_name, 'MESH')\n export_fbx_files(fbx_file_paths, properties)\n\n # un-parent the empty from the lod objects and deselect them\n for lod_object, lod_object_parent in lod_objects:\n lod_object.parent = lod_object_parent\n lod_object.select_set(False)\n\n # remove the empty object\n bpy.data.objects.remove(empty_object)\n\n return fbx_file_paths", "def targets(self):\n self.renderer.begin_rendering(\"targets\")\n for target in self.targets:\n self.renderer.draw_rect_3d(target, 10, 10, True, self.renderer.blue())\n self.renderer.end_rendering()" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
export aovs from scene
def exportAovs(self):
    aovs = mc.ls( typ = 'aiAOV' )
    aovData = {}
    for a in aovs:
        aovData[a] = {}
        aovData[a]['enabled'] = mc.getAttr( a + '.enabled' )
        aovData[a]['name'] = mc.getAttr( a + '.name' )
        aovData[a]['type'] = mc.getAttr( a + '.type' )
    pickle.dump( aovData, open( self.aovsPath.path, "wb" ) )
[ "def export_scene():\n try:\n # check for mesh\n mesh = cmds.ls(typ='mesh')\n if not mesh:\n cmds.warning('{0}: No mesh to export.'.format(__file__))\n return None\n else:\n cmds.select(mesh)\n # check plug-in\n if not cmds.pluginInfo('fbxmaya', q=True, l=True):\n cmds.loadPlugin('fbxmaya')\n\n file_path = cmds.file(q=True, exn=True)\n # todo: exclude unnecessary stuff for exporting, the engine only read .mesh now\n # painter doesn't support fbx 2016 yet\n # todo: checkout exported file from perforce\n return cmds.file(file_path, typ='DAE_FBX export', es=True)\n except: # todo: too broad exception, need to narrow down\n cmds.warning('{0}: Failed to export scene.'.format(__file__))", "def export_stl(self, **kwargs):\n self.scene.export_stl(**kwargs)", "def import_scene(file_path):\n\n pass", "def __write_scene(self):\n self.__file.write(\" <scene>\\n\")\n self.__file.write(\" <ambient>0.4 0.4 0.4 1</ambient>\\n\")\n self.__file.write(\" <background>0.7 0.7 0.7 1</background>\\n\")\n self.__file.write(\" <shadows>1</shadows>\\n\")\n self.__file.write(\" </scene>\\n\")", "def toS(remote, v):\n return to_scene(remote,v)", "def exportAnim(self, variant=\"\"):\r\n curSelection = cmds.ls(sl=1)\r\n for topNode in curSelection:\r\n\r\n savePath, startFrame, endFrame, aeDirPath = self.getSavePath(topNode, variant)\r\n\r\n if os.path.exists(savePath):\r\n myChoice = cmds.confirmDialog(title='File Exists!!',\r\n message='This wip version already has an animMAF file. Do you want to overwrite?',\r\n button=['Yes', 'No'], defaultButton='No', cancelButton='No',\r\n dismissString='No')\r\n if myChoice == 'No':\r\n sys.exit(0)\r\n cmds.warning('Currently Writing Out Frames %d to %d for object %s. You have not crashed.' % (\r\n startFrame, endFrame, topNode))\r\n cmds.refresh()\r\n\r\n masterDict = {}\r\n ctlDict = {}\r\n\r\n initT = cmds.getAttr(topNode + \".t\")\r\n initR = cmds.getAttr(topNode + \".r\")\r\n initS = cmds.getAttr(topNode + \".s\")\r\n initPos = initT + initR + initS\r\n\r\n parList = cmds.listRelatives(topNode, ad=1, f=1, type=\"transform\")\r\n # parList = list(set([cmds.listRelatives(i,f=1,p=1)[0] for i in hi]))\r\n for par in parList:\r\n self.constraintBake(par)\r\n # off = cmds.listRelatives(par,p=1)[0]\r\n shortPar = par.split(':')[-1].split('|')[-1]\r\n\r\n # shortOff = off.split(':')[-1].split('|')[-1]\r\n if shortPar == 'MASTER_CONTROL':\r\n if initT == [(0.0, 0.0, 0.0)]:\r\n initT = cmds.getAttr(par + \".t\", t=startFrame)\r\n initR = cmds.getAttr(par + \".r\", t=startFrame)\r\n initS = cmds.getAttr(par + \".s\", t=startFrame)\r\n\r\n initPos = initT + initR + initS\r\n\r\n elif \"tranRot_CTL\" in shortPar:\r\n if initT == [(0.0, 0.0, 0.0)]:\r\n initT = cmds.getAttr(par + \".t\", t=startFrame)\r\n initR = cmds.getAttr(par + \".r\", t=startFrame)\r\n initS = cmds.getAttr(par + \".s\", t=startFrame)\r\n\r\n initPos = initT + initR + initS\r\n\r\n '''\r\n So somewhere in here, I need to check if the offset is constrained, and bake it if so.\r\n Or maybe just have an option. 
But these people generally don't bake the constraints down.\r\n 1st world python problems...\r\n '''\r\n # is animated?\r\n numKeys = cmds.keyframe(par, q=1, kc=1, t=(startFrame, endFrame))\r\n if numKeys > 0:\r\n # animated\r\n print shortPar\r\n shortParAttrDict = self.getAnim(par, startFrame, endFrame)\r\n ctlDict[shortPar] = shortParAttrDict\r\n\r\n '''\r\n offKeys = cmds.keyframe(off, q=1, kc=1, t=(startFrame, endFrame))\r\n if offKeys > 0:\r\n\r\n shortOffAttrDict = self.getAnim(off,startFrame,endFrame)\r\n ctlDict[shortOff] = shortOffAttrDict\r\n\r\n # attrDict.keys()\r\n # ctlDict.keys() ctlDict['x_ctrl']\r\n # masterDict.keys()\r\n '''\r\n topNodeShort = topNode.split(\":\")[-1]\r\n masterDict[topNodeShort] = ctlDict\r\n masterDict['_init'] = initPos\r\n with open(savePath, 'w') as file:\r\n data = json.dump(masterDict, file)\r\n\r\n print(savePath)\r\n return savePath", "def menu_save_scene(self):\n file_name = QtGui.QFileDialog().getSaveFileName(self, \"Save Scene to File\", get_data_path(), \"*.pkl\")\n with open(file_name, \"wb\") as f:\n pickle.dump(self.scene, f, pickle.HIGHEST_PROTOCOL)", "def saveAsset(self, assetName, exportUV=True, exportOBJ=True, exportFBX=True, exportABC=True, selectionOnly=True, sceneFormat=\"mb\", notes=\"N/A\", **info):\n # hou.ui.displayMessage(\"This Function is not available yet\", title=\"Not yet available\")\n # self.ssResolution = 1000\n if assetName == \"\":\n msg = \"Asset Name cannot be empty\"\n hou.ui.displayMessage(msg)\n return\n\n if assetName in self.assetsList:\n msg = \"This Asset already exists.\\nDo you want to overwrite?\"\n state = hou.ui.displayConfirmation(msg, title='Overwrite?')\n if state:\n pass\n else:\n return\n\n # if hou.isApprentice():\n # sceneFormat=\"hipnc\"\n\n # if self._isSceneModified():\n # msg = \"Current scene is modified. It must be saved before continue.\\nDo you wish to save?\"\n # state = hou.ui.displayConfirmation(msg, title='Save?')\n # if not state:\n # return\n\n\n originalPath = self._getSceneFile()\n\n\n # if sceneFormat == \"mb\":\n # ext = u'.mb'\n # saveFormat = \"mayaBinary\"\n # else:\n # ext = u'.ma'\n # saveFormat = \"mayaAscii\"\n #\n #\n # dump, origExt = os.path.splitext(originalPath)\n #\n # if len(cmds.ls(type=\"unknown\")) > 0 and ext != origExt:\n # msg = \"There are unknown nodes in the scene. Cannot proceed with %s extension.\\n\\nDo you want to continue with %s?\" %(ext, origExt)\n # state = cmds.confirmDialog(title='Cannot Continue', message=msg, button=['Ok', 'Cancel'])\n # # cmds.warning(\"There are unknown nodes in the scene. Cannot proceed with %s extension. 
Do you want to proceed with %s?\" %(ext, origExt))\n # if state == \"Ok\":\n # if origExt == u'.mb':\n # ext = u'.mb'\n # saveFormat = \"mayaBinary\"\n # else:\n # ext = u'.ma'\n # saveFormat = \"mayaAscii\"\n #\n # elif state == \"Cancel\":\n # return\n #\n assetDirectory = os.path.join(self.directory, assetName)\n #\n assetAbsPath = os.path.join(assetDirectory, \"%s.%s\" %(assetName, sceneFormat))\n #\n #\n if selectionOnly:\n selection = hou.selectedNodes()\n if len(selection) == 0:\n hou.ui.displayMessage(\"No Transform object selected\")\n return\n else:\n rootNode = hou.node('obj')\n selection = rootNode.children()\n\n if not os.path.exists(assetDirectory):\n os.mkdir(assetDirectory)\n\n # # GET TEXTURES\n # # ------------\n #\n # possibleFileHolders = cmds.listRelatives(selection, ad=True, type=[\"mesh\", \"nurbsSurface\"], fullPath=True)\n # allFileNodes = self._getFileNodes(possibleFileHolders)\n #\n # textureDatabase = [x for x in self._buildPathDatabase(allFileNodes, assetDirectory)]\n #\n # self._copyTextures(textureDatabase)\n #\n # CREATE PREVIEWS\n # ---------------\n thumbPath, ssPath, swPath = self._createThumbnail(assetName, selectionOnly=selectionOnly, viewFit=True)\n #\n # # CREATE UV SNAPSHOTS\n # # ----------------\n # if exportUV:\n # self._uvSnaps(assetName)\n #\n # SAVE SOURCE\n # -----------\n # cmds.file(assetAbsPath, type=saveFormat, exportSelected=True, force=True)\n # p = selection[0].parent()\n # p.saveChildrenToFile(selection,\"\",assetAbsPath)\n # p.saveChildrenToFile(selection,\"\",assetAbsPath)\n # p.saveItemsToFile(selection, assetAbsPath)\n node = hou.node('/obj')\n subnet = node.collapseIntoSubnet(selection)\n hda_node = subnet.createDigitalAsset(\"TM_%s\" %assetName, hda_file_name=assetAbsPath, description=\"TM_%s\" %assetName, min_num_inputs=0, max_num_inputs=0)\n hda_node.extractAndDelete()\n\n # EXPORT OBJ\n # ----------\n\n if exportOBJ:\n # objSettings = self.exportSettings[\"objExportMaya\"]\n objFilePath = os.path.join(assetDirectory, \"%s.obj\" %assetName)\n if self._exportObj(objFilePath, exportSettings=self.exportSettings):\n objName = \"{0}.obj\".format(assetName)\n else:\n objName = \"N/A\"\n\n else:\n objName = \"N/A\"\n\n # EXPORT FBX\n # ----------\n if exportFBX:\n # fbxSettings = self.exportSettings[\"fbxExportMaya\"]\n fbxFilePath = os.path.join(assetDirectory, \"%s.fbx\" % assetName)\n frame = self._getCurrentFrame()\n\n if self._exportFbx(fbxFilePath, exportSettings=self.exportSettings, timeRange=[frame, frame]):\n fbxName = \"{0}.fbx\".format(assetName)\n else:\n fbxName = \"N/A\"\n\n else:\n fbxName = \"N/A\"\n\n # EXPORT ALEMBIC\n # --------------\n\n if exportABC:\n # abcSettings = self.exportSettings[\"alembicExportMaya\"]\n abcFilePath = os.path.join(assetDirectory, \"%s.abc\" % assetName)\n frame = self._getCurrentFrame()\n\n if self._exportAlembic(abcFilePath, exportSettings=self.exportSettings, timeRange=[frame, frame]):\n abcName = \"{0}.abc\".format(assetName)\n else:\n abcName = \"N/A\"\n\n else:\n abcName = \"N/A\"\n\n # # NUMERIC DATA\n # # ------------\n # cmds.select(hi=True)\n # polyCount = cmds.polyEvaluate(f=True)\n # tiangleCount = cmds.polyEvaluate(t=True)\n #\n # DATABASE\n # --------\n\n dataDict = {}\n dataDict['sourceProject'] = \"Houdini(%s)\" %sceneFormat\n dataDict['version'] = self._getVersion()\n dataDict['assetName'] = assetName\n dataDict['objPath'] = objName\n dataDict['fbxPath'] = fbxName\n dataDict['abcPath'] = abcName\n dataDict['sourcePath'] = os.path.basename(assetAbsPath)\n 
dataDict['thumbPath'] = os.path.basename(thumbPath)\n # dataDict['thumbPath'] = \"\"\n dataDict['ssPath'] = os.path.basename(ssPath)\n # dataDict['ssPath'] = \"\"\n dataDict['swPath'] = os.path.basename(swPath)\n # dataDict['swPath'] = \"\"\n # dataDict['textureFiles'] = [x[\"Texture\"] for x in textureDatabase]\n dataDict['textureFiles'] = \"\"\n # dataDict['Faces/Triangles'] = (\"%s/%s\" % (str(polyCount), str(tiangleCount)))\n dataDict['Faces/Triangles'] = \"\"\n dataDict['origin'] = originalPath\n dataDict['notes'] = notes\n\n self._setData(assetName, dataDict)\n\n # cmds.select(d=True)\n # self._returnOriginal(textureDatabase)\n\n # self.scanAssets() # scanning issued at populate function on ui class\n\n hou.ui.displayMessage(\"Asset Created Successfully\")", "def export(self, *args):\r\n if not self._hasSelection():\r\n return\r\n if self._settings.get('settings', 'exportdir') == '':\r\n if self._settings.updateConfig(menuExportDir, 'exportdir') == 'canceled':\r\n return\r\n exportMeshes = cmds.ls(selection=True, l=True)\r\n #Checks for duplicate objects selected\r\n for i in range(len(exportMeshes)):\r\n exportMeshes[i] = self._checkRenderMeshName(exportMeshes[i])\r\n\r\n #Removes duplicate items\r\n exportMeshes = list(set(exportMeshes))\r\n for renderMeshName in exportMeshes:\r\n meshes = self._getMeshes(renderMeshName)\r\n if meshes:\r\n mainMesh = meshes[0]\r\n else:\r\n mainMesh = renderMeshName\r\n fileName = mainMesh.split('|')[-1]\r\n path = self._settings.get('settings', 'exportDir') + fileName\r\n position = cmds.xform(mainMesh, ws=True, q=True, t=True)\r\n rotation = cmds.xform(mainMesh, ws=True, q=True, ro=True)\r\n cmds.select(d=True)\r\n #Center Meshes\r\n if self._settings.getboolean('settings', 'centerMeshes'):\r\n cmds.xform(mainMesh, r=True, t=([axis * -1 for axis in position]))\r\n cmds.xform(mainMesh, r=True, eu=True, ro=([axis * -1 for axis in rotation]))\r\n cmds.select(mainMesh)\r\n #FBX export\r\n if self._settings.getboolean('settings', 'exportFBX'):\r\n cmds.file(path + '.fbx', exportSelected=True, type='FBX export', force=True)\r\n print('Exported: ' + fileName + '.fbx')\r\n #OBJ export\r\n if self._settings.getboolean('settings', 'exportOBJ'):\r\n cmds.file(path + '.obj', exportSelected=True, type='OBJexport', force=True, op=\"materials=0\")\r\n print('Exported: ' + fileName + '.obj')\r\n cmds.select(d=True)\r\n #Decenter Meshes\r\n if self._settings.getboolean('settings', 'centerMeshes'):\r\n cmds.xform(mainMesh, r=True, eu=True, ro=rotation)\r\n cmds.xform(mainMesh, r=True, t=position)\r\n cmds.select(d=True)", "def export( self, captionMode, copyFiles, outputDir ):\n scene = slicer.mrmlScene\n nodes = scene.GetNumberOfNodes()\n\n self.__nodes = {}\n\n # 1 for model name, 2 for parent name\n self.__captionMode = captionMode\n # TRUE if we shall copy the files to the outputDir\n self.__copyFiles = copyFiles\n self.__outputDir = outputDir\n\n self.__tree = Tree()\n self.__tree.create_node( \"Scene\", \"scene\" )\n\n for n in xrange( nodes ):\n\n node = scene.GetNthNode( n )\n\n self.parseNode( node )\n\n [header, footer] = self.configureXrenderers()\n output = header\n output += self.createXtree( \"scene\" )\n output += footer\n\n return output", "def save_scene_to_file(self, file_name):\n self.convert_scene_to_numpy_array()\n self.output = cv2.cvtColor(self.output, cv2.COLOR_RGB2BGR)\n cv2.imwrite(file_name, self.output)", "def importAovs(self):\n\t\tLayersInfo = pickle.load( open( self.aovsPath.path, \"rb\") )\n\t\tmc.refresh( su = 1 )\n\t\tfor ao in 
LayersInfo.keys():\n\t\t\taov.create( ao, LayersInfo[ao]['name'], LayersInfo[ao]['type'], LayersInfo[ao]['enabled'] )\n\t\tmc.refresh( su = 0 )", "def on_enable_scene(self):", "def scan_scene (file_out):\n file_out.write(\"from morse.builder.morsebuilder import *\\n\\n\")\n\n robot_text = ''\n component_text = ''\n\n for obj in bpy.data.objects:\n try:\n component_path = obj.game.properties['Path'].value\n # Exit if the object is not a MORSE component\n except KeyError as detail:\n continue\n\n # Ignore middleware and modifier empties.\n # These will be added dinamically by the builder\n if 'middleware' in component_path or 'modifiers' in component_path:\n continue\n\n # Read what type of component this is,\n # from the source of its python file\n path_elements = component_path.split('/')\n component_type = path_elements[-2]\n component_name = path_elements[-1]\n\n builder_type = morse_types[component_type]\n\n # Swap dots for underscores in object names\n obj_name = re.sub('\\.', '_', obj.name)\n # Create the object instance\n if component_type == 'robots':\n robot_text += \"%s = %s('%s')\\n\" % (obj_name, builder_type, component_name)\n robot_text += save_translation(obj, obj_name)\n robot_text += save_rotation(obj, obj_name)\n robot_text += save_properties(obj, obj_name)\n robot_text += \"\\n\"\n\n # Assign component to the parent\n if component_type == 'sensors' or component_type == 'actuators':\n component_text += \"%s = %s('%s')\\n\" % (obj_name, builder_type, component_name)\n component_text += save_translation(obj, obj_name)\n component_text += save_rotation(obj, obj_name)\n parent_name = re.sub('\\.', '_', obj.parent.name)\n component_text += \"%s.append(%s)\\n\" % (parent_name, obj_name)\n component_text += save_properties(obj, obj_name)\n component_text += \"\\n\"\n\n # Write the buffers to the text file\n file_out.write(\"# Robots\\n\")\n file_out.write(robot_text)\n file_out.write(\"# Components\\n\")\n file_out.write(component_text)", "def IExportSelectedObjectToUvLayout():\n\n\tselection = list(set(cmds.ls(sl=True, l=True, o=True)))\n\tselection and exportSelectedObjectToUvLayout(selection[0])", "def pov_render(self, camera_position = (0,0,-10), camera_target = (0,0,0)):\n\n \"\"\"\n f=pov.File(\"demo.pov\",\"colors.inc\",\"stones.inc\")\n \n cam = pov.Camera(location=camera_position, sky=(1,0,1),look_at=camera_target)\n light = pov.LightSource( camera_position, color=\"White\")\n \n povObjs = [cam, light]\n for obj in self.objects[1:]:\n # test coordinate transfroms\n # print M\n # vectors = np.array([[0,0,0,1], #origin\n # [1,0,0,1], # x\n # [0,1,0,1], # y\n # [0,0,1,1]]).transpose() # z\n # origin,x,y,z = (T*vectors).transpose()\n povObjs.append(povObj(obj))\n \n #print tuple(povObjs)\n f.write(*tuple(povObjs))\n f.close()\n #sphere1 = pov.Sphere( (1,1,2), 2, pov.Texture(pov.Pigment(color=\"Yellow\")))\n #sphere2 = pov.Sphere( (0,1,2), 2, pov.Texture(pov.Pigment(color=\"Yellow\")))\n # composite2 = None#pov.Difference(sphere1, sphere2)\n # \n \n \n \n \n \n # f.write( cam, composite2, light )\n # f.close()\n subprocess.call(\"povray +H2400 +W3200 demo.pov\", shell=True)\n os.system(\"open demo.png\")\n \"\"\"", "def open_scene(file_path, save=True):\n\n pass", "def save_studio_library():\n #Studio Library\n camera_creation = mc.camera(n = \"Hip_Camera\")\n hip_camera= camera_creation[0]\n hip_camera_shape = camera_creation[1]\n\n hip_control_w_namespace = mc.ls(full_namespace + \":*hip*CTL\")\n hip_control_wo_namespace = mc.ls(\"*hip*CTL\")\n hip_control = 
hip_control_w_namespace + hip_control_wo_namespace\n hip_control = hip_control[0]\n temp_constraint = mc.parentConstraint(hip_control,hip_camera, mo = False)\n mc.delete(temp_constraint)\n mc.parent (hip_camera ,hip_control)\n\n mc.setAttr(hip_camera + \".translateZ\",50)\n mc.parent(hip_camera, world = True)\n mc.pointConstraint(hip_control,hip_camera, mo=True)\n\n #set the model panel to be the new camera hide undesirable model panel elements\n mps = mc.getPanel(visiblePanels = True)\n for each in mps:\n if each.find(\"modelPanel\")!=-1:\n mp = each\n\n mc.modelPanel(mp, e = True, cam = hip_camera_shape )\n #myPanel=str(mc.getPanel(wf=1))\n if mc.getPanel(to=mp) == \"modelPanel\":\n\n #mc.modelEditor(mp, e=1, all=0)\n mc.modelEditor(mp, e=1, rendererName=\"base_OpenGL_Renderer\")\n mc.modelEditor(mp, e=1, nurbsCurves=0)\n mc.modelEditor(mp, e=1, locators=0)\n mc.modelEditor(mp, e=1, deformers=0)\n mc.modelEditor(mp, e=1, ikHandles=0)\n mc.modelEditor(mp, e=1, joints=0)\n\n #get the start and end frames from the scene\n start_frame = int(mc.playbackOptions(min=1, q=1))\n end_frame = int(mc.playbackOptions(max=1, q=1))\n\n #set the permissions on the folder to allow for read, write, copy\n\n #make sure the nessesary controls are selected\n #mocapTools.utilities.selectCtrls(anim_namespace+ \":\")\n unknown = mc.ls(type = \"unknown\")\n mc.delete(unknown)\n\n item = studiolibraryitems.animitem.AnimItem()\n\n path = os.path.join(dest_folder, action + \".anim\")\n\n item.setDescription(fileX) #this will be from the ingest document\n item.save(objects=controls, path =path, startFrame=start_frame, endFrame=end_frame, fileType = \"mayaAscii\")\n\n #create a capture playblast w studio library tool so animators can see the results in a window!\n #create a jpeg sequence folder\n thumbnail_sequence_folder = os.path.join(path, \"sequence\")\n\n if not os.path.exists(thumbnail_sequence_folder):\n os.makedirs(thumbnail_sequence_folder)\n\n #playblast\n full_path_thumbnail_filename = os.path.join(path, \"thumbnail.jpg\")\n sequence_filename_template = os.path.join(thumbnail_sequence_folder, \"thumbnail.jpg\")\n mc.select(cl = True)\n mutils.playblast(sequence_filename_template , mp, int(start_frame), int(end_frame), 250, 250, step=1)\n #generate a thumbnail\n generated_frames = os.listdir(thumbnail_sequence_folder)\n generated_frames.sort()\n first_generated_frame = os.path.join(thumbnail_sequence_folder, generated_frames[0])\n shutil.copyfile(first_generated_frame, full_path_thumbnail_filename)", "def __render_scene(self, scene):\n\n # Name and location of the exported project.\n project_dir = os.path.join(tempfile.gettempdir(), \"blenderseed\", \"render\")\n project_filepath = os.path.join(project_dir, \"render.appleseed\")\n\n # Create target directories if necessary.\n if not os.path.exists(project_dir):\n try:\n os.makedirs(project_dir)\n except os.error:\n self.report({\"ERROR\"}, \"The directory {0} could not be created. Check directory permissions.\".format(project_dir))\n return\n\n # Generate project on disk.\n self.update_stats(\"\", \"appleseed Rendering: Exporting Scene\")\n writer = projectwriter.Writer()\n writer.write(scene, project_filepath)\n\n # Render project.\n self.__render_project_file(scene, project_filepath, project_dir)" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
import lights in scene
def importLights(self, asset = '', searchAndReplace = ['',''] ):
    if self.lightPath.exists:
        self.lightPath.imp()
        if self.lightLinkPath.exists:
            self.importLightLinking( asset, searchAndReplace )
[ "def setupLights(self) :\n\t\tself.ambientLight = render.attachNewNode(AmbientLight( \\\n\t\t\t\t\t\"ambientLight\"))\n\t\tself.ambientLight.node().setColor(Vec4(.8,.8,.8,1))\n\t\trender.setLight(self.ambientLight)\n\n\t\tdLight1 = DirectionalLight(\"dLight1\")\n\t\tdLight1.setColor(Vec4(6,5,7,1))\n\t\tdLight1.setDirection(Vec3(1,1,1))\n\t\tdlnp1 = render.attachNewNode(dLight1)\n\t\tdlnp1.setHpr(30,-160,0)\n\t\trender.setLight(dlnp1)\n\n\t\tdLight2 = DirectionalLight(\"dLight2\")\n\t\tdLight2.setColor(Vec4(.6,.7,1,1))\n\t\tdLight2.setDirection(Vec3(-1,-1,-1))\n\t\tself.dlnp2 = render.attachNewNode(dLight2)\n\t\tself.dlnp2.node().setScene(render)\n\t\tself.dlnp2.setHpr(-70,-60,0)\n\t\trender.setLight(self.dlnp2)", "def lightsON():\n # TODO call a function that turns the lights on", "def _set_light(self):\n self.scene_shader.use()\n diffuseColor = np.array([1, 1, 1])\n ambientColor = np.array([0.9, 0.875, 0.85])\n self.scene_shader.set_value(\"light.ambient\", ambientColor)\n self.scene_shader.set_value(\"light.diffuse\", diffuseColor)\n self.scene_shader.set_value(\"light.specular\",\n np.array([0.95, 0.95, 1.0]))", "def plant_lights():", "def setup_lights(self, settings):\n\n for light in settings.lights: # for each light listed in yaml file\n lst = Light(light, settings.lights, settings) # create a Light instance with settings\n self.lights.append(lst) # add it to the list of lights", "def exportLights(self):\n\t\t#TODO! REMOVE CONSTRAINS\n\t\tlights = mc.ls( typ=['light','aiAreaLight','aiSkyDomeLight','aiVolumeScattering','aiSky'], l=1 )\n\t\tmc.editRenderLayerGlobals( currentRenderLayer = 'defaultRenderLayer' )\n\t\tlitsToExport = []\n\t\tfor li in lights:\n\t\t\tfinalLi = li.split( '|' )\n\t\t\tif len(finalLi) == 1:\n\t\t\t\tlitsToExport.append( finalLi[0] )\n\t\t\telse:\n\t\t\t\tlitsToExport.append( finalLi[1] )\n\t\tif litsToExport:\n\t\t\tmc.select( litsToExport, r=1, ne=1 )\n\t\t\tmc.file( self.lightPath.path, op=\"v=0\", typ=\"mayaAscii\", pr=1, es=1 )\n\t\t\t#export Light Linking\n\t\t\tself.exportLightLinking()", "def import_scene(file_path):\n\n pass", "def dynamic_light():\r\n\r\n settings[\"redColor\"] = (150, 150, 150)\r\n settings[\"yellowColor\"] = (255, 255, 0)\r\n settings[\"greenColor\"] = (150, 150, 150)", "def createLights(self):\n\n self.render.clearLight()\n\n alight = AmbientLight('ambientLight')\n alight.setColor(Vec4(0.7, 0.7, 0.7, 1))\n alightNP = self.worldRender.attachNewNode(alight)\n self.worldRender.setLight(alightNP)\n\n # Create a directional light for shadows\n dlight = DirectionalLight('dLight')\n dlight.setColor(Vec4(0.6, 0.6, 0.6, 1))\n dlight.setShadowCaster(True, 1024, 1024)\n dlight.getLens().setNearFar(1, 15)\n dlight.getLens().setFilmSize(128, 128)\n dlightNP = self.worldRender.attachNewNode(dlight)\n dlightNP.setPos(0, 0, 10)\n dlightNP.lookAt(0, 0, 0)\n self.worldRender.setLight(dlightNP)", "def static_light():\r\n # set color accordingly\r\n if settings[\"clicked\"] == \"go\":\r\n\r\n settings[\"redColor\"] = (150, 150, 150)\r\n settings[\"yellowColor\"] = (150, 150, 150)\r\n settings[\"greenColor\"] = (0, 255, 0)\r\n\r\n elif settings[\"clicked\"] == \"stop\":\r\n settings[\"redColor\"] = (255, 0, 0)\r\n settings[\"yellowColor\"] = (150, 150, 150)\r\n settings[\"greenColor\"] = (150, 150, 150)", "def lightning():", "def gl_lighting():\n for viewer in nuke.allNodes('Viewer'):\n val = int(viewer.knob('gl_lighting').getValue())\n viewer.knob('gl_lighting').setValue(not val)", "def on_enable_scene(self):", "def setupScene():\n global 
ogre_scene_manager\n global ogre_render_window\n global ogre_root_node\n\n ogre_render_window = ogre_root.getAutoCreatedWindow()\n ogre_scene_manager = ogre_root.createSceneManager(ogre.ST_GENERIC,\n \"Default SceneManager\")\n camera = ogre_scene_manager.createCamera(\"Camera\")\n ogre_root.getAutoCreatedWindow().addViewport(camera)\n\n camera.setPosition(ogre.Vector3(0, 0, 120))\n camera.lookAt(ogre.Vector3(0, 0, 0))\n\n ogre_scene_manager.setAmbientLight(ogre.ColourValue(0.7, 0.7, 0.7))\n ogre_scene_manager.setFog(ogre.FOG_EXP, ogre.ColourValue(1, 1, 1), 0.0002)\n light = ogre_scene_manager.createLight('lightMain')\n light.setPosition(ogre.Vector3(10, 10, 10))\n\n ogre_root_node = ogre_scene_manager.getRootSceneNode()", "def export_lights(lamps, file, scene, global_matrix, tab_write):\n\n from .render import write_matrix, tab_write\n\n # Incremented after each lamp export to declare its target\n # currently used for Fresnel diffuse shader as their slope vector:\n global exported_lights_count\n # Get all lamps and keep their count in a global variable\n for exported_lights_count, ob in enumerate(lamps, start=1):\n lamp = ob.data\n\n matrix = global_matrix @ ob.matrix_world\n\n # Color is no longer modified by energy\n # any way to directly get bpy_prop_array as tuple?\n color = tuple(lamp.color)\n\n tab_write(file, \"light_source {\\n\")\n tab_write(file, \"< 0,0,0 >\\n\")\n tab_write(file, \"color srgb<%.3g, %.3g, %.3g>\\n\" % color)\n\n if lamp.type == \"POINT\":\n pass\n elif lamp.type == \"SPOT\":\n tab_write(file, \"spotlight\\n\")\n\n # Falloff is the main radius from the centre line\n tab_write(file, \"falloff %.2f\\n\" % (degrees(lamp.spot_size) / 2.0)) # 1 TO 179 FOR BOTH\n tab_write(\n file, \"radius %.6f\\n\" % ((degrees(lamp.spot_size) / 2.0) * (1.0 - lamp.spot_blend))\n )\n\n # Blender does not have a tightness equivalent, 0 is most like blender default.\n tab_write(file, \"tightness 0\\n\") # 0:10f\n\n tab_write(file, \"point_at <0, 0, -1>\\n\")\n if lamp.pov.use_halo:\n tab_write(file, \"looks_like{\\n\")\n tab_write(file, \"sphere{<0,0,0>,%.6f\\n\" % lamp.distance)\n tab_write(file, \"hollow\\n\")\n tab_write(file, \"material{\\n\")\n tab_write(file, \"texture{\\n\")\n tab_write(file, \"pigment{rgbf<1,1,1,%.4f>}\\n\" % (lamp.pov.halo_intensity * 5.0))\n tab_write(file, \"}\\n\")\n tab_write(file, \"interior{\\n\")\n tab_write(file, \"media{\\n\")\n tab_write(file, \"emission 1\\n\")\n tab_write(file, \"scattering {1, 0.5}\\n\")\n tab_write(file, \"density{\\n\")\n tab_write(file, \"spherical\\n\")\n tab_write(file, \"color_map{\\n\")\n tab_write(file, \"[0.0 rgb <0,0,0>]\\n\")\n tab_write(file, \"[0.5 rgb <1,1,1>]\\n\")\n tab_write(file, \"[1.0 rgb <1,1,1>]\\n\")\n tab_write(file, \"}\\n\")\n tab_write(file, \"}\\n\")\n tab_write(file, \"}\\n\")\n tab_write(file, \"}\\n\")\n tab_write(file, \"}\\n\")\n tab_write(file, \"}\\n\")\n tab_write(file, \"}\\n\")\n elif lamp.type == \"SUN\":\n tab_write(file, \"parallel\\n\")\n tab_write(file, \"point_at <0, 0, -1>\\n\") # *must* be after 'parallel'\n\n elif lamp.type == \"AREA\":\n tab_write(file, \"fade_distance %.6f\\n\" % (lamp.distance / 2.0))\n # Area lights have no falloff type, so always use blenders lamp quad equivalent\n # for those?\n tab_write(file, \"fade_power %d\\n\" % 2)\n size_x = lamp.size\n samples_x = lamp.pov.shadow_ray_samples_x\n if lamp.shape == \"SQUARE\":\n size_y = size_x\n samples_y = samples_x\n else:\n size_y = lamp.size_y\n samples_y = lamp.pov.shadow_ray_samples_y\n\n tab_write(\n file,\n 
\"area_light <%.6f,0,0>,<0,%.6f,0> %d, %d\\n\"\n % (size_x, size_y, samples_x, samples_y),\n )\n tab_write(file, \"area_illumination\\n\")\n if lamp.pov.shadow_ray_sample_method == \"CONSTANT_JITTERED\":\n if lamp.pov.use_jitter:\n tab_write(file, \"jitter\\n\")\n else:\n tab_write(file, \"adaptive 1\\n\")\n tab_write(file, \"jitter\\n\")\n\n # No shadow checked either at global or light level:\n if not scene.pov.use_shadows or (lamp.pov.shadow_method == \"NOSHADOW\"):\n tab_write(file, \"shadowless\\n\")\n\n # Sun shouldn't be attenuated. Area lights have no falloff attribute so they\n # are put to type 2 attenuation a little higher above.\n if lamp.type not in {\"SUN\", \"AREA\"}:\n if lamp.falloff_type == \"INVERSE_SQUARE\":\n tab_write(file, \"fade_distance %.6f\\n\" % (sqrt(lamp.distance / 2.0)))\n tab_write(file, \"fade_power %d\\n\" % 2) # Use blenders lamp quad equivalent\n elif lamp.falloff_type == \"INVERSE_LINEAR\":\n tab_write(file, \"fade_distance %.6f\\n\" % (lamp.distance / 2.0))\n tab_write(file, \"fade_power %d\\n\" % 1) # Use blenders lamp linear\n elif lamp.falloff_type == \"CONSTANT\":\n tab_write(file, \"fade_distance %.6f\\n\" % (lamp.distance / 2.0))\n tab_write(file, \"fade_power %d\\n\" % 3)\n # Use blenders lamp constant equivalent no attenuation.\n # Using Custom curve for fade power 3 for now.\n elif lamp.falloff_type == \"CUSTOM_CURVE\":\n tab_write(file, \"fade_power %d\\n\" % 4)\n\n write_matrix(file, matrix)\n\n tab_write(file, \"}\\n\")\n\n # v(A,B) rotates vector A about origin by vector B.\n file.write(\n \"#declare lampTarget%s= vrotate(<%.4g,%.4g,%.4g>,<%.4g,%.4g,%.4g>);\\n\"\n % (\n exported_lights_count,\n -ob.location.x,\n -ob.location.y,\n -ob.location.z,\n ob.rotation_euler.x,\n ob.rotation_euler.y,\n ob.rotation_euler.z,\n )\n )", "def setupScene():\n global ogre_scene_manager\n global ogre_render_window\n global ogre_root_node\n global ogre_camera\n\n ogre_render_window = ogre_root.getAutoCreatedWindow()\n ogre_scene_manager = ogre_root.createSceneManager(ogre.ST_GENERIC,\n \"Default SceneManager\")\n ogre_camera = ogre_scene_manager.createCamera(\"Camera\")\n ogre_root.getAutoCreatedWindow().addViewport(ogre_camera)\n\n ogre_camera.setPosition(ogre.Vector3(0, 40, 5))\n ogre_camera.lookAt(ogre.Vector3(0, 0, 0))\n ogre_camera.nearClipDistance = 5\n\n ogre_scene_manager.setAmbientLight(ogre.ColourValue(0.05, 0.05, 0.05))\n ogre_scene_manager.setShadowTechnique(ogre.SHADOWTYPE_STENCIL_ADDITIVE)\n ogre_scene_manager.setFog(ogre.FOG_EXP, ogre.ColourValue(1, 1, 1), 0.002)\n\n directional_light = ogre_scene_manager.createLight('Light-Directional')\n directional_light.setType(ogre.Light.LT_DIRECTIONAL)\n directional_light.setDirection(0.1, -1, 0.5)\n directional_light.setDiffuseColour(0.5, 0.5, 0.5)\n directional_light.setSpecularColour(0.02, 0, 0)\n\n ogre_root_node = ogre_scene_manager.getRootSceneNode()", "def run(self) -> None:\n self._hass.turn_on('scene.{0}'.format(self._args['scene']))", "def setCartoonShader(self, switchtoon = False):\n\n this_dir, this_filename = os.path.split(__file__)\n if switchtoon:\n lightinggen = Filename.fromOsSpecific(os.path.join(this_dir, \"shaders\", \"lightingGen.sha\"))\n tempnode = NodePath(PandaNode(\"temp node\"))\n tempnode.setShader(loader.loadShader(lightinggen))\n self.cam.node().setInitialState(tempnode.getState())\n # self.render.setShaderInput(\"light\", self.cam)\n self.render.setShaderInput(\"light\", self.__ptlightnode0)\n #\n normalsBuffer = self.win.makeTextureBuffer(\"normalsBuffer\", 0, 0)\n 
normalsBuffer.setClearColor(LVecBase4(0.5, 0.5, 0.5, 1))\n normalsCamera = self.makeCamera(\n normalsBuffer, lens=self.cam.node().getLens(), scene = self.render)\n normalsCamera.reparentTo(self.cam)\n normalgen = Filename.fromOsSpecific(os.path.join(this_dir, \"shaders\", \"normalGen.sha\"))\n tempnode = NodePath(PandaNode(\"temp node\"))\n tempnode.setShader(loader.loadShader(normalgen))\n normalsCamera.node().setInitialState(tempnode.getState())\n\n drawnScene = normalsBuffer.getTextureCard()\n drawnScene.setTransparency(1)\n drawnScene.setColor(1, 1, 1, 0)\n drawnScene.reparentTo(render2d)\n self.drawnScene = drawnScene\n self.separation = 0.0007\n self.cutoff = 0.05\n normalgen = Filename.fromOsSpecific(os.path.join(this_dir, \"shaders\", \"inkGen.sha\"))\n drawnScene.setShader(loader.loadShader(normalgen))\n drawnScene.setShaderInput(\"separation\", LVecBase4(self.separation, 0, self.separation, 0))\n drawnScene.setShaderInput(\"cutoff\", LVecBase4(self.cutoff))", "def _create_example_light():\n return Light({\"warning\": False, \"off\": True})" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
import light linking to lights
def importLightLinking(self, asset = '', searchAndReplace = ['',''] ):
    LayersInfo = pickle.load( open( self.lightLinkPath.path, "rb") )
    mc.refresh( su = 1 )
    if not asset == '':
        LayersInfo = self.filterLightLinksData( LayersInfo , asset, searchAndReplace )
    for l in LayersInfo.keys():
        objsToBreakLink = []
        for link in LayersInfo[l]:
            if mc.objExists( link ):
                objsToBreakLink.append( link )
        mc.lightlink( b = True, light = l, o = objsToBreakLink )
    mc.refresh( su = 0 )
[ "def importLights(self, asset = '', searchAndReplace = ['',''] ):\n\t\tif self.lightPath.exists:\n\t\t\tself.lightPath.imp()\n\t\t\tif self.lightLinkPath.exists:\n\t\t\t\tself.importLightLinking( asset, searchAndReplace )", "def setupLights(self) :\n\t\tself.ambientLight = render.attachNewNode(AmbientLight( \\\n\t\t\t\t\t\"ambientLight\"))\n\t\tself.ambientLight.node().setColor(Vec4(.8,.8,.8,1))\n\t\trender.setLight(self.ambientLight)\n\n\t\tdLight1 = DirectionalLight(\"dLight1\")\n\t\tdLight1.setColor(Vec4(6,5,7,1))\n\t\tdLight1.setDirection(Vec3(1,1,1))\n\t\tdlnp1 = render.attachNewNode(dLight1)\n\t\tdlnp1.setHpr(30,-160,0)\n\t\trender.setLight(dlnp1)\n\n\t\tdLight2 = DirectionalLight(\"dLight2\")\n\t\tdLight2.setColor(Vec4(.6,.7,1,1))\n\t\tdLight2.setDirection(Vec3(-1,-1,-1))\n\t\tself.dlnp2 = render.attachNewNode(dLight2)\n\t\tself.dlnp2.node().setScene(render)\n\t\tself.dlnp2.setHpr(-70,-60,0)\n\t\trender.setLight(self.dlnp2)", "def lightsON():\n # TODO call a function that turns the lights on", "def exportLightLinking(self):\n\t\tlights = [a for a in mc.ls( typ = ['light','aiAreaLight'] ) if not 'eye' in a]\n\t\tallShapes = [s for s in mc.ls( type = 'geometryShape', ni = 1) if not (mc.objectType( s ) in ( 'aiAreaLight','aiSkyDomeLight' ))]\n\t\tlitLinks = {}\n\t\tfor l in lights:\n\t\t\tlightLinkShapes = mc.lightlink( query=True, light=l ,shp=1,t=0,set=0,h=0)\n\t\t\tlitLinks[l]\t = list( set( allShapes ) - set( lightLinkShapes ) )#SHAPES WITH NO LINK TO THIS LIGHT\n\t\tpickle.dump( litLinks, open( self.lightLinkPath.path, \"wb\" ) )", "def lightning():", "def exportLights(self):\n\t\t#TODO! REMOVE CONSTRAINS\n\t\tlights = mc.ls( typ=['light','aiAreaLight','aiSkyDomeLight','aiVolumeScattering','aiSky'], l=1 )\n\t\tmc.editRenderLayerGlobals( currentRenderLayer = 'defaultRenderLayer' )\n\t\tlitsToExport = []\n\t\tfor li in lights:\n\t\t\tfinalLi = li.split( '|' )\n\t\t\tif len(finalLi) == 1:\n\t\t\t\tlitsToExport.append( finalLi[0] )\n\t\t\telse:\n\t\t\t\tlitsToExport.append( finalLi[1] )\n\t\tif litsToExport:\n\t\t\tmc.select( litsToExport, r=1, ne=1 )\n\t\t\tmc.file( self.lightPath.path, op=\"v=0\", typ=\"mayaAscii\", pr=1, es=1 )\n\t\t\t#export Light Linking\n\t\t\tself.exportLightLinking()", "def setup_lights(self, settings):\n\n for light in settings.lights: # for each light listed in yaml file\n lst = Light(light, settings.lights, settings) # create a Light instance with settings\n self.lights.append(lst) # add it to the list of lights", "def turn_on_lights(bridge):\n for light in bridge.lights:\n bridge.set_light(light.light_id, {'ct': 350, 'bri': 254, 'on': True})", "def dynamic_light():\r\n\r\n settings[\"redColor\"] = (150, 150, 150)\r\n settings[\"yellowColor\"] = (255, 255, 0)\r\n settings[\"greenColor\"] = (150, 150, 150)", "def plant_lights():", "def addLight(self, id):\r\n\t\t\r\n\t\tnewLight = Light(id)\r\n\t\tself.lights[id] = newLight", "def static_light():\r\n # set color accordingly\r\n if settings[\"clicked\"] == \"go\":\r\n\r\n settings[\"redColor\"] = (150, 150, 150)\r\n settings[\"yellowColor\"] = (150, 150, 150)\r\n settings[\"greenColor\"] = (0, 255, 0)\r\n\r\n elif settings[\"clicked\"] == \"stop\":\r\n settings[\"redColor\"] = (255, 0, 0)\r\n settings[\"yellowColor\"] = (150, 150, 150)\r\n settings[\"greenColor\"] = (150, 150, 150)", "async def Turn_On_Light_With_Color(\n light_id: int = Path(..., title=\"Numeric light identifier\", ge=0),\n color: str = Path(..., title=\"Color name or hexadecimal string\"),\n) -> Dict[str, Any]:\n 
busylightapi.manager.light_on(light_id, color)\n return {\n \"action\": \"on\",\n \"light_id\": light_id,\n \"color\": color,\n }", "def lightlink(*args, b: bool=True, hierarchy: bool=True, light: Union[name, List[name]]=None,\n make: bool=True, object: Union[name, List[name]]=None, sets: bool=True, shadow:\n bool=True, shapes: bool=True, transforms: bool=True, useActiveLights: bool=True,\n useActiveObjects: bool=True, q=True, query=True, **kwargs)->Union[AnyStr, Any]:\n pass", "def nextLight():\n global light\n pin.setAllOutPinsLow()\n light += 1\n light %= len(traffic_lights)\n print traffic_colors[light]\n pin.setOutPinHigh(traffic_lights[light])", "def _create_example_light():\n return Light({\"warning\": False, \"off\": True})", "def create_light(self, li: Light):\n pass", "async def Turn_On_Lights() -> Dict[str, Any]:\n busylightapi.manager.light_on(ALL_LIGHTS)\n return {\n \"action\": \"on\",\n \"light_id\": \"all\",\n \"color\": \"green\",\n }", "def createLights(self):\n\n self.render.clearLight()\n\n alight = AmbientLight('ambientLight')\n alight.setColor(Vec4(0.7, 0.7, 0.7, 1))\n alightNP = self.worldRender.attachNewNode(alight)\n self.worldRender.setLight(alightNP)\n\n # Create a directional light for shadows\n dlight = DirectionalLight('dLight')\n dlight.setColor(Vec4(0.6, 0.6, 0.6, 1))\n dlight.setShadowCaster(True, 1024, 1024)\n dlight.getLens().setNearFar(1, 15)\n dlight.getLens().setFilmSize(128, 128)\n dlightNP = self.worldRender.attachNewNode(dlight)\n dlightNP.setPos(0, 0, 10)\n dlightNP.lookAt(0, 0, 0)\n self.worldRender.setLight(dlightNP)" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
filter light linking data for the specific asset
def filterLightLinksData(self, LayersInfo , asset, sAr = ['',''] ):
    lightData = [(a.replace( sAr[0], sAr[1] ),LayersInfo[a].replace( sAr[0], sAr[1] )) for a in LayersInfo.keys() if asset in a]
    return dict( lightData )
[ "def importLightLinking(self, asset = '', searchAndReplace = ['',''] ):\n\t\tLayersInfo = pickle.load( open( self.lightLinkPath.path, \"rb\") )\n\t\tmc.refresh( su = 1 )\n\t\tif not asset == '':\n\t\t\tLayersInfo = self.filterLightLinksData( LayersInfo , asset, searchAndReplace )\n\t\tfor l in LayersInfo.keys():\n\t\t\tobjsToBreakLink = []\n\t\t\tfor link in LayersInfo[l]:\n\t\t\t\tif mc.objExists( link ):\n\t\t\t\t\tobjsToBreakLink.append( link )\n\t\t\tmc.lightlink( b = True, light = l, o = objsToBreakLink )\n\t\tmc.refresh( su = 0 )", "def filter(self, filters):", "def get_filters(self):", "def filter(self, data):\n pass", "def _filter(self, rgba):\n raise NotImplementedError()", "def filter(ctx):\n pass", "def _filter(self):", "def lightning():", "def apply_filter(self, image):\n pass", "def HighLightByMat(self):\n print('highLight image file by selected material')", "def broadbandfilter(self):\n _, = self.broadbandfilters\n return _", "def create_filters(self):", "def vol_filter(self,vol_series,threshold=\"25%\"):\n \n thres_vol = vol_series.describe()[threshold]\n valid_asset = list(vol_series[vol_series>=thres_vol].index)\n \n self.update_universe = valid_asset", "def exportLightLinking(self):\n\t\tlights = [a for a in mc.ls( typ = ['light','aiAreaLight'] ) if not 'eye' in a]\n\t\tallShapes = [s for s in mc.ls( type = 'geometryShape', ni = 1) if not (mc.objectType( s ) in ( 'aiAreaLight','aiSkyDomeLight' ))]\n\t\tlitLinks = {}\n\t\tfor l in lights:\n\t\t\tlightLinkShapes = mc.lightlink( query=True, light=l ,shp=1,t=0,set=0,h=0)\n\t\t\tlitLinks[l]\t = list( set( allShapes ) - set( lightLinkShapes ) )#SHAPES WITH NO LINK TO THIS LIGHT\n\t\tpickle.dump( litLinks, open( self.lightLinkPath.path, \"wb\" ) )", "def _filter(self, filter_condition):", "def encode_light_objects(self, light_type):\n baselights = list(map( lambda obj : { \"position\" : obj.p, \"color\" : obj.light_color, \"radius\" : obj.light_radius }, \n filter(lambda obj : obj.light_type == light_type and self.visible_light(obj), self.objects)))\n\n\n if light_type == Object.LightTypes.DYNAMIC_SHADOWCASTER and len(baselights)> DFRenderer.max_lights:\n return sample( baselights, DFRenderer.max_lights )\n return baselights", "def parse_raw_data(assets, authorizations, raw_data, name_filter, cache, _):\n if 'Items' in raw_data['cf_raw']:\n for cf_dist in raw_data['cf_raw']['Items']:\n asset = cache.get_asset(f'CF_{cf_dist[\"DomainName\"]}')\n if asset is None:\n asset = scan(cf_dist)\n cache.save_asset(f'CF_{cf_dist[\"DomainName\"]}', asset)\n # If the CloudFront is disabled\n if asset is None:\n continue\n name_in_aliases = False\n for alias in asset.aliases:\n name_in_aliases = name_in_aliases or name_filter.lower() in alias.lower()\n if search_filter_in(asset, name_filter):\n assets.append(asset)\n return assets, authorizations", "def _FilterForBasic(artifacts):\n return [i for i in _FilterForImages(artifacts) if i.image_channel is None]", "def apply_filter(self, src_img, slider1, slider2, slider3):\n pass" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
import aovs into scene
def importAovs(self):
    LayersInfo = pickle.load( open( self.aovsPath.path, "rb") )
    mc.refresh( su = 1 )
    for ao in LayersInfo.keys():
        aov.create( ao, LayersInfo[ao]['name'], LayersInfo[ao]['type'], LayersInfo[ao]['enabled'] )
    mc.refresh( su = 0 )
[ "def import_scene(file_path):\n\n pass", "def importAbcAsset ():\n\n help(importAbcAsset)\n\n import hou\n import os\n \n \n #set path\n hipPath = hou.expandString('$HIP')\n path = hipPath + \"/abc/\"\n print (path)\n \n listPath = os.listdir(path)\n \n obj = hou.node(\"/obj\")\n alembicImport= obj.createNode (\"geo\",\"alembicImport\")\n \n file1 = hou.node(\"/obj/alembicImport/file1\")\n file1.destroy()\n \n for n in listPath:\n print (n)\n currentFile=alembicImport.createNode(\"alembic\",n)\n #set fileName\n currentFile.setParms({\"fileName\":\"$\"+\"HIP/abc/\"+n})\n\n #reload geo callback\n #prepa param\n parm_group = alembicImport.parmTemplateGroup()\n parm_folder = hou.FolderParmTemplate(\"folder\",\"reload\")\n #button run code\n button=hou.ButtonParmTemplate(\"reload\",\"Reload\")\n button.setTags({\"script_callback_language\":\"python\",\"script_callback\":\"import y \\ny.reloadAlembic()\"})\n parm_folder.addParmTemplate(button)\n #append param\n parm_group.append(parm_folder)\n alembicImport.setParmTemplateGroup(parm_group)", "def import_ig_scene(self, scene):\n assert isinstance(scene, InteractiveIndoorScene), \\\n 'import_ig_scene can only be called with InteractiveIndoorScene'\n new_object_ids = scene.load()\n self.objects += new_object_ids\n if scene.texture_randomization:\n # use randomized texture\n for body_id, visual_mesh_to_material in \\\n zip(new_object_ids, scene.visual_mesh_to_material):\n shadow_caster = True\n if scene.objects_by_id[body_id].category == 'ceilings':\n shadow_caster = False\n class_id = self.class_name_to_class_id.get(\n scene.objects_by_id[body_id].category, SemanticClass.SCENE_OBJS)\n self.load_articulated_object_in_renderer(\n body_id,\n class_id=class_id,\n visual_mesh_to_material=visual_mesh_to_material,\n shadow_caster=shadow_caster)\n else:\n # use default texture\n for body_id in new_object_ids:\n use_pbr = True\n use_pbr_mapping = True\n shadow_caster = True\n if scene.scene_source == 'IG':\n if scene.objects_by_id[body_id].category in ['walls', 'floors', 'ceilings']:\n use_pbr = False\n use_pbr_mapping = False\n if scene.objects_by_id[body_id].category == 'ceilings':\n shadow_caster = False\n class_id = self.class_name_to_class_id.get(\n scene.objects_by_id[body_id].category, SemanticClass.SCENE_OBJS)\n self.load_articulated_object_in_renderer(\n body_id,\n class_id=class_id,\n use_pbr=use_pbr,\n use_pbr_mapping=use_pbr_mapping,\n shadow_caster=shadow_caster)\n self.scene = scene\n\n return new_object_ids", "def importGeoAsset ():\n\n help(importGeoAsset)\n\n import hou\n import os\n \n \n #set path\n hipPath = hou.expandString('$HIP')\n path = hipPath + \"/geo/\"\n print (path)\n \n listPath = os.listdir(path)\n \n obj = hou.node(\"/obj\")\n geoImport= obj.createNode (\"geo\",\"geoImport\")\n\n file1 = hou.node(\"/obj/geoImport/file1\")\n file1.destroy()\n \n for n in listPath :\n print (n)\n currentFile=geoImport.createNode(\"file\",n)\n #set fileNames\n currentFile.setParms({\"file\":\"$\"+\"HIP/geo/\"+n})\n \n #reload geo callback\n #prepa param\n parm_group = geoImport.parmTemplateGroup()\n parm_folder = hou.FolderParmTemplate(\"folder\",\"reload\")\n #button run code\n button=hou.ButtonParmTemplate(\"reload\",\"Reload\")\n button.setTags({\"script_callback_language\":\"python\",\"script_callback\":\"import y \\ny.reloadGeo()\"})\n parm_folder.addParmTemplate(button)\n #append param\n parm_group.append(parm_folder)\n geoImport.setParmTemplateGroup(parm_group)", "def importExternal(*args):\n goTo = pi.currentProject\n impFile = 
cmds.fileDialog2(fm=1, dir = goTo)[0]\n if impFile:\n cmds.file(impFile, i=True)", "def __init__(self, *args, **kwargs):\n super(MayaScene, self).__init__(*args, **kwargs)", "def on_enable_scene(self):", "def set_up_scenes():\n cmd.zoom('Cathepsin', 10) # Zoom out to get a view on the whole complex\n cmd.scene('001', 'store', message='This is the first scene with a view on the complex!')\n cmd.set_view(closeup) # Get a close-up of the ligand by using the manually chosen viewpoint\n cmd.scene('002', 'store', message='This is the second scene with a close-up on the ligand!')", "def open_scene(file_path, save=True):\n\n pass", "def load(self, context, name, namespace, options=None):\n\n # Create directory for asset and avalon container\n root = \"/Game/Avalon/Assets\"\n asset = context.get('asset').get('name')\n suffix = \"_CON\"\n if asset:\n asset_name = \"{}_{}\".format(asset, name)\n else:\n asset_name = \"{}\".format(name)\n\n tools = unreal.AssetToolsHelpers().get_asset_tools()\n asset_dir, container_name = tools.create_unique_asset_name(\n \"{}/{}/{}\".format(root, asset, name), suffix=\"\")\n\n container_name += suffix\n\n unreal.EditorAssetLibrary.make_directory(asset_dir)\n\n automated = False\n actor = None\n\n task = unreal.AssetImportTask()\n task.options = unreal.FbxImportUI()\n\n libpath = self.fname.replace(\"fbx\", \"json\")\n\n with open(libpath, \"r\") as fp:\n data = json.load(fp)\n\n instance_name = data.get(\"instance_name\")\n\n if instance_name:\n automated = True\n actor_name = 'PersistentLevel.' + instance_name\n actor = unreal.EditorLevelLibrary.get_actor_reference(actor_name)\n skeleton = actor.skeletal_mesh_component.skeletal_mesh.skeleton\n task.options.set_editor_property('skeleton', skeleton)\n\n if not actor:\n return None\n\n task.set_editor_property('filename', self.fname)\n task.set_editor_property('destination_path', asset_dir)\n task.set_editor_property('destination_name', asset_name)\n task.set_editor_property('replace_existing', False)\n task.set_editor_property('automated', automated)\n task.set_editor_property('save', False)\n\n # set import options here\n task.options.set_editor_property(\n 'automated_import_should_detect_type', False)\n task.options.set_editor_property(\n 'original_import_type', unreal.FBXImportType.FBXIT_SKELETAL_MESH)\n task.options.set_editor_property(\n 'mesh_type_to_import', unreal.FBXImportType.FBXIT_ANIMATION)\n task.options.set_editor_property('import_mesh', False)\n task.options.set_editor_property('import_animations', True)\n task.options.set_editor_property('override_full_name', True)\n\n task.options.anim_sequence_import_data.set_editor_property(\n 'animation_length',\n unreal.FBXAnimationLengthImportType.FBXALIT_EXPORTED_TIME\n )\n task.options.anim_sequence_import_data.set_editor_property(\n 'import_meshes_in_bone_hierarchy', False)\n task.options.anim_sequence_import_data.set_editor_property(\n 'use_default_sample_rate', True)\n task.options.anim_sequence_import_data.set_editor_property(\n 'import_custom_attribute', True)\n task.options.anim_sequence_import_data.set_editor_property(\n 'import_bone_tracks', True)\n task.options.anim_sequence_import_data.set_editor_property(\n 'remove_redundant_keys', True)\n task.options.anim_sequence_import_data.set_editor_property(\n 'convert_scene', True)\n\n unreal.AssetToolsHelpers.get_asset_tools().import_asset_tasks([task])\n\n # Create Asset Container\n lib.create_avalon_container(\n container=container_name, path=asset_dir)\n\n data = {\n \"schema\": 
\"openpype:container-2.0\",\n \"id\": pipeline.AVALON_CONTAINER_ID,\n \"asset\": asset,\n \"namespace\": asset_dir,\n \"container_name\": container_name,\n \"asset_name\": asset_name,\n \"loader\": str(self.__class__.__name__),\n \"representation\": context[\"representation\"][\"_id\"],\n \"parent\": context[\"representation\"][\"parent\"],\n \"family\": context[\"representation\"][\"context\"][\"family\"]\n }\n unreal_pipeline.imprint(\n \"{}/{}\".format(asset_dir, container_name), data)\n\n asset_content = unreal.EditorAssetLibrary.list_assets(\n asset_dir, recursive=True, include_folder=True\n )\n\n animation = None\n\n for a in asset_content:\n unreal.EditorAssetLibrary.save_asset(a)\n imported_asset_data = unreal.EditorAssetLibrary.find_asset_data(a)\n imported_asset = unreal.AssetRegistryHelpers.get_asset(\n imported_asset_data)\n if imported_asset.__class__ == unreal.AnimSequence:\n animation = imported_asset\n break\n\n if animation:\n animation.set_editor_property('enable_root_motion', True)\n actor.skeletal_mesh_component.set_editor_property(\n 'animation_mode', unreal.AnimationMode.ANIMATION_SINGLE_NODE)\n actor.skeletal_mesh_component.animation_data.set_editor_property(\n 'anim_to_play', animation)\n\n return asset_content", "def menu_load_scene(self):\n file_name = QtGui.QFileDialog().getOpenFileName(self, \"Choose Scene File\", get_data_path(), \"*.pkl\")\n with open(file_name, \"rb\") as f:\n self.scene = pickle.load(f)", "def import_builder(self):\n\n if not tp.is_maya():\n tp.logger.warning('Import model functionality is only available in Maya')\n return\n\n assert self._asset\n\n track = scene.TrackNodes()\n track.load('transform')\n self._asset.import_builder_file()\n mc.refresh()\n imported_objs = track.get_delta()\n mc.select(imported_objs)\n mc.viewFit(animate=True)\n mc.select(clear=True)", "def main():\r\n \r\n world = WorldModel()\r\n #uncomment these lines and comment out the next 2 if you want to use the\r\n #full Baxter model\r\n #print \"Loading full Baxter model (be patient, this will take a minute)...\"\r\n #world.loadElement(os.path.join(model_dir,\"baxter.rob\"))\r\n print \"Loading simplified Baxter model...\"\r\n world.loadElement(os.path.join(model_dir,\"baxter_col.rob\"))\r\n print \"Loading Kiva pod model...\"\r\n world.loadElement(os.path.join(model_dir,\"kiva_pod/model.obj\"))\r\n print \"Loading plane model...\"\r\n world.loadElement(os.path.join(model_dir,\"plane.env\"))\r\n \r\n #shift the Baxter up a bit (95cm)\r\n Rbase,tbase = world.robot(0).getLink(0).getParentTransform()\r\n world.robot(0).getLink(0).setParentTransform(Rbase,(0,0,0.95))\r\n \r\n #translate pod to be in front of the robot, and rotate the pod by 90 degrees \r\n Trel = (so3.rotation((0,0,1),math.pi/2),[1.1,0,0])\r\n T = world.rigidObject(0).getTransform()\r\n world.rigidObject(0).setTransform(*se3.mul(Trel,T))\r\n \r\n #run the visualizer\r\n visualizer = MyGLViewer(world)\r\n visualizer.run()", "def reference_scene(file_path, **kwargs):\n\n pass", "def visualize_obj(obj_path, *args, color=\"lightcoral\", **kwargs):\n print(\"Visualizing : \" + obj_path)\n scene = Scene(add_root=True)\n scene.add_from_file(obj_path, *args, c=color, **kwargs)\n\n return scene", "def load_velo(self):\n # Find all the Velodyne files\n\n velo_files = []\n image_files = []\n calib_files = []\n\n for file in self.image_sets:\n velo_file = '{}.bin'.format(file)\n velo_files.append(os.path.join(KITTI_PATH, 'training', 'velodyne', velo_file))\n image_file = '{}.png'.format(file)\n 
image_files.append(os.path.join(KITTI_PATH, 'training', 'image_2', image_file))\n calib_file = '{}.txt'.format(file)\n calib_files.append(os.path.join(KITTI_PATH, 'training', 'calib', calib_file))\n\n print('Found ' + str(len(velo_files)) + ' Velodyne scans...')\n # Read the Velodyne scans. Each point is [x,y,z,reflectance]\n self.velo = velo_files\n self.image = image_files\n self.calib = calib_files\n\n print('done.')", "def WriteImport(self, filename, logname, outputDir, settings, isAnimated, cameraRig, lightingRig):\r\n step = os.path.basename(outputDir)\r\n execution = os.path.basename(os.path.dirname(outputDir))\r\n test = os.path.basename(os.path.dirname(os.path.dirname(outputDir)))\r\n path = os.path.join(self.__scenesDir, test, execution, step)\r\n if (not os.path.isdir(path)):\r\n os.makedirs(path)\r\n self.__pathMap.append((path, outputDir))\r\n \r\n self.__logFiles.append(os.path.join(path, os.path.basename(logname)))\r\n self.__importLogFiles.append(self.__logFiles[-1])\r\n \r\n command = (\"SetValue \\\"preferences.scripting.cmdlogfilename\\\", \\\"\" + \r\n self.__logFiles[-1].replace(\"\\\\\", \"\\\\\\\\\") + \"\\\"\\n\"\r\n \"NewScene, false\\n\")\r\n if (FUtils.GetExtension(filename) == \"dae\"):\r\n command = (command + \r\n \"set myIProp = CreateImportFTKOptions()\\n\" +\r\n \"myIProp.Parameters(\\\"Filename\\\").Value = \\\"\" + \r\n filename.replace(\"\\\\\", \"\\\\\\\\\") +\"\\\"\\n\" +\r\n \"myIProp.Parameters(\\\"Verbose\\\").Value = True\\n\")\r\n for setting in settings:\r\n value = setting.GetValue().strip()\r\n if (value == \"\"):\r\n value = self.FindDefault(FXsi.__IMPORT_OPTIONS, \r\n setting.GetPrettyName())\r\n command = (command + \"myIProp.Parameters(\\\"\" + \r\n setting.GetCommand() + \"\\\").Value = \" + value + \"\\n\")\r\n command = command + \"ImportFTK myIProp.Name \\n\"\r\n elif (FUtils.GetExtension(filename) == \"scn\"):\r\n command = (command +\r\n \"OpenScene \\\"\" + filename.replace(\"\\\\\",\"\\\\\\\\\") + \"\\\"\\n\")\r\n else: \r\n return\r\n \r\n self.__currentImportProperName = FUtils.GetProperFilename(filename)\r\n basename = self.__currentImportProperName + \".scn\"\r\n\r\n# self.__script.write(\r\n# command +\r\n# \"SearchAndReplacePath \\\"All\\\", \\\"\" + FXsi.__REPLACE_PATH + \r\n# \"\\\", \\\"\" + \r\n# os.path.dirname(filename).replace(\"\\\\\", \"\\\\\\\\\") + \r\n# \"\\\", True\\n\" +\r\n# \"SaveSceneAs \\\"\" + \r\n# os.path.join(path, basename).replace(\"\\\\\", \"\\\\\\\\\") +\r\n# \"\\\"\\n\"\r\n# )\r\n \r\n self.__script.write(\r\n command +\r\n \"SaveSceneAs \\\"\" + \r\n os.path.join(path, basename).replace(\"\\\\\", \"\\\\\\\\\") +\r\n \"\\\"\\n\"\r\n )\r\n \r\n self.__testCount = self.__testCount + 1\r\n \r\n return [basename,]", "def visualize(self):\n app = QtGui.QApplication([''])\n SceneGUI(self)\n app.exec_()", "def connectMasterScene():\n try:\n nuke.toNode('Viewer1').setInput(0, nuke.toNode('MASTER_SCENE'))\n except:\n print 'no master scene found!'" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
import shaders into scene
def importShaders(self):
    if self.shaderPath.exists:
        self.shaderPath.imp()
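The snippet above leans on a shaderPath helper object that the record does not show; for comparison, a minimal standalone sketch of the same idea using plain maya.cmds (the file-path argument and function name here are assumptions, not part of the record) could look like:

# Hypothetical standalone sketch; requires a running Maya session for maya.cmds.
import os

import maya.cmds as cmds

def import_shaders(shader_file):
    """Import a shader file into the current scene if it exists on disk."""
    if os.path.exists(shader_file):
        # i=True imports the file's contents rather than opening it as a scene.
        cmds.file(shader_file, i=True)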
[ "def _on_load_scene_shaders(self):\n\n artellapipe.ShadersMgr().load_scene_shaders()", "def importShader(path):\r\n try:\r\n pm.importFile(path)\r\n print 'Success import {}'.format(path)\r\n except:\r\n print 'Failed import {}'.format(path)\r\n return\r\n assignShader()", "def _setupShaders(self):\r\n if not haveShaders:\r\n return\r\n self._shaders={}\r\n self._shaders['mono++'] = shaders.compileProgram(shaders.vertSimple,\r\n shaders.bitsMonoModeFrag)\r\n self._shaders['color++'] = shaders.compileProgram(shaders.vertSimple,\r\n shaders.bitsColorModeFrag)", "def bs_importShaders(shaderPath, jsonPath):\n # import shaders.\n bs_mayaFile.bs_importFile(shaderPath)\n # read shader data from json file.\n with open(jsonPath) as json_data:\n shaderData = json.load(json_data)\n print shaderData\n # apply shaders.\n for each in shaderData.keys():\n # for x in shaderData[each]:\n # pm.select(shaderData[each][x],r=True)\n pm.select(shaderData[each], r=True)\n pm.windows.hyperShade(a=each)\n bs_qui.bs_displayMessage('success', 'shader import success.')\n return True", "def convert_shaders(self):\n raise NotImplementedError()", "def agregar_shader(self, nombre, archivo_vert, archivo_frag):\n self.shaders[nombre] = Shader(archivo_vert, archivo_frag)", "def importShaders(self, namespace=':'):\n self.logger.info(\"Import Shaders\")\n\n if self.data['abcShadersAttr']:\n\n abcfile = self.data['abcShadersAttr']\n \n # shotgun query for maya file\n mayafile = find_shader_package_from_shader_file(file_path=abcfile, file_type='ma')\n if mayafile != {}:\n mayafile = mayafile['ma']\n self.logger.debug(\"Found maya shader file: %s\" % mayafile)\n else:\n localfile = abcfile.replace('.abc', '.ma')\n if os.path.isfile(localfile):\n mayafile = localfile\n self.logger.debug(\"Found maya shader file: %s\" % mayafile)\n else:\n self.logger.error(\"Missing file : %s\" % self.data['abcShadersAttr'])\n return False\n\n if os.path.isfile(mayafile):\n try: \n imported_shaders = cmds.file(mayafile, i=True, returnNewNodes=True, renameAll=True, mergeNamespacesOnClash=True, namespace=namespace)\n self.setAttr(\"abcShaders\", \"\")\n self.logger.debug(\"Imported under %s namespace\" % namespace)\n\n # reset selection back to alembicHolder\n cmds.select(self.data['shapeNode'])\n self.logger.info(\"Imported : %s\" % self.data['abcShadersAttr'])\n return True\n\n except Exception, e:\n self.logger.error(\"Import Json Error : %s\" % e)\n return False\n else:\n self.logger.error(\"Missing file : %s\" % self.data['abcShadersAttr'])\n return False\n else:\n self.logger.info(\"Empty attribute : %s.abcShadersAttr\" % self.data['shapeNode'])\n return False", "def loadShader(shaderpath, shadername, vertexFormatList=None, fragmentFormatlist=None):\n fragment = Shader(shaderpath + shadername + \".fsh\", FRAGMENT, True, fragmentFormatlist)\n vertex = Shader(shaderpath + shadername + \".vsh\", VERTEX, True, vertexFormatList)\n return ShaderProgram(vertex, fragment, True)", "def assignShader():\r\n try:\r\n shaderInfoNode = pm.PyNode('shaderInfoNode')\r\n except:\r\n print '\"shaderInfoNode\" not exist!'\r\n \r\n numAttr = shaderInfoNode.shaderInfos.numChildren()\r\n \r\n message = ''\r\n for i in range(numAttr):\r\n shaderInfos = json.loads(shaderInfoNode.attr('shaderInfos{}'.format(i)).get())\r\n geos=[]\r\n for geo in shaderInfos.get('geometry'):\r\n if pm.objExists(geo):\r\n geos.append(geo)\r\n try:\r\n pm.select(geos)\r\n \r\n surfaceShader = pm.PyNode(shaderInfos.get('surfaceShader'))\r\n pm.hyperShade(assign=surfaceShader)\r\n 
pm.select(cl=True) \r\n try:\r\n if shaderInfos.get('displacement'):\r\n displacement = pm.PyNode(shaderInfos.get('displacement'))\r\n sg = surfaceShader.outColor.outputs()[0]\r\n displacement.displacement.connect(sg.displacementShader)\r\n except:\r\n message += ( str(shaderInfos.get('displacement')) + '-->' + sg.name()+ '\\n')\r\n except:\r\n message += ( str(shaderInfos.get('surfaceShader')) + '-->' + str(geos )+ '\\n')\r\n \r\n\r\n shaderInfoNode.unlock()\r\n pm.delete(shaderInfoNode)\r\n \r\n if message:\r\n print 'Failed list:\\n'\r\n print message", "def reloadShader(self):\n self.debug(\"Reloading shaders\")\n self._createConvertShader()\n self._createClearShader()\n self._createDistributionShader()\n # self._createGenerateMipmapsShader()\n # self._createPhotonBoxShader()\n # self._createBlurShader()\n\n if hasattr(self, \"voxelCube\"):\n self.pipeline.setEffect(self.voxelCube, \"Effects/DisplayVoxels.effect\", {\n \"normalMapping\": False,\n \"castShadows\": False,\n \"castGI\": False\n })", "def _reload_shader(self):\n self.render_pipeline.reload_shaders()\n\n self.render_pipeline.set_effect(self.terrain.get_node(), \"effects/terrain.yaml\", {\n \"render_gbuffer\": True,\n \"render_shadows\": False,\n\n })\n\n self.render_pipeline.set_effect(self.terrain_shadow.get_node(), \"effects/terrain_shadow.yaml\", {\n \"render_gbuffer\": False,\n \"render_shadows\": True,\n }, 5000)", "def _load_opengl(self):\r\n pass", "def compile(self):\n if not self.isCompiled():\n if self.file is not None:\n try:\n if self.tipo == VERTEX:\n self.shader = glCreateShader(GL_VERTEX_SHADER)\n else:\n self.shader = glCreateShader(GL_FRAGMENT_SHADER)\n glShaderSource(self.shader, self.file)\n glCompileShader(self.shader)\n self.compiled = True\n except:\n raise Exception(\"error al compilar el shader\")\n else:\n raise Exception(\"no se ha cargado un archivo\")\n else:\n print \"Error :: el shader ya ha sido compilado\"", "def fetchAllShaders():\n\tfrom mentalcore import mapi\n\n\t## Now get a list of assets in the scene\n\tassetDict = {}\n\tdupAssets = {}\n\tfor parentGrp in cmds.ls(assemblies = True, long = True):\n\t\tif cmds.ls(parentGrp, dag = True, type = \"mesh\"):\n\t\t\tfor each in cmds.listRelatives(parentGrp, children = True):\n\t\t\t\t## Check for duplicate or base assets\n\t\t\t\tif not cmds.objExists('%s.dupAsset' % each):\n\t\t\t\t\tassetDict[each.split('_hrc')[0]] = parentGrp\n\t\t\t\telse: # handle the duplicate naming\n\t\t\t\t\torigAssetName = each.split('_hrc')[0]\n\t\t\t\t\tdupAssets[each] = [origAssetName, parentGrp]\n\t\t\t\t\t\t\t \n\t## Now process SHD XML\n\tprocessSHDTemplate(assetDict = assetDict, selected = False)\n\tfinalBuildStuff()", "def serialise_shaders(nodes):\n\n valid_nodes = cmds.ls(\n nodes,\n long=True,\n recursive=True,\n showType=True,\n objectsOnly=True,\n type=\"transform\"\n )\n\n meshes_by_id = {}\n for mesh in valid_nodes:\n shapes = cmds.listRelatives(valid_nodes[0],\n shapes=True,\n fullPath=True) or list()\n\n if shapes:\n shape = shapes[0]\n if not cmds.nodeType(shape):\n continue\n\n try:\n id_ = cmds.getAttr(mesh + \".modelID\")\n\n if id_ not in meshes_by_id:\n meshes_by_id[id_] = list()\n\n meshes_by_id[id_].append(mesh)\n\n except ValueError:\n continue\n\n meshes_by_shader = dict()\n for id_, mesh in meshes_by_id.items():\n shape = cmds.listRelatives(mesh,\n shapes=True,\n fullPath=True) or list()\n\n for shader in cmds.listConnections(shape,\n type=\"shadingEngine\") or list():\n\n # Objects in this group are those that haven't got\n # any shaders. 
These are expected to be managed\n # elsewhere, such as by the default model loader.\n if shader == \"initialShadingGroup\":\n continue\n\n if shader not in meshes_by_shader:\n meshes_by_shader[shader] = list()\n\n shaded = cmds.sets(shader, query=True) or list()\n meshes_by_shader[shader].extend(shaded)\n\n shader_by_id = {}\n for shader, shaded in meshes_by_shader.items():\n\n if shader not in shader_by_id:\n shader_by_id[shader] = list()\n\n for mesh in shaded:\n\n # Enable shader assignment to faces.\n name = mesh.split(\".f[\")[0]\n\n transform = name\n if cmds.objectType(transform) == \"mesh\":\n transform = cmds.listRelatives(name, parent=True)[0]\n\n try:\n id_ = cmds.getAttr(transform + \".modelID\")\n shader_by_id[shader].append(mesh.replace(name, id_))\n except KeyError:\n continue\n\n # Remove duplicates\n shader_by_id[shader] = list(set(shader_by_id[shader]))\n\n return shader_by_id", "def import_scene(file_path):\n\n pass", "def set_shaders(self, vert, frag):\n gl = self._gl\n self._linked = False\n # Create temporary shader objects\n vert_handle = gl.createShader(gl.VERTEX_SHADER)\n frag_handle = gl.createShader(gl.FRAGMENT_SHADER)\n # For both vertex and fragment shader: set source, compile, check\n tmp = [(vert, vert_handle, 'vertex'), (frag, frag_handle, 'fragment')]\n for i in range(2):\n code, handle, type_ = tmp[i]\n gl.shaderSource(handle, code)\n gl.compileShader(handle)\n status = gl.getShaderParameter(handle, gl.COMPILE_STATUS)\n if not status:\n errors = gl.getShaderInfoLog(handle)\n raise RuntimeError('errors in ' + type_ + ' shader:\\n' + errors)\n # Attach shaders\n gl.attachShader(self.handle, vert_handle)\n gl.attachShader(self.handle, frag_handle)\n # Link the program and check\n gl.linkProgram(self.handle)\n if not gl.getProgramParameter(self.handle, gl.LINK_STATUS):\n raise RuntimeError(\"Program link error:\\n\" + \n gl.getProgramInfoLog(self.handle))\n # Now we know what variables will be used by the program. Do *before* \n # cleanup, otherwise https://github.com/bokeh/bokeh/issues/2683\n self._unset_variables = self._get_active_attributes_and_uniforms()\n # Now we can remove the shaders. 
We no longer need them and it frees up\n # precious GPU memory:http://gamedev.stackexchange.com/questions/47910\n gl.detachShader(self.handle, vert_handle)\n gl.detachShader(self.handle, frag_handle)\n gl.deleteShader(vert_handle)\n gl.deleteShader(frag_handle)\n # Set / reset\n self._known_invalid = []\n self._linked = True", "def fetchShadersForSelected(): \n\tfrom mentalcore import mapi\n\n\t## ASSSIGN DEFAULT LAMBERT AND CLEAN THE HYERPSHADE!\n\tfor each in cmds.ls(sl = True):\n\t\ttry:\tcmds.sets(each, e = True , forceElement = 'initialShadingGroup')\n\t\texcept:\tcmds.warning('FAILED to set initial Shading group for %s' % each)\n\n\t[cmds.lockNode(cp, lock = True) for cp in cmds.ls(type = 'core_renderpass')] ## Lock all the core_renderpasses before deleting unused to preserve...\n\tmel.eval(\"MLdeleteUnused();\")\n\n\t## Now get a list of assets in the scene\n\tassetDict = {}\n\tfor grp in cmds.ls(sl = True):\n\t\tif cmds.ls(grp, dag = True, type = \"mesh\"):\n\t\t\tgetParent = cmds.listRelatives(grp, parent = True)\n\t\t\tif getParent:\n\t\t\t\tassetDict[grp.split('_hrc')[0]] = [cmds.listRelatives(grp, parent = True)[0], grp]\n\t\t\telse:\n\t\t\t\tassetDict[grp.split('_hrc')[0]] = ['', grp]\n\n\t## Now process XML\n\tprocessSHDTemplate(assetDict = assetDict, selected = True)\n\tfinalBuildStuff()", "def link_shaders(*shaders):\n program = gl.glCreateProgram()\n for shader in shaders:\n gl.glAttachShader(program, shader)\n gl.glLinkProgram(program)\n # check linking error\n result = gl.glGetProgramiv(program, gl.GL_LINK_STATUS)\n if not(result):\n raise RuntimeError(gl.glGetProgramInfoLog(program))\n return program" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
import data from file
asset = Only import for the asset that you want
searchAndReplace = Change any part of the objects name to another word
def importData( self, asset = '', searchAndReplace = ['',''] ):
    pickleData = pickle.load( open( self.dataPath.path, "rb" ) )
    layers = [RenderLayerData(l,d) for l,d in pickleData.items() if not ':' in l]
    for l in layers:
        if not searchAndReplace[0] == '' or not searchAndReplace[1] == '':
            l.filterMe( asset, searchAndReplace )
        l.create()
        l.addObjects()
        l.makeOverrides()
        l.makeOverrideConnections()
        l.makeShaderOverride()
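The method above loads a pickled dict of render-layer definitions, optionally filters and renames them, and then rebuilds each layer; a minimal sketch of just the load-and-filter step is shown below (the file name and data shape are assumptions for illustration, and the RenderLayerData class itself is not reproduced):

# Illustrative only; the real data file and RenderLayerData class are not shown here.
import pickle

with open("renderLayers.data", "rb") as fh:  # assumed file name
    layer_data = pickle.load(fh)

# Mirror the record's "not ':' in l" check to skip namespaced layer entries.
layers = {name: data for name, data in layer_data.items() if ":" not in name}
for name, data in layers.items():
    print(name, len(data))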
[ "def replace(name, newobject):", "def test_ReplaceFromJsonFile(self):\n jsonFilePath = os.path.join(\n TestReplaceResolver.assemblyRepo, \"a\", \"v1\", ReplaceResolver.Tokens.replaceFileName\n )\n\n pair1 = [\"component/c/v1/c.usda\", \"component/c/v2/c.usda\"]\n pair2 = [\"assembly/b/v1/b.usda\", \"assembly/b/v2/b.usda\"]\n\n import json\n\n with open(jsonFilePath, \"w\") as outfile:\n json.dump([pair1, pair2], outfile)\n\n filePath = os.path.join(TestReplaceResolver.assemblyRepo, \"a/v1/a.usda\")\n os.environ[\"PXR_AR_DEFAULT_SEARCH_PATH\"] = os.path.abspath(\n TestReplaceResolver.rootDir\n )\n stage = Usd.Stage.Open(filePath)\n\n prim = stage.GetPrimAtPath(\"/a\")\n self.assertTrue(prim)\n\n cAttr = prim.GetAttribute(\"c\")\n self.assertTrue(cAttr)\n self.assertEqual(cAttr.Get(), \"c_v2\")\n\n bAttr = prim.GetAttribute(\"b\")\n self.assertTrue(bAttr)\n self.assertEqual(bAttr.Get(), \"b_v2\")\n\n # Check that asset name, version and path are matching c/v2\n modelAPI = Usd.ModelAPI(prim)\n self.assertEqual(modelAPI.GetKind(), Kind.Tokens.assembly)\n self.assertEqual(modelAPI.GetAssetName(), \"c\")\n self.assertEqual(modelAPI.GetAssetVersion(), \"v2\")\n self.assertEqual(modelAPI.GetAssetIdentifier().path, \"component/c/v2/c.usda\")", "def clean_objects_name(self, main_part_name):\n # import os.path\n # (CADPath, CADFilename) = os.path.split(CADFile)\n # (CADName, CADExt) = os.path.splitext(CADFilename)\n CADSuffix = main_part_name + \"_\"\n objNames = self.oeditor.GetMatchedObjectName(CADSuffix + \"*\")\n for name in objNames:\n RenameArgs = {}\n RenameArgs[\"NAME\"] = \"Rename Data\"\n RenameArgs[\"Old Name\"] = name\n RenameArgs[\"New Name\"] = name.replace(CADSuffix, \"\")\n self.oeditor.RenamePart(RenameArgs)\n return True", "def edit_item_name_replace(cls, data):\n while re.search(cls.edit_item_name, data):\n random_name = \"科目名称编辑\" + \"\".join(random.sample(\"1234567890\", 4))\n data = re.sub(cls.edit_item_name, random_name, data)\n return data", "def renameAssetObjects(self):\n\t\tfor i,o in enumerate( self.objects ):\n\t\t\tmn.Node( o ).name = self.name + '%i'%i", "def replace_obj_from_module(strings, dict):\n for string in strings:\n if any_key_contains(string, dict):\n full_keys = get_full_keys_containing(string, dict)\n for full_key in full_keys:\n if isinstance(dict[full_key], str):\n dict[full_key] = import_obj_from_string(dict[full_key])", "def importGeoAsset ():\n\n help(importGeoAsset)\n\n import hou\n import os\n \n \n #set path\n hipPath = hou.expandString('$HIP')\n path = hipPath + \"/geo/\"\n print (path)\n \n listPath = os.listdir(path)\n \n obj = hou.node(\"/obj\")\n geoImport= obj.createNode (\"geo\",\"geoImport\")\n\n file1 = hou.node(\"/obj/geoImport/file1\")\n file1.destroy()\n \n for n in listPath :\n print (n)\n currentFile=geoImport.createNode(\"file\",n)\n #set fileNames\n currentFile.setParms({\"file\":\"$\"+\"HIP/geo/\"+n})\n \n #reload geo callback\n #prepa param\n parm_group = geoImport.parmTemplateGroup()\n parm_folder = hou.FolderParmTemplate(\"folder\",\"reload\")\n #button run code\n button=hou.ButtonParmTemplate(\"reload\",\"Reload\")\n button.setTags({\"script_callback_language\":\"python\",\"script_callback\":\"import y \\ny.reloadGeo()\"})\n parm_folder.addParmTemplate(button)\n #append param\n parm_group.append(parm_folder)\n geoImport.setParmTemplateGroup(parm_group)", "def _update_object_content(name, input):\n content = input._content\n\n hrefs = re.compile(r'<\\s*[^\\>]*href\\s*=\\s*([\"\\'])(.*?)\\1')\n srcs = 
re.compile(r'<\\s*[^\\>]*src\\s*=\\s*([\"\\'])(.*?)\\1')\n\n matches = hrefs.findall(content)\n matches.extend(srcs.findall(content))\n relative_paths = []\n for found in matches:\n found = found[1]\n if found not in relative_paths:\n relative_paths.append(found)\n\n for relative_path in relative_paths:\n if not \"://\" in relative_path: # we don't want to rewrite protocols\n dest_path = os.sep.join((get_relative_path(name), \"static\",\n relative_path))\n content = content.replace(relative_path, dest_path)\n\n return content", "def filter_inp_file_upname(inp_file_in, inp_file_out, replace_with_these):\n with open(inp_file_in, 'r') as input_file_in:\n input_data = input_file_in.readlines()\n replace_pos = dict(find_upname_positions(input_data, replace_with_these))\n output_data = '\\n'.join(replace_lines(input_data, replace_pos))\n with open(inp_file_out, 'w') as input_file_out:\n input_file_out.write(output_data)", "def replace_with_your_name(your_name, madlib):", "def importLightLinking(self, asset = '', searchAndReplace = ['',''] ):\n\t\tLayersInfo = pickle.load( open( self.lightLinkPath.path, \"rb\") )\n\t\tmc.refresh( su = 1 )\n\t\tif not asset == '':\n\t\t\tLayersInfo = self.filterLightLinksData( LayersInfo , asset, searchAndReplace )\n\t\tfor l in LayersInfo.keys():\n\t\t\tobjsToBreakLink = []\n\t\t\tfor link in LayersInfo[l]:\n\t\t\t\tif mc.objExists( link ):\n\t\t\t\t\tobjsToBreakLink.append( link )\n\t\t\tmc.lightlink( b = True, light = l, o = objsToBreakLink )\n\t\tmc.refresh( su = 0 )", "def importAbcAsset ():\n\n help(importAbcAsset)\n\n import hou\n import os\n \n \n #set path\n hipPath = hou.expandString('$HIP')\n path = hipPath + \"/abc/\"\n print (path)\n \n listPath = os.listdir(path)\n \n obj = hou.node(\"/obj\")\n alembicImport= obj.createNode (\"geo\",\"alembicImport\")\n \n file1 = hou.node(\"/obj/alembicImport/file1\")\n file1.destroy()\n \n for n in listPath:\n print (n)\n currentFile=alembicImport.createNode(\"alembic\",n)\n #set fileName\n currentFile.setParms({\"fileName\":\"$\"+\"HIP/abc/\"+n})\n\n #reload geo callback\n #prepa param\n parm_group = alembicImport.parmTemplateGroup()\n parm_folder = hou.FolderParmTemplate(\"folder\",\"reload\")\n #button run code\n button=hou.ButtonParmTemplate(\"reload\",\"Reload\")\n button.setTags({\"script_callback_language\":\"python\",\"script_callback\":\"import y \\ny.reloadAlembic()\"})\n parm_folder.addParmTemplate(button)\n #append param\n parm_group.append(parm_folder)\n alembicImport.setParmTemplateGroup(parm_group)", "def item_name_replace(cls, data):\n while re.search(cls.item_name, data):\n random_name = \"科目名称\" + \"\".join(random.sample(\"1234567890\", 5))\n data = re.sub(cls.item_name, random_name, data)\n return data", "def change_match_res_file_of_package(res_line, rename_list):\n for (rename_name, rename_new_name) in rename_list: # Fo each edited name\n if not rename_name.startswith('!NewNoEdit!'):\n res_line = res_line.replace(rename_name, rename_new_name)\n print res_line, # Print the line back", "def import_data(species_name: str):\n try:\n Species.objects(name=species_name).update_one(set__name=species_name, upsert=True)\n except errors.ValidationError:\n print(traceback.format_exc())", "def rewrite_file_imports(item, vendored_libs):\n text = item.read_text(encoding='utf-8')\n for lib in vendored_libs:\n text = re.sub(\n r'(\\n\\s*)import %s(\\n\\s*)' % lib,\n r'\\1from .vendor import %s\\2' % lib,\n text,\n )\n text = re.sub(\n r'(\\n\\s*)from %s' % lib,\n r'\\1from .vendor.%s' % lib,\n text,\n )\n 
item.write_text(text, encoding='utf-8')", "def rewrite_file_imports(item, vendored_libs):\n text = item.read_text(encoding=\"utf-8\")\n for lib in vendored_libs:\n text = re.sub(\n r\"(\\n\\s*)import %s(\\n\\s*)\" % lib,\n r\"\\1from pythonfinder._vendor import %s\\2\" % lib,\n text,\n )\n text = re.sub(\n r\"(\\n\\s*)from %s\" % lib, r\"\\1from pythonfinder._vendor.%s\" % lib, text\n )\n item.write_text(text, encoding=\"utf-8\")", "def _map_source(source):\n for pattern, replacement in \\\n settings.REFINERY_FILE_SOURCE_MAP.iteritems():\n translated_source = re.sub(pattern, replacement, source)\n if translated_source != source:\n return translated_source\n return source", "def updateCountryNames(self):\n try:\n with open('countryNameMapping.json', 'r') as file:\n name_mapping = json.loads(file.read())\n except:\n sys.exit('countryNameMapping.json file is unavailable in current directory.')\n \n for key, value in name_mapping.items():\n self.covid_df.replace(key, value, inplace=True)\n \n try:\n with open('countryNameISO2.json', 'r') as file:\n self.name_iso2_mapping = json.loads(file.read())\n except:\n print('countryNameISO2.json file is unavailable in current directory, creating file...')\n self.writeCountryCodeFile()\n print('Re-importing required JSONs...')\n self.updateCountryNames()" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
import master settings from data file
def importMasterSettings(self):
    pickleData = pickle.load( open( self.masterPath.path, "rb" ) )
    master = rlayer.RenderLayer( 'defaultRenderLayer' )
    master.makeCurrent()
    for a in pickleData.keys():
        try:
            a.v = pickleData[a]
        except:
            continue
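The method above restores pickled values onto Maya's defaultRenderLayer and silently skips any attribute that refuses its value; a Maya-free sketch of that restore-with-skip pattern follows (the class, file name, and attribute names are invented for illustration):

import pickle

class RenderSettings:
    """Stand-in object; the record targets Maya's defaultRenderLayer instead."""
    def __init__(self):
        self.samples = 8
        self.resolution = (1920, 1080)

with open("masterLayer.settings", "rb") as fh:  # assumed file name
    saved = pickle.load(fh)

target = RenderSettings()
for name, value in saved.items():
    try:
        setattr(target, name, value)
    except (AttributeError, TypeError):
        continue  # as in the record: skip settings that cannot be applied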
[ "def importsettings(importfile, verbose):\n settings = json.load(importfile)\n if verbose:\n click.echo('Got settings from input file: ')\n click.echo(settings)\n # write into settings.json\n with open(pwd.getpwuid(os.getuid()).pw_dir+'/.ethvigil/settings.json', 'w') as f:\n json.dump(settings, f)", "def loadSettings(self, filename='short_240.settings'):\n global master_run_no\n self.settingsFilename = filename\n # print 'self.settingsFilename = ', self.settingsFilename\n if os.path.exists(filename):\n stream = open(filename, 'r')\n else:\n stream = open(master_lattice_location+filename, 'r')\n self.settings = yaml.load(stream, Loader=yaml.UnsafeLoader)\n self.globalSettings = self.settings['global']\n master_run_no = self.globalSettings['run_no'] if 'run_no' in self.globalSettings else 1\n self.fileSettings = self.settings['files']\n elements = self.settings['elements']\n self.groups = self.settings['groups'] if 'groups' in self.settings and self.settings['groups'] is not None else {}\n stream.close()\n\n # for name, elem in list(self.groups.items()):\n # group = globals()[elem['type']](name, self.elementObjects, **elem)\n # self.groupObjects[name] = group\n\n for name, elem in list(elements.items()):\n self.read_Element(name, elem)\n\n # for name, lattice in list(self.fileSettings.items()):\n # self.read_Lattice(name, lattice)", "def load_config(self):\n self.data.read(self.path)", "def load_measurement_settings_file():\n\n # First update the settings that the state machine is up to date\n self.variables.ui_plugins[\"Settings_window\"].load_new_settings()\n\n fileDialog = QFileDialog()\n file = fileDialog.getOpenFileName()\n\n if file[0]:\n file = open(str(file[0]), \"r\")\n dict = yaml.load(file)\n file.close()\n\n # l.info(\"Loaded new measurement settings file: \" + str(file[0]))\n self.variables.default_values_dict[\"settings\"].update(\n dict\n ) # Updates the values of the dict, it either updates the values or adds them if not incluced\n self.variables.ui_plugins[\"Settings_window\"].configure_settings()", "def load(filename):\n conf = METplusConfig()\n conf.read(filename)\n return conf", "def _load_project_settings_file(ctx):\n\tif hasattr(ctx, 'projects_settings'):\n\t\treturn\n\t\t\n\tprojects_node = _projects_node(ctx)\t\n\tctx.projects_settings = ctx.parse_json_file(projects_node)", "def update_from_file(self):\n config_path = os.environ.get('MINDINSIGHT_CONFIG', '')\n if not config_path:\n return\n\n config_module = None\n\n # python:full.path.for.config.module\n if config_path.startswith('python:'):\n config_module = import_module(config_path[len('python:'):])\n\n # file:full/path/for/config.py\n elif config_path.startswith('file:'):\n config_path = config_path[len('file:'):]\n module_name = '__mindinsightconfig__'\n config_module = types.ModuleType(module_name)\n machinery = import_module('importlib.machinery')\n loader = machinery.SourceFileLoader(module_name, config_path)\n loader.exec_module(config_module)\n\n if config_module is None:\n return\n\n for setting in dir(config_module):\n if setting.isupper() and setting in self._default_settings:\n setting_value = getattr(config_module, setting)\n setattr(self, setting, setting_value)\n self._explicit_settings.add(setting)", "def load(filename):\n conf = CommonConfig.get()\n conf.update(toml.load(filename))\n return conf", "def load_settings(self, outfile='settings.p'):\n settings = pickle.load(open(path,'rb'))\n self.__dict__.update(settings)", "def set_master_table(filepath):\n my_globals['master_table_path'] = 
filepath\n my_globals['master_table_data'] = None", "def load_from_file(self):\n if not os.path.exists(self.settings_file):\n return\n \n with open(self.settings_file, 'rb') as settings_file:\n try:\n options = json.load(settings_file)\n \n if self._settings_coordinate(options):\n self.options = options\n except:\n self.load_default()", "def import_config(self):\n # Get the config file\n import config\n\n # Get all keys from keyvalue pairs in the config file\n settingsFromConfigFile = [x for x in dir(config) if not x.startswith('__')]\n\n # Convert config file into dict\n for key in settingsFromConfigFile:\n value = getattr(config, key)\n self.config[key] = value\n\n # Settings validation: specify keys which are valid settings\n # If there are rows in the config file which are not listed here, an\n # error will be raised\n validSettings = {\n 'data_dir',\n 'running_data_dir',\n 'unison_log_dir',\n 'unisonctrl_log_dir',\n 'log_file',\n 'make_root_directories_if_not_found',\n 'sync_hierarchy_rules',\n 'unison_local_root',\n 'unison_remote_root',\n 'unison_path',\n 'global_unison_config_options',\n 'unison_remote_ssh_conn',\n 'unison_remote_ssh_keyfile',\n 'unison_local_hostname',\n 'unison_home_dir',\n 'unison_user',\n 'webhooks',\n 'rotate_logs',\n }\n\n # If a setting contains a directory path, add it's key here and it will\n # be sanatized (whitespace and trailing whitespaces stripped)\n settingPathsToSanitize = {\n 'data_dir',\n 'unison_home_dir',\n 'running_data_dir',\n 'unison_log_dir',\n 'unisonctrl_log_dir',\n }\n\n # Values here are used as config values unless overridden in the\n # config.py file\n defaultSettings = {\n 'data_dir': '/tmp/unisonctrl',\n 'log_file': '/dev/null',\n 'make_root_directories_if_not_found': True,\n 'unison_path': '/usr/bin/unison', # Default ubuntu path for unison\n 'unison_remote_ssh_keyfile': \"\",\n 'unison_local_hostname': platform.node(),\n 'running_data_dir': self.config['data_dir'] + os.sep + \"running-sync-instance-information\",\n 'unison_log_dir': self.config['data_dir'] + os.sep + \"unison-logs\",\n 'unisonctrl_log_dir': self.config['data_dir'] + os.sep + \"unisonctrl-logs\",\n 'unison_user': getpass.getuser(),\n 'rotate_logs': \"time\",\n }\n\n # TODO: Implement allowedSettings, which force settings to be\n # in a given list of options\n\n # Apply default settings to fill gaps between explicitly set ones\n for key in defaultSettings:\n if (key not in self.config):\n self.config[key] = defaultSettings[key]\n\n # Ensure all required keys are specified\n for key in validSettings:\n if (key not in self.config):\n raise LookupError(\"Required config entry '\" + key + \"' not specified\")\n\n # Ensure no additional keys are specified\n for key in self.config:\n if (key not in validSettings):\n raise LookupError(\"Unknown config entry: '\" + key + \"'\")\n\n # Sanatize directory paths\n for key in settingPathsToSanitize:\n self.config[key] = self.sanatize_path(self.config[key])\n\n # If you reach here, configuration was read and imported without error\n\n return True", "async def importconfig(self, ctx, *, url):\n if url.startswith('http'):\n if url.startswith('https://hasteb.in') and 'raw' not in url:\n url = 'https://hasteb.in/raw/' + url[18:]\n\n async with self.bot.session.get(url) as resp:\n data = await resp.json(content_type=None)\n else:\n data = url\n data['guild_id'] = str(ctx.guild.id)\n await self.bot.db.update_guild_config(ctx.guild.id, {'$set': data})\n await ctx.send(self.bot.accept)", "def add_settings_early(self):\n\n # config 
settings\n config = {\n # some generic settings for every site, to point to location of some stuff\n mconst.DEF_SETTINGNAME_pkgdirimps_sitempacks: [pkgdirimp_sitempacks],\n mconst.DEF_SETTINGNAME_controllerroot: pkgdirimp_controllers,\n mconst.DEF_SETTINGNAME_sitefilepath: misc.calc_modulefiledirpath(__file__),\n # should we also load mewlo site installed setuptools plugins\n mconst.DEF_SETTINGNAME_flag_importsetuptoolspacks: True,\n mconst.DEF_SETTINGNAME_replaceshadowpath: '${sitefilepath}/replaceshadow',\n }\n self.settings.merge_settings_key(mconst.DEF_SETTINGSEC_config, config)\n\n # config settings\n config = {\n # Name of site\n mconst.DEF_SETTINGNAME_sitename: 'Mewlo',\n # Specify where this site serves from\n # these siteurls should not end in / so if you are serving a site at root just use relative of '' and absolute of 'http://sitename.com'\n mconst.DEF_SETTINGNAME_siteurl_relative: '',\n mconst.DEF_SETTINGNAME_siteurl_absolute: 'http://127.0.0.1:8080',\n #mconst.DEF_SETTINGNAME_siteurl_relative: '/public/publicity',\n #mconst.DEF_SETTINGNAME_siteurl_absolute: 'http://127.0.0.1:8080/public/publicity',\n }\n self.settings.merge_settings_key(mconst.DEF_SETTINGSEC_config, config)\n\n # config settings\n config = {\n # online status information\n mconst.DEF_SETTINGNAME_isenabled: True,\n mconst.DEF_SETTINGNAME_isonline: True,\n mconst.DEF_SETTINGNAME_offline_mode: 'maintenance',\n mconst.DEF_SETTINGNAME_offline_message: 'We are down for leap-year maintenance; we will be back soon.',\n mconst.DEF_SETTINGNAME_offline_allowadmin: False,\n }\n self.settings.merge_settings_key(mconst.DEF_SETTINGSEC_config, config)\n\n\n\n # extension pack config -- we need to explicitly enable plugins\n packconfig = {\n 'mouser.mewlotestplug' : {\n 'isenabled': False,\n },\n 'mouser.testpack' : {\n 'isenabled': False,\n },\n 'mewlo.siteaddon.account' : {\n 'isenabled': True,\n },\n 'mewlo.siteaddon.group' : {\n 'isenabled': True,\n },\n }\n self.settings.merge_settings_key(mconst.DEF_SETTINGSEC_packs, packconfig)\n\n\n # database config\n databaseconfig = {\n 'settings' : {\n 'sqlalchemy_loglevel' : logging.NOTSET,\n #'sqlalchemy_loglevel' : logging.INFO,\n },\n 'default' : {\n 'url' : 'sqlite:///${dbfilepath}/mewlo_testsite1.sqlite',\n #'tablename_prefix': 'mewlo_',\n 'flag_echologging' : False,\n },\n 'mysql_unused' : {\n # Sample configuration for mysql\n 'url' : 'mysql://mewlo_user:mewlo_pass@localhost:3306/mewlo_testsite1',\n 'tablename_prefix': 'mewlo_'\n },\n }\n self.settings.merge_settings_key(mconst.DEF_SETTINGSEC_database, databaseconfig)\n self.settings.listappend_settings_key(mconst.DEF_SETTINGSEC_make_dirs, '${dbfilepath}')\n\n # email config settings\n mailconfig = {\n # online status information\n 'smtp_host': self.get_configval('mail_smtp_host'),\n 'smtp_login': self.get_configval('mail_smtp_login'),\n 'smtp_port': self.get_configval('mail_smtp_port'),\n 'smtp_mode': self.get_configval('mail_smtp_mode'),\n 'smtp_password': self.get_configval('mail_smtp_password'),\n 'mail_from' : self.get_configval('mail_from'),\n }\n self.settings.merge_settings_key(mconst.DEF_SETTINGSEC_mail, mailconfig)\n\n\n # account siteaddon settings\n siteaddonconfig = {\n # online status information\n 'registration_mode': 'immediate',\n 'flag_require_email_verified_before_login': False,\n }\n self.settings.merge_settings_key('siteaddon_account', siteaddonconfig)\n\n\n\n # ATTN: UNFINISHED\n # asset mounts config\n if (False):\n assetmountconfig = {\n 'default' : {\n # an internal assetmount just needs a url 
route\n 'type': 'internal',\n 'routeid': 'static_files',\n },\n 'external' : {\n 'type': 'external',\n 'filepath': '${mewlofilepath}/public_assets',\n 'urlpath': 'http://127.0.0.1/mewlo/public_assets',\n },\n }\n self.settings.merge_settings_key(mconst.DEF_SETTINGSEC_asset_mounts, assetmountconfig)\n\n\n\n\n\n #print \"TESTING CONFIG1:\"\n #self.run_configfunc('sayhello',1,2,3)\n #print \"TESTING CONFIG2:\"\n #self.run_allconfigfuncs('sayhello',1,2,3)", "def prepare_settings(self):\n\n self.settings = load_settings_as_template(DEFAULT_SETTINGS_PATH)\n self.settings['experiment']['file_paths'] = [os.path.join(TEST_DIR, _) for _ in self.file_paths]\n self.settings['experiment']['fasta_paths'] = [os.path.join(TEST_DIR, _) for _ in self.fasta_paths]", "def _import(self, datadict):\n self.GUID = datadict.get(\"GUID\", uuid.uuid1())\n self.FileName = datadict.get(\"FileName\", \"\")\n self.Name = datadict.get(\"Name\", \"\")\n self.Projects = datadict.get(\"Projects\", [])\n self.VSVersion = datadict.get(\"VSVersion\", None)", "def load(config_file_name=\"network_importer.toml\", config_data=None):\n global SETTINGS\n\n if config_data:\n SETTINGS = _configure_backend(Settings(**config_data))\n return\n\n if os.path.exists(config_file_name):\n config_string = Path(config_file_name).read_text()\n config_tmp = toml.loads(config_string)\n SETTINGS = _configure_backend(Settings(**config_tmp))\n return\n\n SETTINGS = Settings()", "def load_settings():\r\n if os.path.exists('settings.json'):\r\n json_data = open('settings.json').read()\r\n\r\n data = json.loads(json_data)\r\n return data\r\n else:\r\n return False", "def LoadConfig(): # -> None\n global user, headers\n with open('docs/default.json', 'r') as f:\n data = json.load(f)\n user = data[\"User\"]\n headers = data[\"Headers\"]" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
return the overrides in the layer
def dataOverrides(self):
    return self._overrides
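A bare getter like this is often exposed as a read-only property instead; a brief sketch (the enclosing class name is an assumption, not taken from the record):

class RenderLayerData:
    """Illustrative wrapper only; the record's actual class is not shown."""
    def __init__(self):
        self._overrides = {}

    @property
    def dataOverrides(self):
        # Read-only view of the stored override data.
        return self._overrides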
[ "def overrides(self):\n return self._overrides", "def get_overrides(self, namespace=None):\n return {}", "def overrides(self) -> ConfigNodePropertyArray:\n return self._overrides", "def overrides(self) -> tuple[dict[str, Any], dict[str, Any]]:\n settings = {}\n if self.actions:\n settings = self.actions.overrides\n if self.validations:\n settings |= self.validations.overrides\n\n filter_settings = {}\n if self.extra_fields:\n filter_settings = self.extra_fields.model_dump(exclude_unset=True)\n\n return settings, filter_settings", "def get_overrides(conn):\n with conn.cursor(cursor_factory=RealDictCursor) as cur:\n cur.execute(sql_overrides)\n return cur.fetchall()", "def get_overrides_columns(self):\n if hasattr(self, '_overrides'):\n return list(self._overrides.columns)\n return []", "def filter_contiguity_overrides(self):\n return self.filter_nodes('//ContiguityOverrides/ContiguityOverride')", "def get_overrides(token_fields_base, token_fields_from_args):\n overrides = []\n for key_raw, _ in token_fields_from_args.items():\n keys = key_raw.split('.')\n base_ref = token_fields_base\n try:\n for key in keys:\n base_ref = base_ref[key]\n # no KeyError means that the token_fields_base has an existing value corresponding with the arg\n overrides.append(key_raw)\n except KeyError:\n pass\n return overrides", "def shrinkage_overrides(self):\n return self._shrinkage_overrides", "def merge_overrides(self, context, file_overrides=None, set_overrides=None):\n return self._helm.merge_overrides(file_overrides, set_overrides)", "def get_overrides_columns(self):\n\n if hasattr(self, '_overrides'):\n return list(self._overrides.columns)\n return []", "def _resolve_overrides(self):\r\n if not self.override_targets:\r\n return self._pre_override_dependencies\r\n\r\n result = OrderedSet()\r\n\r\n # resolve overrides and fetch all of their \"artifact-providing\" dependencies\r\n excludes = set()\r\n for override_target in self.override_targets:\r\n # add pre_override deps of the target as exclusions\r\n for resolved in override_target.resolve():\r\n excludes.update(self._excludes(resolved))\r\n # prepend the target as a new target\r\n result.add(override_target)\r\n\r\n # add excludes for each artifact\r\n for direct_dep in self._pre_override_dependencies:\r\n # add relevant excludes to jar dependencies\r\n for jar_dep in self._jar_dependencies(direct_dep):\r\n for exclude in excludes:\r\n jar_dep.exclude(exclude.org, exclude.name)\r\n result.add(direct_dep)\r\n\r\n return result", "def _extract_hook_position(self):\n model_hook_layers = {}\n for config in self._loss_config_list:\n model_name_pairs = config['model_name_pairs']\n layers_name = config['layers_name']\n for model_name_pair in model_name_pairs:\n for idx, model_name in enumerate(model_name_pair):\n if model_name not in model_hook_layers:\n model_hook_layers[model_name] = [layers_name[idx]]\n else:\n model_hook_layers[model_name].append(layers_name[idx])\n for model_name, hook_layers in model_hook_layers.items():\n model_hook_layers[model_name] = list(set(hook_layers))\n return model_hook_layers", "def conditional_overrides(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['GoogleCloudChannelV1ConditionalOverrideArgs']]]]:\n return pulumi.get(self, \"conditional_overrides\")", "def listChainOverrides(self):\n cbt = self._chains_by_type\n return cbt and sorted(cbt.items()) or ()", "def add_over(self, override: ItemConfig) -> None:\n self.all_conf = lazy_conf.concat(self.all_conf, override.all_conf)\n\n for vers_id, styles in 
override.versions.items():\n our_styles = self.versions.setdefault(vers_id, {})\n for sty_id, style in styles.items():\n if sty_id not in our_styles:\n our_styles[sty_id] = style\n else:\n our_styles[sty_id] = lazy_conf.concat(our_styles[sty_id], style)", "def get_override_settings(self):\n return self.overriden_settings or {}", "def layers(self):\n return self['layers']", "def override_paramset(self, override_str):\n\n paramset = ParamSet()\n if not override_str:\n return paramset\n\n override = eval(override_str, {}, {})\n if not override:\n return paramset\n\n for override_name in override:\n # The override can have a node_name/parm format which allows for point\n # instance overrides to override parms in a network.\n\n cached_override = self.override_cache.get(override_name, None)\n if cached_override is not None:\n # Hint to just skip\n if cached_override == -1:\n continue\n if isinstance(cached_override, PBRTParam):\n # textures which can't be overriden\n paramset.add(cached_override)\n continue\n pbrt_name, pbrt_type, tuple_names = cached_override\n if tuple_names:\n value = [override[x] for x in tuple_names]\n else:\n value = override[override_name]\n pbrt_param = PBRTParam(pbrt_type, pbrt_name, value)\n paramset.add(pbrt_param)\n continue\n\n override_match = self.override_pat.match(override_name)\n spectrum_type = override_match.group(\"spectrum\")\n parm_name = override_match.group(\"parm\")\n override_node = override_match.group(\"node\")\n if override_node is not None and override_node != self.name:\n self.override_cache[override_name] = -1\n continue\n\n # There can be two style of \"overrides\" one is a straight parm override\n # which is similar to what Houdini does. The other style of override is\n # for the spectrum type parms. Since spectrum parms can be of different\n # types and the Material Overrides only support \"rgb\" we are limited\n # in the types of spectrum overrides we can do. To work around this we'll\n # support a different style, override_parm:spectrum_type. If the parm name\n # ends in one of the \"rgb/color\" types then we'll handle it differently.\n # TODO add a comment as to what the value would look like\n\n # NOTE: The material SOP will use a parm style dictionary if there\n # parm name matches exactly\n # ie) if there is a color parm you will get\n # {'colorb':0.372511,'colorg':0.642467,'colorr':0.632117,}\n # But if the parm name doesn't match (which we are allowing\n # for you will get something like this -\n # {'colora':(0.632117,0.642467,0.372511),}\n\n # Once we have a parm name, we need to determine what \"style\" it is.\n # Whether its a hou.ParmTuple or hou.Parm style.\n tuple_names = tuple()\n parm_tuple = self.node.parmTuple(parm_name)\n if parm_tuple is None:\n # We couldn't find a tuple of that name, so let's try a parm\n parm = self.node.parm(parm_name)\n if parm is None:\n # Nope, not valid either, let's move along\n self.override_cache[override_name] = -1\n continue\n # if its a parm but not a parmtuple it must be a split.\n parm_tuple = parm.tuple()\n # we need to \"combine\" these and process them all at once and\n # then skip any other occurances. The skipping is handled by\n # the overall caching mechanism. 
self.override_cache\n tuple_names = tuple([x.name() for x in parm_tuple])\n\n # This is for wrangling parm names of texture nodes due to having a\n # signature parm.\n pbrt_parm_name = self.pbrt_parm_name(parm_tuple.name())\n\n if spectrum_type is None and tuple_names:\n # This is a \"traditional\" override, no spectrum or node name prefix\n value = [override[x] for x in tuple_names]\n pbrt_param = self._hou_parm_to_pbrt_param(\n parm_tuple, pbrt_parm_name, value\n )\n elif spectrum_type in (\"spectrum\", \"xyz\", \"blackbody\"):\n pbrt_param = PBRTParam(\n spectrum_type, pbrt_parm_name, override[override_name]\n )\n elif not tuple_names:\n pbrt_param = self._hou_parm_to_pbrt_param(\n parm_tuple, pbrt_parm_name, override[override_name]\n )\n else:\n raise ValueError(\"Unable to wrangle override name: %s\" % override_name)\n\n paramset.add(pbrt_param)\n\n # From here to the end of the loop is to allow for caching\n\n if pbrt_param.type == \"texture\":\n self.override_cache[override_name] = pbrt_param\n continue\n\n # we are making an assumption a split parm will never be a spectrum\n # or have a node prefix. The Material SOP doesn't allow for it as well.\n for name in tuple_names:\n # The -1 means \"continue\"\n self.override_cache[name] = -1\n # Sanity check\n if tuple_names and override_name not in tuple_names:\n raise ValueError(\n \"Override name: %s, not valid for a parmTuple\" % override_name\n )\n # override_name must match one of the tuple_names\n self.override_cache[override_name] = (\n pbrt_param.name,\n pbrt_param.param_type,\n tuple_names,\n )\n return paramset" ]
{ "objective": { "paired": [], "self": [], "triplet": [ [ "query", "document", "negatives" ] ] } }